Compare commits

..

681 commits

Author SHA1 Message Date
Simon
4332b1beef
Do not add start time when syncing (#442)
* do not add start time when syncing

* use itsoffset for all syncing related time shifts
2024-07-01 18:43:16 +02:00
bytedream
2cf9125de3 Update README.md 2024-07-01 16:38:29 +02:00
bytedream
756022b955 Fix panic when logged in anonymously 2024-06-20 00:12:33 +02:00
bytedream
509683d23a Update dependencies and version 2024-06-19 23:38:57 +02:00
bytedream
8047680799 Add drm check 2024-06-19 23:18:35 +02:00
bytedream
287df84382 Rework episode filtering 2024-06-14 00:21:07 +02:00
bytedream
e7ac6d8874 Deprecate search stream.is_drm option 2024-05-24 22:17:25 +02:00
bytedream
fb8e535644 Fix subtitle title not being human-readable 2024-05-24 22:09:23 +02:00
bytedream
67c267be20 Remove unused variable 2024-05-24 22:05:04 +02:00
bytedream
a1c7b2069d Update dependencies and version 2024-05-23 00:01:42 +02:00
bytedream
74e5e05b0f Invalidate stream when using search command (#428) 2024-05-22 23:59:12 +02:00
bytedream
7d2ae719c8 Remove internal jwt error retry 2024-05-22 16:54:58 +02:00
bytedream
5593046aae Update dependencies and version 2024-05-22 16:52:43 +02:00
bytedream
f8bd092987 Add custom error message if too many streams are active 2024-05-21 21:51:18 +02:00
bytedream
cbe57e2b6e Update dependencies and version 2024-05-21 21:34:05 +02:00
bytedream
f7ce888329 Bypass stream limits 2024-05-21 21:33:08 +02:00
bytedream
301dac478f Update dependencies and version 2024-05-20 15:57:28 +02:00
bytedream
9819b62259 Fix typo in additional subtitle field (#421) 2024-05-17 23:45:41 +02:00
bytedream
5279a9b759 Update dependencies and version 2024-05-14 23:59:01 +02:00
bytedream
a98e31f959 Only include one CC subtitle 2024-05-14 22:36:59 +02:00
bytedream
590242712b Add warning message that --skip-existing-method has no effect without --skip-existing (#418) 2024-05-14 21:36:12 +02:00
bytedream
817963af4f Fix video containing hardsub if not requested (#415) 2024-05-14 21:22:23 +02:00
bytedream
48bb7a5ef6 Fix crashes when converting subtitles (#408) 2024-05-14 16:11:55 +02:00
Simon
53a710a373
Fix audio syncing using wrong internal index (#407) 2024-05-07 16:13:10 +02:00
bytedream
ab63dcd2e0 Update dependencies and version 2024-05-06 20:31:12 +02:00
bytedream
4d1df83342 Fix build badge 2024-05-05 00:05:10 +02:00
bytedream
89b9c5db39 Update dependencies and version 2024-05-04 23:40:02 +02:00
bytedream
96d3de48cf Add missing code examples 2024-05-04 23:40:02 +02:00
bytedream
dad91dba91 Rename --sync-tolerance to --merge-sync-tolerance and --sync-precision to --merge-sync-precision 2024-05-04 23:39:55 +02:00
bytedream
757d3094ea Rename directory for workflow resources 2024-05-03 21:14:51 +02:00
bytedream
fca1b74cac Separate build and lint pipelines 2024-05-03 21:11:46 +02:00
bytedream
0f7d7d928c Add format check and linting action pipelines 2024-05-03 21:08:34 +02:00
bytedream
f77804fcb5 Apply lints 2024-05-03 20:58:54 +02:00
bytedream
4066b8511c Build binaries locked 2024-05-03 20:51:31 +02:00
bytedream
dcbe433a9c Manually set git hash when publishing source AUR package 2024-05-03 20:38:11 +02:00
bytedream
55f1e1d32d Add option to overwrite git hash on build 2024-05-03 20:33:19 +02:00
bytedream
442173c08c Fix empty subtitles if multiple subtitle formats are used (#398) 2024-05-03 13:46:43 +02:00
bytedream
173292ff32 Prettify negated subtitle cc boolean 2024-05-02 17:10:09 +02:00
Simon
72c574c883
Switch to audio fingerprinting based syncing (#393)
* rename merge-auto-tolerance -> merge-time-tolerance

* move format_time_delta to own file

* switch to audio fingerprinting based syncing

* move format_time_delta to own file

* simpler approach to determine negative time deltas

* add missing readme part for --sync-precision

* fix all clippy "errors"

* Use rust-native chromaprint port instead of ffmpeg

* buffer with 128kb instead of 32kb

* improve helps

* improve help

---------

Co-authored-by: bytedream <bytedream@protonmail.com>
2024-05-02 00:35:13 +02:00
Simon
f237033aff
move format_time_delta to own file (#392) 2024-04-28 15:15:23 +02:00
Simon
bf28dbf1ce
rename merge-auto-tolerance to merge-time-tolerance (#391) 2024-04-26 15:50:44 +02:00
bytedream
cf8bfb02ac Automatically cut too long path segments 2024-04-25 20:32:51 +02:00
bytedream
74aaed4e7a Update dependencies and version 2024-04-25 00:49:31 +02:00
bytedream
177ceb1920 Update dependencies and version 2024-04-23 16:13:42 +02:00
bytedream
541f0e2747 Fix wrong audio and subtitle video reference number (#384) 2024-04-23 16:00:53 +02:00
Simon
777b39aba1
Fix: stop skipping every episode with archive command while using a non-premium account (#388) 2024-04-22 23:47:49 +02:00
bytedream
4f3475131c Disable LTO in source aur pkgbuild 2024-04-21 13:25:12 +02:00
Simon
177aa37631
Move help for --language-tagging (#385) 2024-04-21 12:40:53 +02:00
bytedream
8fff807ae6 Add message if stored login is expired 2024-04-20 00:23:10 +02:00
bytedream
db6e45e7f4 Update dependencies and version 2024-04-20 00:02:18 +02:00
bytedream
8ada822396 Remove etp-rt login 2024-04-20 00:02:06 +02:00
bytedream
9bdd3aa85b Switch to openssl-tls on nix flake (#359) 2024-04-18 18:45:35 +02:00
Amelia
4fc20c7c1c
Support override fonts (#378)
* Support override fonts

* Compile fix

* Actual compile fix

* Use snake_case
2024-04-14 21:55:55 +02:00
bytedream
6515d3025f Add warn message when using a non-premium account with download or archive 2024-04-14 21:43:24 +02:00
bytedream
fe17f3951e Update dependencies and version 2024-04-14 21:25:17 +02:00
bytedream
cdad7fc000 Skip premium episode if account has no premium subscription 2024-04-14 21:18:13 +02:00
bytedream
d7dac2acd4 Update dependencies and version 2024-04-11 17:06:43 +02:00
bytedream
dbbb445c55 Fix invalid 0% generate video file progress bar 2024-04-10 01:36:20 +02:00
bytedream
733d9f9787 Update dependencies and version 2024-04-10 01:36:10 +02:00
bytedream
0257fdea0d Remove chapters if sync doesn't work 2024-04-09 23:00:01 +02:00
bytedream
9e5feef4d4 Change archive --sync-start defaults 2024-04-09 22:58:28 +02:00
bytedream
b9f5fadbb3 Fix archive --language-tagging sometimes causing crash 2024-04-09 22:50:25 +02:00
bytedream
ea39dcbc71 Embed chapters with archive merge auto only if the --sync-start flag is set 2024-04-09 18:59:41 +02:00
bytedream
a73773ce1d Add id to every flag in README 2024-04-09 18:59:33 +02:00
bytedream
0115730d60 Add archive --sync-start flag documentation to README 2024-04-09 18:55:12 +02:00
bytedream
18534b259b Remove deprecated {resolution} output format option 2024-04-08 14:34:50 +02:00
bytedream
77103ff1f1 Update dependencies and version 2024-04-08 14:18:10 +02:00
bytedream
771594a231 Remove hardcoded pixel format (#352) 2024-04-08 14:03:30 +02:00
bytedream
1a511e12f9 Add archive start sync flag 2024-04-08 13:57:06 +02:00
bytedream
fe49161e93 End ffmpeg progress always with at least 100% 2024-04-08 00:37:19 +02:00
bytedream
25cde6163c Add account scope for search command 2024-04-06 21:25:19 +02:00
bytedream
4b74299733 Only run ci action on branch push 2024-04-05 22:53:53 +02:00
bytedream
c40ea8b132 Update dependencies and version 2024-04-05 22:32:18 +02:00
bytedream
6b6d24a575 Update dependencies and version 2024-04-04 21:16:32 +02:00
bytedream
8c1868f2fd Update dependencies and version 2024-04-03 17:14:07 +02:00
bytedream
af8ab24826 Update search command url help 2024-04-03 17:14:04 +02:00
bytedream
c0f3346846 Update README.md 2024-04-03 16:46:49 +02:00
bytedream
111e461b30 Update dependencies and version 2024-04-03 16:26:33 +02:00
Amelia Frost
f16cd25ea4 Fix for some chapters being sent by CR as floats (#351)
* Fix for some chapters being sent by CR as floats.
See: 3f3a80f7f7

* Compile fix for error[E0277]: cannot multiply `f32` by `u32`

* Format

Co-authored-by: bytedream <bytedream@protonmail.com>
2024-04-03 16:26:33 +02:00
bytedream
e694046b07 Move to new, DRM-free, endpoint 2024-04-03 16:26:33 +02:00
Amelia
ba8028737d
Update missing fonts (#360)
* Update missing fonts

* Compile fix
2024-04-03 15:49:51 +02:00
bytedream
89be8ac429 Update README.md 2024-03-25 13:31:23 +01:00
bytedream
26a858c1a1 Update dependencies and version 2024-03-10 22:04:58 +01:00
bytedream
d3696c783c Include archive chapters only if flag is set 2024-03-10 21:57:20 +01:00
bytedream
88a28e843f Manually specify ffmpeg output color format 2024-03-10 19:40:36 +01:00
bytedream
a0fa2bfd8a Update dependencies and version 2024-03-10 13:40:26 +01:00
bytedream
013273b832 Format code 2024-03-10 13:40:16 +01:00
bytedream
3bf2458774 Pass command args manually to cli entrypoint instead of parsing from environment 2024-03-10 13:28:18 +01:00
bytedream
e3a7fd9246 Add option to specify different proxies for api and download requests (#282) 2024-03-10 13:21:53 +01:00
bytedream
f1d266c940 Add options to specify audio & subtitle locales as IETF language tags and add --language-tagging flag for archive and download to modify the output file language tagging (#330) 2024-03-10 04:04:58 +01:00
bytedream
3f33db6728 Remove deprecated openssl and openssl-static features 2024-03-10 02:07:05 +01:00
bytedream
56f0ed1795 Add --include-chapters flag to archive and download (#301) 2024-03-10 01:59:47 +01:00
Username404-59
9c44fa7dae
README.md: Fix a typo (#344) 2024-03-03 22:40:41 +01:00
bytedream
3099aac0e7 Revert macos action downgrade and disable caching instead 2024-02-26 20:42:45 +01:00
Hannes Braun
9a6959970a
Remove superfluous mut keywords (#341) 2024-02-26 20:09:54 +01:00
bytedream
d2589a3a6f Use macos 12 instead of 13 for ci 2024-02-25 19:01:35 +01:00
bytedream
52da6eacc9 Fix search command always showing non-premium account warning message 2024-02-25 19:01:35 +01:00
bytedream
5634ce3277
Add archive --skip-existing-method flag (#292) (#325)
* Add archive `--skip-existing-method` flag (#292)

* Fix re-download only being issued when the local file has more audio/subtitle tracks & respect the `--no-closed-captions` flag
2024-02-25 18:48:18 +01:00
bytedream
6a7aa25e1a
Add ffmpeg amd hardware acceleration presets (#324) 2024-02-25 18:46:48 +01:00
bytedream
6a50567916
Merge pull request #335 from KevinStaude/patch-1
Update README.md
2024-02-23 18:46:02 +01:00
bytedream
2084328069 Fix ffmpeg progress panic (#337) 2024-02-23 17:36:37 +01:00
Kevin
d3ab2245a8
Update README.md
minor fix
--output-specials -o "something" isn't working
--output-specials "something" is correct
2024-02-15 23:52:47 +01:00
bytedream
c31b1f4db9 Update nix flake.lock (#333) 2024-02-14 20:27:00 +01:00
bytedream
8187269128 Upload manpages and completions only once in ci 2024-02-01 14:45:12 +01:00
bytedream
5d68f0334a Update actions used in ci 2024-01-30 23:55:52 +01:00
bytedream
a2464bad4e Add M1 runner to mac build ci 2024-01-30 23:49:20 +01:00
bytedream
0f06c7ac71 Change delimiter of audio template option to _ and make it configurable via the CRUNCHY_CLI_FORMAT_DELIMITER env variable (#311) 2024-01-29 11:52:32 +01:00
kralverde
f8309f2e80
add archive no-closed-captions flag (#323) 2024-01-29 08:26:40 +01:00
kralverde
982e521e0b
add universal output flag (#319)
* add universal filenames setting

* rename flag and help
2024-01-29 08:24:56 +01:00
kralverde
a4abb14ae3
use a 'close enough' method for audio auto merge (#286) (#320)
* use a 'close enough' method for audio merge

* change default, rename flag, and use more gooder words
2024-01-29 08:18:42 +01:00
bytedream
7cf7a8e71c Take closed_captions api field for subtitles into account (#297) 2024-01-28 02:04:42 +01:00
bytedream
3b9fc52890 Add notice & warning that an anonymous or non-premium account may result in incomplete results with search (#288) 2024-01-28 01:03:59 +01:00
bytedream
444dc65a29 Clarify risks of using the --experimental-fixes flag 2024-01-28 01:02:51 +01:00
bytedream
658bb86800 Run ci on every branch 2024-01-26 00:07:15 +01:00
bytedream
6e01e9e8a7 Fix comment misspelling 2024-01-14 22:39:05 +01:00
bytedream
937e9a2fdc Fix verbosity not applied if flag is used globally 2024-01-14 22:33:32 +01:00
bytedream
fbe182239a Update dependencies and version 2024-01-14 22:15:08 +01:00
bytedream
5490243df8 Fix episode filtering not working if specifying no season 2024-01-14 21:02:33 +01:00
bytedream
20f796f603 Re-add download timeout 2024-01-14 20:36:00 +01:00
bytedream
f3faa5bf94 Update dependencies and version 2024-01-11 13:53:05 +01:00
bytedream
3f401ccbd7 Fix output progressbar always on 100% when using download 2024-01-10 23:17:20 +01:00
bytedream
35447c5cb0 Fix Windows output progress bar (#305) 2024-01-10 23:17:02 +01:00
bytedream
333d574e56 Update dependencies and version 2024-01-10 13:37:16 +01:00
bytedream
7c42f29596 Only use tempfile name as windows named pipe name (#305) 2024-01-10 13:15:30 +01:00
bytedream
ef2898f0e1 Update dependencies and version 2024-01-09 15:30:52 +01:00
bytedream
650338d3e6 Prepend ./ to the output path on linux if the input path is only a filename (#303) 2024-01-09 15:24:08 +01:00
bytedream
c37d55aade Update version 2024-01-03 01:20:34 +01:00
bytedream
d90f45fa31 Update checkout action version 2024-01-03 01:08:40 +01:00
bytedream
99f96e3e35 Fix login command not working 2024-01-03 01:07:12 +01:00
bytedream
d3837f2495 Add new flags and format options to README 2024-01-03 00:34:16 +01:00
bytedream
fc6da9a76d Use latest Rust version in Linux and Mac toolchain 2024-01-03 00:00:00 +01:00
bytedream
283a3802b2 Update dependencies and version 2024-01-02 23:59:44 +01:00
bytedream
172e3612d0 Fix open-ended episode filter (#293) 2024-01-02 22:48:21 +01:00
bytedream
2e6246c439 Do not sanitize user path input 2024-01-02 22:26:52 +01:00
bytedream
d503d459cd Differentiate between illegal Windows and non-Windows file characters 2023-01-02 22:26:16 +01:00
bytedream
19935df545 Add more output format options (#284) 2023-12-23 15:28:10 +01:00
bytedream
0da81a4814 Add --include-fonts flag for archive (#277) 2023-12-19 22:41:02 +01:00
bytedream
0a26083232 Fix ffmpeg progress not working with fast encoder 2023-12-10 14:27:05 +01:00
bytedream
8613ea80cc Add forced flag to all CC subtitles (#274) 2023-12-10 13:52:33 +01:00
bytedream
b97c2a922e Fix windows ci 2023-12-10 03:36:39 +01:00
bytedream
be3248a4f9 Add download/request speed limiter (#250) 2023-12-10 02:52:42 +01:00
bytedream
f9e431e181 Add ability to use root flags after subcommands 2023-12-09 17:23:18 +01:00
bytedream
77609be598 Replace all login username references with email 2023-12-09 17:22:53 +01:00
bytedream
b4057599a1 Add --ffmpeg-threads flag to control the ffmpeg thread number 2023-12-09 01:34:23 +01:00
bytedream
6c7ab04b99 Lint 2023-12-08 23:04:04 +01:00
bytedream
9487dd3dbf Show ffmpeg progress (#270) 2023-12-08 23:03:44 +01:00
bytedream
9ca3b79291 Fix spelling 2023-12-03 00:15:57 +01:00
bytedream
8f77028fcb Show error message instead of panicking when capturing video length of invalid file (#258) 2023-12-01 01:17:49 +01:00
bytedream
d5df3df95f Fix fixed subtitle formatting and sorting (#272) 2023-12-01 01:02:53 +01:00
bytedream
440ccd99b5 Update dependencies and version 2023-11-20 22:05:06 +01:00
bytedream
2c37093959 Manually burn-in subtitles only if no pre-burned video is available (#268) 2023-11-19 19:24:15 +01:00
bytedream
14e71c05b8 Fix aur binary checksums (#266) 2023-11-16 13:51:30 +01:00
bytedream
d52fe7fb92 Update dependencies and version 2023-11-06 22:56:51 +01:00
bytedream
c08931b610 Add new commands and format option to readme 2023-11-06 22:55:23 +01:00
bytedream
fc6511a361 Format code 2023-11-06 22:12:28 +01:00
bytedream
56411c6547 Add missing whitespaces in command help 2023-11-06 22:01:44 +01:00
ByteDream
4d01e2a4ec
Merge pull request #257 from crunchy-labs/feature/relative_sequence_number
Add flags and option to control special episode behavior (#206, #241, #246)
2023-11-06 20:58:55 +00:00
bytedream
cd35dfe276 Rename --special-output to --output-specials 2023-11-06 21:49:47 +01:00
bytedream
f31437fba2 Remove leading and trailing whitespaces from output file 2023-11-06 21:20:43 +01:00
bytedream
e5d9c27af7 Fix ass filter path escape on windows (#262) 2023-11-06 21:15:50 +01:00
bytedream
787d8ab02c Add --special-output and --skip-specials flag 2023-11-04 15:24:14 +01:00
kennedy
7594412f58
updated brew url (#263)
* updated brew url

It's most appropriate to forward users to the brew information page generated for crunchy-cli. It shows stats on the number of downloads, where the manifest is located, and what architectures are built for it.

* Update README.md

Co-authored-by: ByteDream <63594396+ByteDream@users.noreply.github.com>

---------

Co-authored-by: ByteDream <63594396+ByteDream@users.noreply.github.com>
2023-11-02 13:37:40 +01:00
kennedy
d8b76f8cc7
Add homebrew instructions (#261)
Added details about Homebrew and which architectures are supported.

Made minor style fixes: added spaces around shell code blocks and headers.
2023-10-29 06:12:25 +01:00
Catd
f56d9ecabf
Changes in Readme regarding subtitles and flag usage (#255)
* Update README.md

updated Flags and subtitles sections

* Update README.md

* Update README.md

Comma in a better place
2023-10-16 17:04:45 +02:00
bytedream
5a3a304443 Use episode sequence number as filter number for url episode filtering 2023-10-15 23:52:44 +02:00
bytedream
d0fe7f54f6 Show fractional part in relative_sequence_number if present 2023-10-15 23:34:22 +02:00
bytedream
685c79d673 Add 2-digit padding to relative_episode_number, sequence_number and relative_sequence_number format option 2023-10-15 22:56:45 +02:00
bytedream
5d17bb1ac7 Merge remote-tracking branch 'origin/master' into feature/relative_sequence_number
# Conflicts:
#	crunchy-cli-core/src/utils/format.rs
2023-10-15 22:53:47 +02:00
bytedream
568bce0008 Manually implement filename sanitizing to allow the usage of file separators 2023-10-15 22:43:04 +02:00
Valentine Briese
bbb5a78765
Add --threads (-t) option to downloading commands (#256)
* Add `single-threaded` option to downloading commands

* Replace `--single-threaded` boolean option with `--threads` optional `usize` option

* Simplify `threads` field unwrapping

* Make `--threads` `usize` with a default value
2023-10-15 20:52:53 +02:00
bytedream
81385ef6ce Add relative_sequence_number format option (#206, #241, #246) 2023-10-15 20:49:03 +02:00
bytedream
13335c020b Sanitize the full output filename (#253) 2023-10-13 11:41:56 +02:00
Valentine Briese
e5db8e9504
Fix ffmpeg-preset option in download command (#254) 2023-10-12 21:20:06 +02:00
ByteDream
5bc68ad592
Merge pull request #251 from valentinegb/apple-hardware-acceleration
Add FFmpeg Apple hardware acceleration and make HEVC codec compatible with Apple standards
2023-10-12 13:28:37 +02:00
Valentine Briese
7095e2b8b6 Use -q:v FFmpeg option for Apple hardware acceleration 2023-10-11 18:54:47 -07:00
Valentine Briese
610593a795 Make H265 codec compatible with Apple HEVC standards 2023-10-11 18:26:51 -07:00
Valentine Briese
9596175f7f Add FFmpeg Apple hardware acceleration 2023-10-11 18:24:45 -07:00
bytedream
f48474ba77 Remove numbers from binary PKGBUILD env variables 2023-09-27 00:03:26 +02:00
bytedream
d79197edc6 Use async mutex and channel instead of the std equivalents 2023-09-23 16:56:42 +02:00
bytedream
a93a1fa807 Fix env variable resolving in publish pipeline 2023-09-22 12:11:00 +02:00
bytedream
3e21ca4fe7 Update dependencies and version 2023-09-22 11:52:39 +02:00
bytedream
01913e0db3 Adjust ci and PKGBUILD build args to feature changes 2023-09-21 19:20:00 +02:00
bytedream
64428ea7d1 Rename native-tls crate to prevent false-positive build warnings 2023-09-21 19:18:29 +02:00
bytedream
8eda8df3f7 Use native tls as default tls backend, add features to use rustls or openssl instead 2023-09-21 13:48:35 +02:00
bytedream
185b65fc9b Remove invalid character from AUR binary PKGBUILD 2023-09-08 22:49:58 +02:00
bytedream
7485bd8e76 Update dependencies and version 2023-09-08 21:41:25 +02:00
bytedream
0f7e6c9120 Add root flag to set custom user agent 2023-09-06 03:02:40 +02:00
bytedream
b477ca982c Add options to get drm dash and hls url with search 2023-09-06 02:55:04 +02:00
bytedream
18f891efd2 Use system certs when using openssl 2023-08-25 17:15:06 +02:00
bytedream
3ae6fe4a1a Fmt 2023-08-25 17:14:44 +02:00
Simon
e06e6b2b01
Update README.md (#240) 2023-08-25 15:21:37 +02:00
bytedream
70b3a7a3e1 Remove toolchain setup step from apple build action 2023-08-25 14:34:16 +02:00
bytedream
a80f6e5df4 Use workspace instead of separate Cargo.lock file 2023-08-25 14:24:12 +02:00
bytedream
2f57b07559 Change ci cargo cache key 2023-08-17 11:58:36 +02:00
bytedream
596fcc2342 Fix relative episode number exceeding actual episode count (#238) 2023-08-17 11:54:00 +02:00
bytedream
70b41b4dd5 Show an error message if no url was given 2023-08-17 11:53:57 +02:00
StepBroBD
6a6b981979
Fix nix flake overlays (#236) 2023-08-16 20:29:39 +02:00
bytedream
92ed4bd87d Revert "Replace native-tls with its internal fork in the root crate"
This reverts commit 31fe1460f1.
2023-08-16 19:08:13 +02:00
bytedream
d295a57f84 Use dynamically linked openssl when running with nix 2023-08-16 17:26:03 +02:00
bytedream
31fe1460f1 Replace native-tls with its internal fork in the root crate 2023-08-16 16:57:35 +02:00
bytedream
f45bb19cd7 Remove duplicated native-tls entry (#235) 2023-08-16 16:55:16 +02:00
bytedream
6da292f013 Do not test PKGBUILD on arch release pipeline 2023-08-14 15:46:14 +02:00
bytedream
a45833f5a2 Update dependencies and version 2023-08-14 15:02:44 +02:00
bytedream
448b633be8 Fix AUR completion and license directory 2023-08-08 17:33:08 +02:00
bytedream
800df5ca6c (Re-)add scoop install example 2023-08-08 11:06:28 +02:00
bytedream
9f9aec1f8a Fix cross installation if cache is present 2023-08-07 17:48:30 +02:00
bytedream
b98332eae4 Add aarch64 support for arch release pipeline 2023-08-07 17:47:06 +02:00
bytedream
40397e96a3 Fix arch release pipeline 2023-08-07 17:38:02 +02:00
bytedream
aef2fddff7 Update version 2023-08-07 16:24:47 +02:00
bytedream
a12a8bc366 Update README 2023-08-07 16:19:18 +02:00
ByteDream
6f40ffacec
Change license (#223) 2023-08-07 15:58:51 +02:00
ByteDream
2bcaa6e4d5
Add ci to publish AUR package on release (#233) 2023-08-07 15:40:05 +02:00
bytedream
0586f38cdc Update ffmpeg preset help message 2023-08-07 15:33:42 +02:00
bytedream
435b75bbf9 Add aarch64 architecture to linux ci 2023-07-27 22:10:30 +02:00
bytedream
b1342d54f3 Change name of output artifacts 2023-07-27 22:04:38 +02:00
bytedream
700b041f9a Remove deprecated archive --locale flag 2023-07-27 14:18:53 +02:00
bytedream
84c70f2bee Add workaround for incorrect hardsub labeling (#231) 2023-07-26 20:51:34 +02:00
bytedream
4c396a9e4a Remove option to configure ffmpeg args with env variables 2023-07-26 19:17:10 +02:00
bytedream
9ced3483d8 Error if download series has an episode in an unexpected language and input url is a series url (#225) 2023-07-22 15:13:24 +02:00
bytedream
db156d361f Update dependencies and version 2023-07-21 13:56:07 +02:00
bytedream
5afda0b5f1 Add openssl alpn support 2023-07-20 19:59:45 +02:00
bytedream
068c0fcac1 Split ci platforms in separate jobs 2023-07-20 18:07:32 +02:00
bytedream
00e8082e66 Remove test ci step 2023-07-20 14:21:57 +02:00
bytedream
dc6bc0d951 Add openssl tls backend for all platforms 2023-07-20 13:46:55 +02:00
bytedream
4ec9a0d309 Add native-tls/openssl tls backend for linux 2023-07-17 16:08:54 +02:00
bytedream
566422cb06 Update dependencies 2023-07-17 14:32:32 +02:00
bytedream
dd2033d323 Revert "Use config file to store sessions"
This reverts commit 850aa7a9
2023-07-13 16:07:43 +02:00
bytedream
8490263e84 Add info output that login command saved the session 2023-07-13 14:09:27 +02:00
bytedream
6b76887978 Fix error when using etp-rt or anonymous flag after login command 2023-07-13 14:08:11 +02:00
bytedream
850aa7a969 Use config file to store sessions 2023-07-13 14:07:14 +02:00
bytedream
9ad27102fc Fix missing identifier on newer simulcast titles (#227) 2023-07-12 22:02:55 +02:00
bytedream
513353890d Respect debug output when showing subtitle download spinner 2023-07-12 21:34:49 +02:00
bytedream
49de7bbba9 Add progress indicator for subtitle download 2023-07-12 21:15:01 +02:00
bytedream
751735477c Update dependencies 2023-07-08 16:36:35 +02:00
bytedream
1fe8746dda Add support for old url scheme (#224) 2023-07-05 16:01:55 +02:00
bytedream
af8a88a792 Add option to force subtitle burn with download (#221) 2023-07-05 01:58:46 +02:00
Bastian Venz
f40dc0dd1c
Update crunchyroll-rs to 0.3.7 (#220) 2023-06-26 17:59:55 +02:00
bytedream
0234d46bf9 Fix duplicated download with archive 2023-06-25 17:47:40 +02:00
bytedream
618d2206a2 Hide interactive output when -q flag is set 2023-06-24 13:29:22 +02:00
bytedream
75b6e7b452 Add long flag options to -v and -q (--verbose and --quiet) 2023-06-24 12:57:22 +02:00
bytedream
fc44b8af8a Add option to select seasons when season number is duplicated (#199) 2023-06-23 17:19:16 +02:00
bytedream
d75c04fbb6 Download all seasons if season number is duplicated 2023-06-23 15:28:09 +02:00
bytedream
f4682e0f29 Add search command to man pages 2023-06-20 00:53:19 +02:00
bytedream
5b8a4b9969 Add simple search command description 2023-06-20 00:53:12 +02:00
bytedream
0ef4980ab3 Update dependencies and version 2023-06-20 00:35:37 +02:00
bytedream
2ebc76a0df Change search README documentation position 2023-06-20 00:33:31 +02:00
bytedream
f7af983526 Remove search --filter-subtitles flag 2023-06-20 00:31:35 +02:00
bytedream
0cd647fb14 Add search documentation 2023-06-20 00:31:35 +02:00
bytedream
4e4a4355f5 Add more search replace fields 2023-06-20 00:31:35 +02:00
bytedream
0b044ba27e Check search scopes before replacing 2023-06-20 00:31:35 +02:00
bytedream
26ca3ca65c Remove usage of deprecated functions 2023-06-20 00:31:35 +02:00
bytedream
e9b4837f44 Clean up search a bit 2023-06-20 00:31:35 +02:00
bytedream
0aa648b1a5 Add basic search command 2023-06-20 00:31:35 +02:00
bytedream
0beaa99bfd Update supported urls in README 2023-06-20 00:09:54 +02:00
bytedream
7ed1158339 Fix subtitle sorting (#208) 2023-06-19 01:46:42 +02:00
bytedream
b55ac9a51a Update README 2023-06-16 10:18:37 +02:00
Peter Mahon
32aab193d0 Add Output Templates section in README.md
Added a section for output templates so that users of the application have an easy reference, as opposed to searching within the code.

I also updated the Output Templates subsection in the downloads section to mention both .ts and .mp4 files, since the default changed in crunchy-cli v3.0.0-dev.9 of the binaries.
2023-06-16 10:18:37 +02:00
StepBroBD
49d64805ca
add nix flake (#210)
- add following functionality:
  - nix develop with direnv support
  - nix run and nix shell
  - nix fmt for flake.nix
  - and package overlay for https://github.com/NixOS/nixpkgs/pull/225502

- useful docs
  - https://stackoverflow.com/questions/53272197/how-do-i-override-the-libc-in-a-nix-package-to-be-musl
  - dd3aca2d0b/pkgs/top-level/stage.nix (L136)

- inspired by https://github.com/typst/typst/blob/main/flake.nix
2023-06-04 17:57:28 +02:00
bytedream
19f79a4349 Remove no-subtitle-optimization flag in README 2023-05-25 18:58:03 +02:00
bytedream
f3f900064a Fix flag README typo 2023-05-25 18:57:33 +02:00
bytedream
a2b7c78752 Fix long help language formatting 2023-05-24 13:15:28 +02:00
bytedream
4bd172df06 Fix Japanese episode download if episode isn't available in specified language with archive (#207) 2023-05-24 12:55:47 +02:00
bocchi
b24827dc6b
fix login (#202) 2023-05-13 18:19:44 +02:00
ByteDream
61766c74fa Enable usage of auth flags behind login command 2023-05-07 01:34:41 +02:00
ByteDream
c2e953043e Fix output filename if file stem is empty but file exists 2023-05-06 22:07:02 +02:00
bytedream
dc431a9637 Update version 2023-04-25 10:47:08 +02:00
ByteDream
0f73d8dbec Update conditions for subtitle to be marked as CC 2023-04-23 15:57:49 +02:00
ByteDream
94fcf1590a Add .mov to known soft-sub containers 2023-04-23 14:18:40 +02:00
ByteDream
7b1ed30b20 Fix subtitle burn-in error (#198) 2023-04-23 13:56:48 +02:00
ByteDream
ce358041be Removed unused struct 2023-04-23 13:45:08 +02:00
ByteDream
ff258c0722 Remove --vv flag 2023-04-23 13:21:50 +02:00
ByteDream
e277b4200f Update version 2023-04-17 18:24:20 +02:00
ByteDream
847c6a1abc Mark CC subtitle track as CC & grab normal subtitles and CC when using -m audio (#141) 2023-04-17 18:14:54 +02:00
bocchi
c0e2df4804 redundant to specify default value 2023-04-15 15:51:18 +02:00
bocchi
c4a4651164 set default value for locale to empty vec 2023-04-15 15:51:18 +02:00
ByteDream
13d8cc26c9 Remove signal-hook dependency 2023-04-13 22:47:14 +02:00
ByteDream
d754f9339b Update version 2023-04-13 21:59:27 +02:00
ByteDream
95f8cc542c Add retry if connection got reset by peer (#144) 2023-04-13 21:55:10 +02:00
ByteDream
3c648f4192 Actually remove session file if login remove flag is set 2023-04-13 21:35:28 +02:00
ByteDream
d8d1f8a443 Execute cli pre-check before logging in 2023-04-13 21:30:54 +02:00
ByteDream
f584c8028f Deprecate archive -l flag 2023-04-13 21:22:33 +02:00
ByteDream
273db9fe6a Add rate limit notice if detected 2023-04-13 21:12:43 +02:00
bytedream
bfc50653b1 Fix cms rate limiting error for episodes and movies (#180) 2023-04-13 21:12:43 +02:00
ByteDream
c4c15f9b11 Disable default features for reqwest 2023-04-12 07:53:51 +02:00
ByteDream
d33e2fa36b Add proxy flag (#142) 2023-04-12 07:53:51 +02:00
ByteDream
5f98cfb186 Check for remaining disk space (#150) 2023-04-11 09:10:09 +02:00
ByteDream
ee1344fc6b Update version 2023-04-09 20:37:10 +02:00
ByteDream
baa6ca5018 Add function to get temp directory 2023-04-09 16:12:28 +02:00
ByteDream
fcbcd175e1 Add env variable to make temp directory configurable (#153) 2023-04-09 16:12:28 +02:00
ByteDream
481f35d232 Update version 2023-04-09 11:34:50 +02:00
ByteDream
cb7612e86b Fix help message indentation 2023-04-09 11:26:15 +02:00
ByteDream
d79f00871e Enable special files to be declared as output file 2023-04-09 11:24:12 +02:00
Hannes Braun
7e34076a7b Fix offset in relative episode number 2023-04-08 17:31:12 +02:00
ByteDream
8d1be6b573 Update dependencies 2023-04-08 15:13:19 +02:00
ByteDream
bd61c18859 Remove dependabot 2023-04-08 15:10:37 +02:00
ByteDream
8e972ab578 Move stream download logic to fix cms error/rate limiting 2023-04-08 15:08:36 +02:00
ByteDream
1213880df7
Add feature request template 2023-03-23 19:39:52 +00:00
ByteDream
8a52307845 Update issue templates 2023-03-23 20:36:19 +01:00
ByteDream
e819e44671
Merge pull request #178 from crunchy-labs/feature/refactoring
Refactoring & library update
2023-03-23 18:49:00 +00:00
ByteDream
c97adb3ce7 Remove duplicated subtitles on archive audio merge 2023-03-23 17:17:55 +01:00
ByteDream
bd20c5a7b6 Update dependencies 2023-03-23 15:00:35 +01:00
ByteDream
55483878b3 Re-add hwaccel cuda 2023-03-23 13:53:37 +01:00
ByteDream
b1d23c2f25 Merge branch 'master' into feature/refactoring
# Conflicts:
#	crunchy-cli-core/src/cli/utils.rs
2023-03-23 13:51:52 +01:00
ByteDream
ba1c0aaaa4 Enable stdout output 2023-03-23 13:45:42 +01:00
ByteDream
a7adb7191e Fix archive not recognizing locale when using direct episode url 2023-03-23 12:52:44 +01:00
ByteDream
57236f2b31 Add a flag to enable experimental fixes (disabled by default now) 2023-03-23 01:51:51 +01:00
ByteDream
0a40f3c40f Refactor 2023-03-23 01:18:14 +01:00
LetMeByte
a6bfe0be2e
📚 Readme Overhaul 📚 (#163)
* 📚

* Update README.md

* Update README.md
2023-03-08 22:31:03 +01:00
ByteDream
d6f1262c1c Fix no such file or directory when using login (#164) 2023-02-27 11:17:32 +01:00
LetMeByte
19f9d26af9 Update utils.rs 2023-02-23 16:57:57 +01:00
LetMeByte
90212c4ec0
Move config dir and session file (#156)
* Move config dir and session file

* Update login.rs
2023-02-22 22:45:00 +01:00
ByteDream
c315f87f33
Merge pull request #155 from LetMeByte/—
Replace spinner horizontal character
2023-02-22 18:26:31 +01:00
LetMeByte
656ce0b523
Update log.rs 2023-02-22 17:35:53 +01:00
ByteDream
758db86f2f
Merge pull request #154 from hannesbraun/season-choosing-offset
Fix offset in interactive season choosing
2023-02-21 16:29:03 +01:00
Hannes Braun
e4919e80ba
Fix offset in interactive season choosing 2023-02-20 22:34:48 +01:00
ByteDream
fbc98b2308
Merge pull request #136 from hitorilabs/fix-part-ep
bugfix: duplicate ep numbers archived into same file
2023-02-19 23:41:41 +01:00
bocchi
03dd1c5264 get rid of btreemap in archive 2023-02-07 11:04:41 -05:00
bocchi
cba921f1a8 use sequence_number instead of episode_number 2023-02-06 04:04:26 -05:00
bocchi
264d943a2c use episode id instead 2023-02-06 03:22:04 -05:00
bocchi
ba57d3c25d bugfix: btreemap skips duplicate ep nums 2023-02-06 03:11:52 -05:00
ByteDream
96b259ce9a Add url filtering section to README (#133) 2023-02-05 15:00:50 +01:00
ByteDream
1a08e76162
Merge pull request #130 from adracea/patch-3
Update README.md
2023-01-31 12:45:02 +01:00
Alexandru Dracea
e83de60efa
Update README.md
Adds a bit of guidance on how to properly `install`.
2023-01-31 13:37:29 +02:00
ByteDream
43e32e1453
Merge pull request #121 from crunchy-labs/feature/more-ffmpeg-options
More ffmpeg options
2023-01-27 21:20:55 +01:00
ByteDream
e115dcd87f Rework ffmpeg preset, add 3 quality levels and custom flags (#108) 2023-01-25 00:48:35 +01:00
ByteDream
32691e6fa5
Merge pull request #117 from hannesbraun/keep-auto-merged-subs
Don't remove the subtitles if the video is detected to be identical
2023-01-18 21:26:38 +01:00
Hannes Braun
21a5782825
Don't remove the subtitles if the video is detected to be identical 2023-01-17 23:13:13 +01:00
ByteDream
b3226cdde5 Change av1 encoder to libsvtav1 (#108) 2023-01-17 17:25:09 +01:00
ByteDream
cdf054ff58
Merge pull request #116 from crunchy-labs/feature/all-languages
(Re-)add `-l all` languages
2023-01-16 15:55:39 +01:00
ByteDream
1844a563d2
Merge branch 'master' into feature/all-languages 2023-01-16 15:43:33 +01:00
ByteDream
901fdc0dbc
Merge pull request #115 from crunchy-labs/feature/skip-existing-files
Skip existing files
2023-01-16 15:40:39 +01:00
ByteDream
6bd75c93cb Simplify archive no audio present output 2023-01-15 22:18:10 +01:00
ByteDream
3dd8385aac (Re-)enable -l all for archive (#110) 2023-01-15 22:09:23 +01:00
ByteDream
577c0679ad Remove automatically set filename if output is empty 2023-01-15 21:43:31 +01:00
ByteDream
685ac85857 Add flag to skip existing files (#67, #109) 2023-01-15 21:41:05 +01:00
ByteDream
b5bc36c4a2
Merge pull request #111 from crunchy-labs/fix/download-no-hardsub-videos
Manually add subtitles to download videos
2023-01-14 00:03:18 +01:00
ByteDream
497f22ee49 Fix double download progress output message 2023-01-13 22:38:29 +01:00
ByteDream
3d145b021b Add download ffmpeg error output 2023-01-13 20:25:17 +01:00
ByteDream
08c4e30a06 (Re-)add download pipe to stdout 2023-01-13 16:03:19 +01:00
ByteDream
6d1f8d49f6 Add hardsubs manually to download videos (#81) 2023-01-13 15:23:55 +01:00
ByteDream
17233f2fd2 Update dependencies and version 2023-01-10 22:15:36 +01:00
ByteDream
4482d5482f
Merge pull request #106 from crunchy-labs/feature/more-episode-number-format-options
Add relative episode number format option
2023-01-10 20:17:52 +01:00
ByteDream
5ce5b249c9 Add relative episode number to cli help 2023-01-10 19:20:08 +01:00
ByteDream
3029325776 Add check if request locale is valid (#102) 2023-01-09 23:40:53 +01:00
ByteDream
a0aab3bfb9 Add Arabic locale to duplicated seasons check 2023-01-09 23:25:16 +01:00
ByteDream
2ea036d4c6 Remove padded_*_number and make it default for *_number for output format 2023-01-09 19:12:31 +01:00
ByteDream
7d3a90e811 Add relative episode number to format 2023-01-09 19:12:00 +01:00
ByteDream
29845ba6e5 Re-order instructions 2023-01-09 17:26:04 +01:00
ByteDream
12be16417f Fix interactive season choosing false-positive triggering 2023-01-09 16:55:10 +01:00
bytedream
4b33ef02c6 Fix output formatting for full path (#101) 2023-01-09 10:27:28 +01:00
ByteDream
13f54c0da6 Fix interactive season choosing activation on url filter excluded seasons 2023-01-08 18:33:23 +01:00
ByteDream
b65c0e9dfd Update dependencies & version 2023-01-08 17:44:31 +01:00
ByteDream
537158cd7b
Merge pull request #100 from crunchy-labs/feature/multiple-seasons-with-same-number
Interactive choosing on duplicated season numbers
2023-01-08 17:30:19 +01:00
ByteDream
b991614dc3 Fix output and download order on duplicated seasons 2023-01-07 17:14:47 +01:00
ByteDream
7588621f34 Add interactive input to choose season on duplicated season numbers (#55, #82) 2023-01-07 16:02:51 +01:00
ByteDream
54dfe8002e
Merge pull request #97 from hannesbraun/archive-all-subtitles
Archive subtitles of all versions of an episode
2023-01-06 19:28:01 +01:00
Hannes Braun
06fd9a7a98
Archive subtitles of all versions of an episode 2023-01-06 00:21:57 +01:00
ByteDream
892407d1f0 Fix --default-subtitle causing no such file error (#98) 2023-01-06 00:03:57 +01:00
ByteDream
404aa496e1 Fix subtitle look and feel typo 2023-01-05 22:28:23 +01:00
ByteDream
7726287859 :) 2023-01-04 01:28:14 +01:00
ByteDream
d0a8103e3d Update dependencies & version 2023-01-04 00:12:13 +01:00
ByteDream
29c6129e6e Update dependencies & version 2023-01-03 14:50:12 +01:00
ByteDream
c11851adc9
Merge pull request #95 from crunchy-labs/fix/rework-download
Fix & rework download
2023-01-03 01:51:19 +01:00
ByteDream
83bd71916c
Merge branch 'master' into fix/rework-download 2023-01-03 01:39:30 +01:00
ByteDream
b365bda5dc Fix download threads to properly return errors 2023-01-03 01:28:42 +01:00
ByteDream
3c3b7b6566 Fix panic on specific filenames 2023-01-03 01:24:17 +01:00
ByteDream
fae5d69933 Apply stabilizations fixes (#89) 2023-01-02 17:56:50 +01:00
ByteDream
0c13942016 Update dependencies 2023-01-02 17:53:54 +01:00
ByteDream
03db38b31c
Add debug segment percentage (#93)
* Fix file extension unwrap panic

* Change log output name from crunchy_cli_core to crunchy_cli

* Add percentage output
2022-12-28 15:45:33 +01:00
ByteDream
6267fd3ba7
Merge pull request #94 from adracea/patch-2
Add error handling and retry attempts
2022-12-28 15:43:51 +01:00
ByteDream
b8e46099f9 Re-increase segment request timeout 2022-12-28 15:35:38 +01:00
ByteDream
7115c5546d Show error message on segment download retry 2022-12-28 15:25:10 +01:00
ByteDream
d0681c7f6c Simplify retry segment download 2022-12-28 15:18:12 +01:00
Alexandru Dracea
c2ae622d01
Update utils.rs 2022-12-28 16:01:55 +02:00
Alexandru Dracea
8a3c0132e7
Update utils.rs 2022-12-28 15:59:55 +02:00
Alexandru Dracea
240e5563a3
Add error handling and retry attempts
Handles cases where the segments fail to download and sometimes get stuck by introducing a timeout and retrying on failure.
2022-12-28 15:44:45 +02:00
ByteDream
c5940a240c Slightly change download process to be more verbose in error situations 2022-12-28 02:18:17 +01:00
ByteDream
9e0edda7c2
Merge pull request #92 from crunchy-labs/fix/non-existing-parent-directory
Fix non existing parent directory
2022-12-28 01:10:43 +01:00
ByteDream
14f42833cb Fix output to special file (pipes etc.) 2022-12-27 22:59:35 +01:00
ByteDream
c37e2495e1 Create output parent directory if it doesn't exist (#91) 2022-12-27 20:49:53 +01:00
ByteDream
2c3bd78fc1 Leave special files untouched from renaming 2022-12-27 20:43:16 +01:00
ByteDream
022f23e3ab
Merge pull request #88 from crunchy-labs/fix/progress
Use library for progress
2022-12-22 16:39:12 +01:00
ByteDream
17fa045c32 Use library for progress 2022-12-22 14:45:56 +01:00
ByteDream
86759557fe
Merge pull request #87 from adracea/master
Add padding format option
2022-12-19 17:47:39 +01:00
Alexandru Dracea
af9aca4d0c
Add padding 2022-12-19 18:35:37 +02:00
bytedream
8bb2c9c750 Fix file name sanitizing 2022-12-19 15:22:45 +01:00
ByteDream
67bbc00d87 Add pre-release notice 2022-12-18 15:03:16 +01:00
ByteDream
4bfc6f22e1 Fix discord link 2022-12-18 13:50:20 +01:00
ByteDream
4f107d8cf2 Update version and dependencies 2022-12-18 12:51:36 +01:00
ByteDream
5de4a83e5d Change rust actions used 2022-12-18 12:50:35 +01:00
ByteDream
306019d8b8 Fix linux ci 2022-12-16 23:28:30 +01:00
ByteDream
2451e33639 Fix ubuntu musl package 2022-12-16 22:42:22 +01:00
ByteDream
03fe0c6f01 Add additional command 2022-12-16 21:57:26 +01:00
ByteDream
50c520d660
Merge pull request #80 from crunchy-labs/feature/anonymous-login
Add anonymous login
2022-12-16 21:06:37 +01:00
Alexandru Dracea
d49f2d8eaa
add minimal .gitignore (#83)
* add .gitignore

* more ignore

* newline

* readd .lock

Co-authored-by: Alexandru.Dracea <alexandru.dracea@finastra.com>
2022-12-16 16:09:52 +01:00
bytedream
cc9342cd0a Update dependencies 2022-12-16 10:09:41 +01:00
bytedream
f254df3bb3 Fix ci badge 2022-12-16 08:50:11 +01:00
bytedream
52ee0c48e1 Fix resolution ...p parsing 2022-12-13 08:51:37 +01:00
ByteDream
db3697c372 Add anonymous login examples to the README 2022-12-11 23:03:25 +01:00
ByteDream
b814529aa2 Add anonymous login 2022-12-09 16:33:34 +01:00
ByteDream
578e5ea5b7 Update version 2022-12-08 18:32:38 +01:00
ByteDream
a5e60ea6b7 Add generating file cli output 2022-12-08 15:10:53 +01:00
ByteDream
5c3f49e9f4
Merge pull request #78 from crunchy-labs/feature/ffmpeg-optimizations
Add optional ffmpeg optimizations
2022-12-08 14:44:46 +01:00
ByteDream
01e2603e84 Update ffmpeg preset command help 2022-12-08 14:21:52 +01:00
ByteDream
f0de4509c5 Add ffmpeg presets to download 2022-12-08 14:19:13 +01:00
ByteDream
2f7e992a6e Move ffmpeg presets to utils 2022-12-08 14:04:43 +01:00
ByteDream
985ec2ade9 Remove search command (wrong commit oops) 2022-12-08 01:57:14 +01:00
ByteDream
c4540ada50 Remove unwanted ffmpeg output when checking if available 2022-12-08 01:40:19 +01:00
ByteDream
54018f9773 Fmt 2022-12-08 01:37:37 +01:00
ByteDream
a32d3aef87 Update dependencies 2022-12-08 00:12:25 +01:00
ByteDream
2e4e897dc1 Fix only nvidia preset stack overflow 2022-12-07 20:23:01 +01:00
ByteDream
ce5672588e Update dependencies 2022-12-07 20:09:01 +01:00
ByteDream
933d217b63 Add pre_check checking 2022-12-07 20:08:44 +01:00
ByteDream
1b1756a0ae Add long help to ffmpeg preset 2022-12-07 15:12:16 +01:00
ByteDream
6832c69eaa Add ffmpeg encode presets 2022-12-07 01:08:47 +01:00
ByteDream
91f8a82ca4 Fmt 2022-12-07 00:39:32 +01:00
ByteDream
4cd46f19ac Fix high ffmpeg CPU consumption with archive 2022-12-06 22:05:03 +01:00
ByteDream
c383b4d307 Fix filename generation if file already exists 2022-12-04 19:10:43 +01:00
ByteDream
faadd89fff Wait until buffer is empty when downloading 2022-12-04 19:10:23 +01:00
ByteDream
342cf23ae0 Remove if condition from ci 2022-12-04 18:57:54 +01:00
ByteDream
285d27772c Add manual .ass file editing to fix #32 2022-12-04 18:54:22 +01:00
ByteDream
7c3bbfc173 Wait until buffer is empty when downloading 2022-12-04 18:54:19 +01:00
ByteDream
64717fd405 Update dependencies 2022-12-03 12:32:45 +01:00
ByteDream
cd1308426e Re-add static-ssl feature 2022-12-03 12:26:36 +01:00
ByteDream
5826d95e6a Add progress width offset 2022-12-03 01:13:40 +01:00
ByteDream
135d59ce8b Add pre-check function 2022-12-03 00:39:52 +01:00
ByteDream
9d45995e86 Set progress width with message to use complete space 2022-12-03 00:26:52 +01:00
ByteDream
afab3826c9 Extend function to get free file 2022-12-02 22:06:08 +01:00
ByteDream
e9b3088cde Fix windows ok output character 2022-12-02 21:42:24 +01:00
ByteDream
cd9c69baf1 Fix windows artifact name 2022-12-02 20:23:30 +01:00
ByteDream
33e27504f2 Update dependencies 2022-12-02 19:41:50 +01:00
ByteDream
3fcb512c18 Merge test and build ci 2022-12-02 18:19:13 +01:00
ByteDream
6aa4078be3 Update dependencies 2022-12-02 17:28:54 +01:00
ByteDream
474e9f5e31 Add very verbose output flag 2022-11-30 23:43:57 +01:00
ByteDream
f6d6c9435c Fix invalid Windows client builder 2022-11-30 22:30:40 +01:00
ByteDream
f687969f04 Fix isahc tls config 2022-11-30 21:53:23 +01:00
ByteDream
64bb39362e Add Windows certificates 2022-11-30 21:34:30 +01:00
ByteDream
0bb20d24a2 Update dependency versions 2022-11-30 21:34:20 +01:00
ByteDream
a3c717dc1a Split test job 2022-11-30 20:07:10 +01:00
ByteDream
12d49a27e4 Add static vc runtime 2022-11-30 19:25:21 +01:00
ByteDream
6ecd23bcd0 Fix ci 2022-11-30 19:25:00 +01:00
ByteDream
e200aab8ab Update ci 2022-11-30 19:04:11 +01:00
ByteDream
4095b80477 Update dependencies 2022-11-30 19:04:01 +01:00
ByteDream
99002e606f Change windows ci toolchain to gnu 2022-11-28 22:18:26 +01:00
bytedream
2d89a71203 Remove commit sha in filename 2022-11-28 12:18:44 +01:00
bytedream
b1182d4f7b Add (short) commit hash and build time to version hash 2022-11-28 11:54:04 +01:00
bytedream
59b5e3d239 Update version 2022-11-28 10:58:29 +01:00
ByteDream
24fbedc7d7 Fix ci short commit sha 2022-11-27 22:42:15 +01:00
ByteDream
c99eedd7a7 Re-fix ci zip workflow 2022-11-27 22:14:32 +01:00
ByteDream
81931829b0 Fix invalid ci file syntax 2022-11-27 20:43:13 +01:00
ByteDream
1487ba222e Add short commit sha to ci build artifacts 2022-11-27 20:38:37 +01:00
ByteDream
7fe587a891 Update version 2022-11-27 20:38:26 +01:00
ByteDream
9bfe6b0e54 Update authors 2022-11-27 20:38:26 +01:00
ByteDream
f3f41aa0a2 Add crunchy-cli-core/Cargo.lock 2022-11-27 20:38:26 +01:00
ByteDream
b118b74b99 Update README 2022-11-27 20:38:26 +01:00
bytedream
45c315e9bb Fix ci upload artifact action 2022-11-27 17:27:12 +01:00
bytedream
8d8333e414 Fix ci zip workflow (again) 2022-11-27 17:16:12 +01:00
bytedream
a760588441 Add ci badge 2022-11-27 17:11:32 +01:00
bytedream
3f4ce3a0a9 Fix ci zip action version 2022-11-27 17:11:26 +01:00
bytedream
3e7d2583b7 Merge branch 'next'
# Conflicts:
#	README.md
#	cli/commands/archive/archive.go
#	cli/commands/download/download.go
#	cli/commands/login/login.go
#	cli/root.go
#	crunchy-cli.1
#	go.mod
#	go.sum
#	utils/locale.go
2022-11-27 16:55:16 +01:00
ByteDream
a7c2bbe807 Add ci workflow 2022-11-27 15:18:55 +01:00
ByteDream
4fd98723ea Change archive flag name from audio to locale 2022-11-26 22:37:58 +01:00
ByteDream
502cb39923 Fix download binary description 2022-11-24 15:54:17 +01:00
ByteDream
039d7cfb81 Rewrite it in Rust 2022-11-24 15:30:49 +01:00
ByteDream
59e8793a2f Set option to modify locale used (#60) 2022-11-01 22:17:19 +01:00
ByteDream
10617df834 Fix archive sorting (#63) 2022-10-31 22:18:44 +01:00
ByteDream
95b66c3ff5 Fix subtitle styling and size (#66) 2022-10-31 21:19:24 +01:00
ByteDream
0572af4e07 Change links from beta.crunchyroll.com to www.crunchyroll.com 2022-10-28 12:43:51 +02:00
ByteDream
f3e93ba2b8
Merge pull request #61 from crunchy-labs/dependabot/go_modules/github.com/spf13/cobra-1.6.1
Bump github.com/spf13/cobra from 1.6.0 to 1.6.1
2022-10-25 07:51:35 +02:00
dependabot[bot]
dc7e5d564e
Bump github.com/spf13/cobra from 1.6.0 to 1.6.1
Bumps [github.com/spf13/cobra](https://github.com/spf13/cobra) from 1.6.0 to 1.6.1.
- [Release notes](https://github.com/spf13/cobra/releases)
- [Commits](https://github.com/spf13/cobra/compare/v1.6.0...v1.6.1)

---
updated-dependencies:
- dependency-name: github.com/spf13/cobra
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-10-25 01:56:32 +00:00
ByteDream
e0d100b627 Update dependencies and fix #59 partially 2022-10-20 22:06:43 +02:00
ByteDream
cd1587c613
Merge pull request #57 from crunchy-labs/dependabot/go_modules/github.com/spf13/cobra-1.6.0
Bump github.com/spf13/cobra from 1.5.0 to 1.6.0
2022-10-12 23:39:34 +02:00
dependabot[bot]
eba2417f4e
Bump github.com/spf13/cobra from 1.5.0 to 1.6.0
Bumps [github.com/spf13/cobra](https://github.com/spf13/cobra) from 1.5.0 to 1.6.0.
- [Release notes](https://github.com/spf13/cobra/releases)
- [Commits](https://github.com/spf13/cobra/compare/v1.5.0...v1.6.0)

---
updated-dependencies:
- dependency-name: github.com/spf13/cobra
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-10-12 01:48:15 +00:00
ByteDream
36bdc76a48
Update discord invite link 2022-10-08 02:05:29 +02:00
ByteDream
1e17c0b117
Merge pull request #56 from crunchy-labs/dependabot/go_modules/github.com/crunchy-labs/crunchyroll-go/v3-3.0.3
Bump github.com/crunchy-labs/crunchyroll-go/v3 from 3.0.2 to 3.0.3
2022-10-05 13:31:50 +02:00
dependabot[bot]
b4bc047b30
Bump github.com/crunchy-labs/crunchyroll-go/v3 from 3.0.2 to 3.0.3
Bumps [github.com/crunchy-labs/crunchyroll-go/v3](https://github.com/crunchy-labs/crunchyroll-go) from 3.0.2 to 3.0.3.
- [Release notes](https://github.com/crunchy-labs/crunchyroll-go/releases)
- [Changelog](https://github.com/crunchy-labs/crunchyroll-go/blob/master/news.go)
- [Commits](https://github.com/crunchy-labs/crunchyroll-go/compare/v3.0.2...v3.0.3)

---
updated-dependencies:
- dependency-name: github.com/crunchy-labs/crunchyroll-go/v3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-10-05 02:01:02 +00:00
bytedream
d4bef511cb Fix update executable path 2022-09-11 13:02:08 +02:00
bytedream
2f08aeac1a Add -s flag documentation 2022-09-05 22:55:27 +02:00
bytedream
3f12cbae95 Change -s default value 2022-09-05 22:54:52 +02:00
ByteDream
b99c3395f1
Merge pull request #48 from LordBex/master
Add flag to specify archive subtitles
2022-09-05 22:16:02 +02:00
LordBex
b42c87c9f8 remove stringInSlice and switch to ElementInSlice 2022-09-05 21:59:00 +02:00
LordBex
136d970fec move stringInSlice to separate as ElementInSlice 2022-09-05 21:30:41 +02:00
LordBex
8111f14da3 Merge branch 'master' of https://github.com/crunchy-labs/crunchy-cli 2022-09-05 15:41:00 +02:00
LordBex
97dd801137 adding subtitle flag for archive (-s, --sublang) 2022-09-05 15:40:49 +02:00
bytedream
13798b608e Merge remote-tracking branch 'origin/master' 2022-09-05 00:26:04 +02:00
bytedream
62938a500f Disable subtitles by default on re-encode 2022-09-05 00:24:31 +02:00
bytedream
36c1423ff6 Fix re-encode removes video, audio and subtitle tracks (#47) 2022-09-05 00:20:29 +02:00
ByteDream
027047fc7e
Fix binary links 2022-09-04 20:33:52 +02:00
bytedream
689bbcd9a4 Remove go version badge 2022-09-04 17:12:57 +02:00
bytedream
610c4e2993 Bump go version to 1.19 2022-09-04 17:12:36 +02:00
bytedream
d53b20717a Fix video length sometimes exceeds actual episode length (#32) 2022-09-03 18:22:34 +02:00
bytedream
dc2309ab10 Bump locale names in docs 2022-09-03 15:47:41 +02:00
bytedream
c02306ff9f Bump crunchyroll-go version 2022-09-03 15:47:25 +02:00
bytedream
3f78101eb8 Fix unwanted video track and wrongly labeled audio track in archive (#45) 2022-09-03 15:18:52 +02:00
bytedream
afc85350ab Update arch install package name 2022-08-22 13:31:33 +02:00
bytedream
0371b31dcc Add info and update command examples 2022-08-22 13:06:05 +02:00
ByteDream
f974d5296b
Remove rename issue link 2022-08-22 12:41:47 +02:00
bytedream
e5636df969 Update crunchyroll-go version 2022-08-21 22:07:46 +02:00
bytedream
d13e5714f8 Fix README details 2022-08-21 19:40:05 +02:00
bytedream
a907958a71 Merge remote-tracking branch 'origin/next/partially-v3'
# Conflicts:
#	Makefile
#	README.md
#	cli/commands/archive/archive.go
#	cli/commands/download/download.go
#	cmd/crunchyroll-go/cmd/login.go
#	cmd/crunchyroll-go/cmd/root.go
#	cmd/crunchyroll-go/cmd/utils.go
#	cmd/crunchyroll-go/main.go
#	crunchy-cli.1
#	crunchyroll.go
#	go.mod
#	go.sum
#	utils/locale.go
#	utils/sort.go
2022-08-21 19:15:29 +02:00
bytedream
416507c8a6 Remove not working notice for scoop 2022-08-15 00:27:34 +02:00
bytedream
4ae4345c40 Fix smartrelease host url 2022-08-14 13:46:39 +02:00
bytedream
ac876f674a Update to newer crunchyroll-go version 2022-08-14 13:38:42 +02:00
bytedream
441ec084af Re-enable language choosing for series 2022-08-09 01:20:34 +02:00
bytedream
f7a21fbfb2 Change all etp rt related stuff to refresh token 2022-08-09 01:04:46 +02:00
bytedream
6239d10d22 Fix crunchyroll api changes 2022-08-08 22:20:25 +02:00
bytedream
a64981930b Add option to change temp dir 2022-08-02 12:07:43 +02:00
bytedream
caeb734b2c Add login session id warning 2022-08-01 00:41:09 +02:00
ByteDream
b5f4882601
Update issue templates 2022-08-01 00:18:14 +02:00
bytedream
81946c5092 Remove CI badge 2022-07-31 13:50:59 +02:00
bytedream
fbb90f9079 Fix info spacing 2022-07-29 18:23:38 +02:00
bytedream
b62769ccfd Update next version number 2022-07-27 21:53:03 +02:00
bytedream
8942ea574b Add v3 notice 2022-07-27 21:25:03 +02:00
bytedream
2773445050 Change author names and links to crunchy-labs 2022-07-26 12:47:23 +02:00
bytedream
5b4c228b60 Change crunchyroll-go dependency name 2022-07-25 10:53:29 +02:00
私はレオンです
fd502446c6
Fix typo 2022-07-11 22:03:02 +02:00
ByteDream
5b59662e29
Change discord shield server 2022-07-09 01:25:59 +02:00
ByteDream
1365910610
Change discord invite link 2022-07-09 01:25:00 +02:00
ByteDream
680db83c59
Reactivate scoop install instructions 2022-07-09 01:03:09 +02:00
bytedream
f1a41d6d3b Change name due to organization move 2022-07-07 21:21:44 +02:00
bytedream
303689ecbb Move and refactor files and some more changes :3 2022-06-30 16:08:08 +02:00
bytedream
781e520591 Update cobra version to 1.5.0 2022-06-29 20:39:38 +02:00
bytedream
d65226252d Add split notice to README 2022-06-27 22:36:46 +02:00
bytedream
8a3e42e4d1 Remove library & refactor cli 2022-06-27 22:33:26 +02:00
bytedream
0fed0f8d3b Change license to GPL-3.0 2022-06-27 22:31:36 +02:00
ByteDream
7b16938b52
Merge pull request #42 from ByteDream/v3/feature/info-command
Add info command
2022-06-27 18:46:57 +02:00
ByteDream
32885fd36c
Merge pull request #41 from ByteDream/v3/feature/more-common-api-endpoints
Add more api endpoints
2022-06-26 14:43:56 +02:00
bytedream
cba8968f17 Resolve conflicts 2022-06-26 14:41:42 +02:00
bytedream
79c3ba2636 Refactor to use consts instead of string 2022-06-24 11:51:48 +02:00
bytedream
3dcfbc0fbb Add docs to wallpaper 2022-06-24 11:51:12 +02:00
bytedream
2569ddd1c7 Add docs to crunchylists 2022-06-24 11:34:02 +02:00
bytedream
fa2321e9e8 Rename 'Logout' to 'InvalidateSession' 2022-06-23 17:40:29 +02:00
bytedream
1a4abdc4d8 Made 'l' in crunchylist lowercase and made CrunchylistFromID private 2022-06-23 17:31:26 +02:00
bytedream
e6172cdf90 Change watchlist option order type and content type 2022-06-23 17:16:47 +02:00
bytedream
14491ce6c9 Rename 'markedAs' to 'votedAs' 2022-06-23 17:01:08 +02:00
bytedream
4cfcc11e20 Simplified 'Liked' 2022-06-23 17:00:46 +02:00
bytedream
0521895f11 Add function to check if comment is flagged as spoiler 2022-06-23 16:59:54 +02:00
bytedream
f03287856b Add function to check if comment is reported 2022-06-23 16:59:04 +02:00
bytedream
9919a48e9a Change 'UnreportComment' to 'RemoveReport' 2022-06-23 16:57:49 +02:00
bytedream
ead1db2be8 Add function to check if a comment is liked by the logged-in user 2022-06-23 16:50:48 +02:00
bytedream
28070bd32d Add function to check if a comment is marked as spoiler 2022-06-23 16:44:34 +02:00
bytedream
a283ba7247 Add functions to get wallpaper urls 2022-06-23 14:27:15 +02:00
bytedream
bfad0caa9a Update email language and video sub language setting function names 2022-06-23 13:52:10 +02:00
ByteDream
2a05a74cc9
Merge pull request #40 from ByteDream/dependabot/go_modules/github.com/spf13/cobra-1.5.0
Bump github.com/spf13/cobra from 1.4.0 to 1.5.0
2022-06-22 12:14:30 +02:00
dependabot[bot]
810f3ae12e
Bump github.com/spf13/cobra from 1.4.0 to 1.5.0
Bumps [github.com/spf13/cobra](https://github.com/spf13/cobra) from 1.4.0 to 1.5.0.
- [Release notes](https://github.com/spf13/cobra/releases)
- [Commits](https://github.com/spf13/cobra/compare/v1.4.0...v1.5.0)

---
updated-dependencies:
- dependency-name: github.com/spf13/cobra
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-22 01:35:26 +00:00
bytedream
2067c50937 Add logout endpoint 2022-06-21 22:04:22 +02:00
bytedream
f71846628d Refactor crunchyroll.go 2022-06-21 22:02:18 +02:00
bytedream
ec872d8c86 Move functions into their own, separate files & add docs 2022-06-21 21:15:49 +02:00
bytedream
d1859b4c25 Change const names to make them more readable 2022-06-21 17:43:12 +02:00
bytedream
256c97c2b7 Fix Wallpaper type not found 2022-06-20 10:24:01 +02:00
bytedream
715ade831c Add more account and profile endpoints 2022-06-20 10:22:17 +02:00
bytedream
cee3410532 Add comment endpoint 2022-06-19 14:43:46 +02:00
bytedream
0c93893627 Fix error handling caused panic (again) 2022-06-19 13:38:41 +02:00
bytedream
475dc34f7a Fix error handling caused panic 2022-06-19 13:31:29 +02:00
bytedream
c86595d2c6 Add image to common structs 2022-06-19 02:28:03 +02:00
bytedream
9f6a225caf Add better error output 2022-06-19 02:00:58 +02:00
bytedream
72484c78af Add crunchylists endpoint 2022-06-19 01:56:39 +02:00
bytedream
aa088cb318 Fix error printing caused panic 2022-06-19 01:42:57 +02:00
bytedream
8ddb436fac Move WatchlistEntry struct to own file 2022-06-19 01:03:10 +02:00
bytedream
f9792aa847 Add extra file for common in different places used elements 2022-06-19 00:54:30 +02:00
bytedream
5709012dfe Add custom error for internal request 2022-06-19 00:36:27 +02:00
bytedream
c5f2b55f34 Add watchlist endpoint, add new request method & change SortType name and consts 2022-06-19 00:22:38 +02:00
ByteDream
8d69b7775b
Add name poll notice 2022-06-12 14:23:13 +02:00
ByteDream
170fb2efb8
Merge pull request #38 from ByteDream/v3/feature/non-premium-support
Extend support for non premium accounts
2022-06-10 16:14:44 +02:00
bytedream
137f3779ea Fix merge issues again, I love this shit 2022-06-10 16:12:58 +02:00
ByteDream
ae075ed4c9
Merge branch 'next/v3' into v3/feature/non-premium-support 2022-06-10 16:04:25 +02:00
bytedream
31d3065e7b Fix weird code which was probably caused by a wonderful merge 2022-06-09 10:31:16 +02:00
bytedream
dcdde6749e Add info command 2022-06-09 09:56:32 +02:00
bytedream
69d2e10362 I fucking hate it 2022-06-08 14:18:20 +02:00
ByteDream
223459526e
Merge pull request #37 from ByteDream/v3/feature/encrypted-credentials
Add support for encrypted credentials
2022-06-08 14:13:22 +02:00
ByteDream
735136077e
I removed it but ok github if you want i do it a second time, no problem 2022-06-08 14:12:30 +02:00
ByteDream
c963af1f11
Why github 2022-06-08 14:10:46 +02:00
ByteDream
a20c70cd2b
Merge branch 'next/v3' into v3/feature/encrypted-credentials 2022-06-08 14:08:34 +02:00
ByteDream
51b5e7b2ff
Add bug issue templates 2022-06-06 21:23:29 +02:00
bytedream
ce29e31164 Add encrypt flag notice when use login without it 2022-05-31 17:04:39 +02:00
bytedream
b9ff56c111 Extend encrypt flag description 2022-05-31 17:01:03 +02:00
ByteDream
141173d3c8
Merge pull request #35 from ByteDream/v3/feature/common-api-endpoints
Add more api endpoints
2022-05-30 12:34:18 +02:00
bytedream
38fe521d55 Resolve more merge conflicts which GitHub didn't save lul 2022-05-30 12:32:13 +02:00
ByteDream
35b1cbbdb4
Fix typo 2022-05-30 12:27:20 +02:00
ByteDream
3a7ec02598
Resolve merge conflicts 2022-05-30 12:24:54 +02:00
ByteDream
0092867b97
Merge branch 'next/v3' into v3/feature/common-api-endpoints 2022-05-30 12:19:20 +02:00
bytedream
1c37c3e699 Add method parameter to internal request function 2022-05-30 12:08:52 +02:00
ByteDream
e31b8730da
Merge pull request #34 from ByteDream/v3/feature/full-beta-login
Extend login mechanism
2022-05-30 09:18:37 +02:00
bytedream
2471042d02 Change error formatter for non Errorf 2022-05-30 08:56:30 +02:00
bytedream
6581a5bd0f Fix typo 2022-05-29 16:10:37 +02:00
ByteDream
048d1ba782
Merge pull request #33 from IchBinLeoon/master
Implement more beta api endpoints
2022-05-29 15:29:25 +02:00
IchBinLeoon
29343d1c6f Add requested changes 2022-05-29 15:08:38 +02:00
ByteDream
b53256ca3f
Use Account struct directly instead of maps 2022-05-29 10:10:48 +02:00
IchBinLeoon
acc6c63ebd Fix comment 2022-05-29 01:42:24 +02:00
IchBinLeoon
7897da3baf Add account and update comments 2022-05-29 00:21:17 +02:00
IchBinLeoon
cf35596985 Add watch history 2022-05-28 19:55:56 +02:00
IchBinLeoon
08c46e50bb Add new endpoints 2022-05-27 19:05:35 +02:00
bytedream
15373ed7d6 Fix typo 2022-05-27 16:03:37 +02:00
bytedream
b4ba8c4599 Bump go CI version to 1.18 2022-05-27 16:00:08 +02:00
bytedream
0780a2a2bc Change login type from session id to etp rt 2022-05-27 15:59:00 +02:00
bytedream
c94ce0fb59 Fix country code not set 2022-05-27 15:58:28 +02:00
bytedream
638689ee32 Add new login method & deprecated login with session id 2022-05-27 15:13:15 +02:00
bytedream
382d19ee94 Fix login not working with session id 2022-05-27 11:17:08 +02:00
bytedream
2d28991a70 Remove v2 release notice 2022-05-21 21:41:25 +02:00
bytedream
eb2414d012 Update module version to v3 2022-05-21 21:38:45 +02:00
bytedream
b78d6a7871 Change default Makefile version variable to development 2022-05-21 00:29:03 +02:00
bytedream
4d65d2f2df Bump CI go version to 1.18 2022-05-20 23:05:38 +02:00
bytedream
a4ec163275 Add basic encrypted login credentials support 2022-05-20 22:57:07 +02:00
bytedream
b491ba0f58 Merge branch 'feature/update-command' into v3 2022-05-18 22:26:40 +02:00
bytedream
608e03bc11 Add better update output 2022-05-18 22:21:49 +02:00
bytedream
d4e095a576 Fix typo 2022-05-18 21:56:29 +02:00
bytedream
7be803d485 Add extended error message if account is non-premium 2022-05-18 21:54:31 +02:00
bytedream
f635bf1a2e Add available function to check if season streams are available 2022-05-18 21:50:41 +02:00
bytedream
43be2eee14 Fix typo & add audio locale todo for non-premium accounts 2022-05-18 21:50:41 +02:00
bytedream
0ffae4ddda Add available function to check if episode streams are available 2022-05-18 21:50:30 +02:00
bytedream
5d732123d9 Change cli name from crunchyroll to crunchyroll-go 2022-05-18 11:35:21 +02:00
bytedream
00ea7635eb Version 2.2.2 2022-05-18 11:18:27 +02:00
bytedream
b92eddc5d2 Add temporary session id lookup on login first 2022-05-18 11:14:35 +02:00
bytedream
ea3506b7f6 Fix logging in message shown for too long 2022-05-18 11:11:18 +02:00
bytedream
b70bf9902b Add error return in some login failure cases (#30) 2022-05-18 11:01:49 +02:00
bytedream
0fa829828f Fix 1080p, 720p, ... not working 2022-05-16 22:42:31 +02:00
bytedream
6c476df24e Set beta url notice only if account is premium 2022-05-16 22:07:44 +02:00
bytedream
5b3466d06d Add stream not available with non-premium error notice 2022-05-16 22:06:44 +02:00
bytedream
afa975c459 Add session id always cached in temp directory (to prevent #30) 2022-05-16 21:21:35 +02:00
bytedream
f51bdeaec7 Add error return in some login failure cases (#30) 2022-05-16 20:03:52 +02:00
bytedream
62735cf07c Change request url for some request & regex 2022-05-16 19:28:05 +02:00
bytedream
192a85afb8 Use fmt.Errorf instead of errors.New & new invalid session id error message 2022-05-13 19:53:16 +02:00
bytedream
f046b68371 Update workflows running on every branch 2022-05-13 19:34:28 +02:00
bytedream
901bbf0706 Add update command 2022-05-13 19:22:17 +02:00
bytedream
3ee53c0cab Bump go version to 1.18 2022-05-13 18:38:38 +02:00
bytedream
e7e106f74d Fix version not set when building with make 2022-05-08 19:14:16 +02:00
ByteDream
11b0f2b48b
Fix typos 2022-05-08 15:40:36 +02:00
bytedream
1c1dd5ec4b Version 2.2.1 2022-05-08 11:07:00 +02:00
bytedream
ed8e63c268 Fix locale panic (#29) 2022-05-08 11:06:37 +02:00
bytedream
5e3636015b Remove AccessError error struct 2022-05-07 10:46:25 +02:00
bytedream
7db4ca6b93 Fix typos 2022-05-06 11:22:32 +02:00
bytedream
df44104900 Update example urls to their beta equivalents 2022-05-04 08:17:33 +02:00
bytedream
ad703fb985 Update documentation url 2022-05-01 17:12:29 +02:00
bytedream
a590da8231 Version 2.2.0 2022-05-01 14:04:06 +02:00
bytedream
ddfd2b44a1 Update library get option to v2 2022-05-01 14:03:17 +02:00
bytedream
80b0784f50 Update module to v2 2022-05-01 13:26:25 +02:00
bytedream
a49e65e151 Enable autosuggestions but hide it from commands 2022-05-01 13:26:25 +02:00
ByteDream
e65535bf00
Merge pull request #28 from hekmon/silent_login_fail
Silent login fail error handling (#27)
2022-05-01 12:22:48 +02:00
ByteDream
1f1f6849dc
Fix typo 2022-04-29 15:50:10 +02:00
Hekmon
413949797c
avoid copy error to be shadowed 2022-04-29 11:49:23 +02:00
Hekmon
362708cf35
handle json unmarshall error 2022-04-29 11:45:00 +02:00
Hekmon
187a0c8817
add missing defer 2022-04-29 11:43:43 +02:00
Hekmon
aa3f8e1b34
handle json parsing errors 2022-04-29 11:42:25 +02:00
Hekmon
353f425bbf
typo fix 2022-04-29 11:36:45 +02:00
Hekmon
db47eeb11c
handle login with creds errors 2022-04-29 11:34:17 +02:00
Hekmon
037df1d16f
handle session queries not being valid 2022-04-29 11:31:01 +02:00
bytedream
48595f25fa Update debug print variable 2022-04-28 10:21:02 +02:00
bytedream
0c92fc0989 Deactivate subtitles by default (#26) 2022-04-27 10:53:40 +02:00
ByteDream
1e865adaa5
Typo fix in scoop install 2022-04-26 08:53:02 +02:00
ByteDream
c9ad097d85
Fix typo 2022-04-26 08:42:01 +02:00
bytedream
980c28a754 Add scoop installer instructions 2022-04-26 08:34:50 +02:00
bytedream
06a5414210 Add video, audio and subtitle as locale (#26) 2022-04-26 08:34:33 +02:00
ByteDream
68aa7e903f
Update go build instructions 2022-04-22 12:30:33 +02:00
ByteDream
58bf549964
Update README.md 2022-04-19 21:32:19 +02:00
ByteDream
aeb159960c
Merge pull request #25 from IchBinLeoon/master
Rename getCmd variable to downloadCmd
2022-04-18 17:02:04 +02:00
IchBinLeoon
be3d33744f Rename getCmd variable to downloadCmd 2022-04-18 16:56:27 +02:00
ByteDream
fd3d945c3a
Create codeql-analysis.yml 2022-04-18 15:43:50 +02:00
ByteDream
580ea74902
Update ci badge 2022-04-18 10:24:56 +02:00
ByteDream
2d08e98538
Add ci badge 2022-04-17 21:33:20 +02:00
bytedream
6385457c10 Add --version command description (#24) 2022-04-17 21:28:53 +02:00
ByteDream
46331f4c7e
Merge pull request #24 from IchBinLeoon/master
Add --version flag to cli root command
2022-04-17 21:09:09 +02:00
IchBinLeoon
4b5b187730 Add version to cli root command 2022-04-17 18:57:42 +02:00
ByteDream
543b9c3668
Update ci.yml 2022-04-16 02:02:10 +02:00
ByteDream
a98eb56fba
Create ci.yml 2022-04-16 01:59:34 +02:00
ByteDream
9ccd1ed93c
Create dependabot.yml 2022-04-16 01:27:59 +02:00
bytedream
b524a1a7dd Version 2.1.0 2022-04-16 01:08:18 +02:00
bytedream
598e460e6c Add custom useragent for cli request 2022-04-16 01:07:56 +02:00
bytedream
3617955bc5 Fix typos & add more comments 2022-04-16 00:17:36 +02:00
bytedream
2e9ce3cf52 Deprecated find video and optimized find episode (as result of #22) 2022-04-15 23:45:09 +02:00
bytedream
e2f42493ac Fix credential file overwrite with session id 2022-04-15 22:38:06 +02:00
bytedream
253d8712c8 Add /videos suffix support for classic series url 2022-04-15 22:34:45 +02:00
bytedream
db30b9eadc Add notice when downloading via cli and no episodes could be found 2022-04-15 22:16:45 +02:00
bytedream
02bd33ef59 Deprecate pure crunchyroll classic url functions 2022-04-15 22:14:11 +02:00
bytedream
cda7bc9d35 Add login file fallback and session id caching if logging in with credentials 2022-04-15 22:01:10 +02:00
bytedream
8dcbced9c7 Version 2.0.2 2022-04-02 20:16:45 +02:00
bytedream
d34fd10516 Refactoring 2022-03-28 20:05:40 +02:00
bytedream
d27fc67288 Remove empty output on last download 2022-03-28 20:02:51 +02:00
bytedream
e0069a10e0 Rename variables 2022-03-28 19:57:19 +02:00
bytedream
e4d075c855 Fix download not converting into other media formats if specified 2022-03-28 19:56:09 +02:00
bytedream
051cad4537 Fix download and goroutines flag not working with archive (#19) 2022-03-28 19:48:19 +02:00
bytedream
6b3635aef3 Made symbolic link to second binary in install target relative 2022-03-26 20:30:02 +01:00
71 changed files with 11033 additions and 4396 deletions

1
.envrc Normal file
View file

@ -0,0 +1 @@
use flake

30
.github/ISSUE_TEMPLATE/bug-report.md vendored Normal file
View file

@ -0,0 +1,30 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps / command to reproduce the behavior:
```
$ crunchy ...
```
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Client (please complete the following information):**
- OS: [e.g. Windows]
- Version [e.g. 3.0.0-dev.8 (17233f2 2023-01-10)] <!-- Version 1 or 2 aren't actively supported anymore. Make sure that the bug occurs on the master branch or a version 3 pre-release -->
**Additional context**
Add any other context about the problem here.

View file

@ -0,0 +1,17 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Additional context**
Add any other context or screenshots about the feature request here.

48
.github/workflow-resources/PKGBUILD.binary vendored Normal file
View file

@ -0,0 +1,48 @@
# Maintainer: ByteDream
pkgname=crunchy-cli-bin
pkgdesc="Command-line downloader for Crunchyroll"
arch=('x86_64' 'aarch64')
url="https://github.com/crunchy-labs/crunchy-cli"
license=('MIT')
pkgver=$CI_PKG_VERSION
pkgrel=1
depends=('ffmpeg')
provides=('crunchy-cli')
conflicts=('crunchy-cli')
source_x86_64=(
"crunchy-cli::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-linux-x86_64"
"manpages.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-manpages.zip"
"completions.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-completions.zip"
"LICENSE::https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/v${pkgver}/LICENSE"
)
source_aarch64=(
"crunchy-cli::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-linux-aarch64"
"manpages.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-manpages.zip"
"completions.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-completions.zip"
"LICENSE::https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/v${pkgver}/LICENSE"
)
noextract=("manpages.zip" "completions.zip")
sha256sums_x86_64=('$CI_AMD_BINARY_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')
sha256sums_aarch64=('$CI_ARM_BINARY_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')
package() {
cd "$srcdir"
# all files in manpages.zip and completions.zip are stored in root of the archive, makepkg extracts them all to $srcdir
# which makes it pretty messy. so the extraction is done manually to keep the content of $srcdir structured
mkdir manpages completions
cd manpages
bsdtar -xf ../manpages.zip
cd ../completions
bsdtar -xf ../completions.zip
cd ..
install -Dm755 crunchy-cli $pkgdir/usr/bin/crunchy-cli
install -Dm644 manpages/* -t $pkgdir/usr/share/man/man1
install -Dm644 completions/crunchy-cli.bash $pkgdir/usr/share/bash-completion/completions/crunchy-cli
install -Dm644 completions/_crunchy-cli $pkgdir/usr/share/zsh/site-functions/_crunchy-cli
install -Dm644 completions/crunchy-cli.fish $pkgdir/usr/share/fish/vendor_completions.d/crunchy-cli.fish
install -Dm644 LICENSE $pkgdir/usr/share/licenses/crunchy-cli/LICENSE
}

46
.github/workflow-resources/PKGBUILD.source vendored Normal file
View file

@ -0,0 +1,46 @@
# Maintainer: ByteDream
pkgname=crunchy-cli
pkgdesc="Command-line downloader for Crunchyroll"
arch=('x86_64' 'i686' 'arm' 'armv6h' 'armv7h' 'aarch64')
url="https://github.com/crunchy-labs/crunchy-cli"
license=('MIT')
pkgver=$CI_PKG_VERSION
pkgrel=1
depends=('ffmpeg' 'openssl')
makedepends=('cargo')
source=("${pkgname}-${pkgver}.tar.gz::https://github.com/crunchy-labs/crunchy-cli/archive/refs/tags/v${pkgver}.tar.gz")
sha256sums=('$CI_SHA_SUM')
# lto causes linking errors when executed by this buildscript. besides, lto is already done by cargo itself (which doesn't cause linking errors)
options=(!lto)
prepare() {
cd "$srcdir/${pkgname}-$pkgver"
export RUSTUP_TOOLCHAIN=stable
export CARGO_HOME="$srcdir/cargo-home"
cargo fetch --locked --target "$(rustc -vV | sed -n 's/host: //p')"
}
build() {
cd "$srcdir/${pkgname}-$pkgver"
export RUSTUP_TOOLCHAIN=stable
export CARGO_HOME="$srcdir/cargo-home"
export CRUNCHY_CLI_GIT_HASH=$CI_GIT_HASH
cargo build --frozen --release
}
package() {
cd "$srcdir/${pkgname}-$pkgver"
install -Dm755 target/release/crunchy-cli $pkgdir/usr/bin/crunchy-cli
install -Dm644 target/release/manpages/* -t $pkgdir/usr/share/man/man1
install -Dm644 target/release/completions/crunchy-cli.bash $pkgdir/usr/share/bash-completion/completions/crunchy-cli
install -Dm644 target/release/completions/_crunchy-cli $pkgdir/usr/share/zsh/site-functions/_crunchy-cli
install -Dm644 target/release/completions/crunchy-cli.fish $pkgdir/usr/share/fish/vendor_completions.d/crunchy-cli.fish
install -Dm644 LICENSE $pkgdir/usr/share/licenses/crunchy-cli/LICENSE
}

145
.github/workflows/build.yml vendored Normal file
View file

@ -0,0 +1,145 @@
name: build
on:
push:
branches:
- '*'
pull_request:
workflow_dispatch:
jobs:
build-linux:
runs-on: ubuntu-latest
strategy:
matrix:
include:
- arch: x86_64
toolchain: x86_64-unknown-linux-musl
- arch: aarch64
toolchain: aarch64-unknown-linux-musl
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Cargo cache
uses: actions/cache@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ matrix.toolchain }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: stable
- name: Install cross
run: cargo install --force cross
- name: Build
run: cross build --locked --release --no-default-features --features openssl-tls-static --target ${{ matrix.toolchain }}
- name: Upload binary artifact
uses: actions/upload-artifact@v4
with:
name: crunchy-cli-linux-${{ matrix.arch }}
path: ./target/${{ matrix.toolchain }}/release/crunchy-cli
if-no-files-found: error
- name: Upload manpages artifact
if: ${{ matrix.arch == 'x86_64' }} # only upload the manpages once
uses: actions/upload-artifact@v4
with:
name: manpages
path: ./target/${{ matrix.toolchain }}/release/manpages
if-no-files-found: error
- name: Upload completions artifact
if: ${{ matrix.arch == 'x86_64' }} # only upload the completions once
uses: actions/upload-artifact@v4
with:
name: completions
path: ./target/${{ matrix.toolchain }}/release/completions
if-no-files-found: error
build-mac:
runs-on: ${{ matrix.os }}
strategy:
matrix:
# macos-13 uses x86_64, macos-14 aarch64
# see https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources
include:
- os: macos-13
arch: x86_64
toolchain: x86_64-apple-darwin
- os: macos-14
arch: aarch64
toolchain: aarch64-apple-darwin
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Cargo cache
if: ${{ matrix.os != 'macos-13' }} # when using cache, the 'Setup Rust' step fails for macos 13
uses: actions/cache@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: x86_64-apple-darwin-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: stable
- name: Build
run: cargo build --locked --release --target ${{ matrix.toolchain }}
- name: Upload binary artifact
uses: actions/upload-artifact@v4
with:
name: crunchy-cli-darwin-${{ matrix.arch }}
path: ./target/${{ matrix.toolchain }}/release/crunchy-cli
if-no-files-found: error
build-windows:
runs-on: windows-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Cargo cache
uses: actions/cache@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: x86_64-pc-windows-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Install system dependencies
uses: msys2/setup-msys2@v2
with:
update: true
install: mingw-w64-x86_64-rust base-devel
- name: Build
shell: msys2 {0}
run: cargo build --locked --release --target x86_64-pc-windows-gnu
- name: Upload binary artifact
uses: actions/upload-artifact@v4
with:
name: crunchy-cli-windows-x86_64
path: ./target/x86_64-pc-windows-gnu/release/crunchy-cli.exe
if-no-files-found: error

58
.github/workflows/lint.yml vendored Normal file
View file

@ -0,0 +1,58 @@
name: lint
on:
push:
branches:
- '*'
pull_request:
jobs:
fmt:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Cargo cache
uses: actions/cache@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: x86_64-unknown-linux-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: stable
- name: Check fmt
run: cargo fmt --check
lint:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Cargo cache
uses: actions/cache@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: x86_64-unknown-linux-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: stable
- name: Lint
run: cargo clippy -- -D warnings

74
.github/workflows/publish.yml vendored Normal file
View file

@ -0,0 +1,74 @@
name: publish
on:
push:
tags:
- v*
jobs:
publish-aur:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Get version
run: echo "RELEASE_VERSION=$(echo ${{ github.ref_name }} | cut -c 2-)" >> $GITHUB_ENV
- name: Generate crunchy-cli sha sum
run: |
curl -LO https://github.com/crunchy-labs/crunchy-cli/archive/refs/tags/${{ github.ref_name }}.tar.gz
echo "CRUNCHY_CLI_SHA256=$(sha256sum ${{ github.ref_name }}.tar.gz | cut -f 1 -d ' ')" >> $GITHUB_ENV
- name: Get release commit hash
run: echo "CRUNCHY_CLI_GIT_HASH=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
- name: Generate crunchy-cli PKGBUILD
env:
CI_PKG_VERSION: ${{ env.RELEASE_VERSION }}
CI_SHA_SUM: ${{ env.CRUNCHY_CLI_SHA256 }}
CI_GIT_HASH: ${{ env.CRUNCHY_CLI_GIT_HASH }}
run: envsubst '$CI_PKG_VERSION,$CI_SHA_SUM,$CI_GIT_HASH' < .github/workflow-resources/PKGBUILD.source > PKGBUILD
- name: Publish crunchy-cli to AUR
uses: KSXGitHub/github-actions-deploy-aur@v2.7.0
with:
pkgname: crunchy-cli
pkgbuild: ./PKGBUILD
commit_username: release-action
commit_email: ${{ secrets.AUR_EMAIL }}
ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
commit_message: Update to version ${{ env.RELEASE_VERSION }}
- name: Generate crunchy-cli-bin sha sums
run: |
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-linux-x86_64
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-linux-aarch64
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-completions.zip
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-manpages.zip
curl -LO https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/${{ github.ref_name }}/LICENSE
echo "CRUNCHY_CLI_BIN_x86_64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-linux-x86_64 | cut -f 1 -d ' ')" >> $GITHUB_ENV
echo "CRUNCHY_CLI_BIN_aarch64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-linux-aarch64 | cut -f 1 -d ' ')" >> $GITHUB_ENV
echo "CRUNCHY_CLI_BIN_COMPLETIONS_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-completions.zip | cut -f 1 -d ' ')" >> $GITHUB_ENV
echo "CRUNCHY_CLI_BIN_MANPAGES_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-manpages.zip | cut -f 1 -d ' ')" >> $GITHUB_ENV
echo "CRUNCHY_CLI_BIN_LICENSE_SHA256=$(sha256sum LICENSE | cut -f 1 -d ' ')" >> $GITHUB_ENV
- name: Generate crunchy-cli-bin PKGBUILD
env:
CI_PKG_VERSION: ${{ env.RELEASE_VERSION }}
CI_AMD_BINARY_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_x86_64_SHA256 }}
CI_ARM_BINARY_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_aarch64_SHA256 }}
CI_MANPAGES_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_MANPAGES_SHA256 }}
CI_COMPLETIONS_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_COMPLETIONS_SHA256 }}
CI_LICENSE_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_LICENSE_SHA256 }}
run: envsubst '$CI_PKG_VERSION,$CI_AMD_BINARY_SHA_SUM,$CI_ARM_BINARY_SHA_SUM,$CI_COMPLETIONS_SHA_SUM,$CI_MANPAGES_SHA_SUM,$CI_LICENSE_SHA_SUM' < .github/workflow-resources/PKGBUILD.binary > PKGBUILD
- name: Publish crunchy-cli-bin to AUR
uses: KSXGitHub/github-actions-deploy-aur@v2.7.0
with:
pkgname: crunchy-cli-bin
pkgbuild: ./PKGBUILD
commit_username: release-action
commit_email: ${{ secrets.AUR_EMAIL }}
ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
commit_message: Update to version ${{ env.RELEASE_VERSION }}

10
.gitignore vendored Normal file
View file

@ -0,0 +1,10 @@
# Rust
/target
# Editor
/.idea
/.vscode
# Nix
/result
/.direnv

2506
Cargo.lock generated Normal file

File diff suppressed because it is too large

42
Cargo.toml Normal file
View file

@ -0,0 +1,42 @@
[package]
name = "crunchy-cli"
authors = ["Crunchy Labs Maintainers"]
version = "3.6.7"
edition = "2021"
license = "MIT"
[features]
default = ["native-tls"]
rustls-tls = ["crunchy-cli-core/rustls-tls"]
native-tls = ["crunchy-cli-core/native-tls"]
openssl-tls = ["dep:native-tls-crate", "native-tls-crate/openssl", "crunchy-cli-core/openssl-tls"]
openssl-tls-static = ["dep:native-tls-crate", "native-tls-crate/openssl", "crunchy-cli-core/openssl-tls-static"]
[dependencies]
tokio = { version = "1.38", features = ["macros", "rt-multi-thread", "time"], default-features = false }
native-tls-crate = { package = "native-tls", version = "0.2.12", optional = true }
crunchy-cli-core = { path = "./crunchy-cli-core" }
[build-dependencies]
chrono = "0.4"
clap = { version = "4.5", features = ["string"] }
clap_complete = "4.5"
clap_mangen = "0.2"
crunchy-cli-core = { path = "./crunchy-cli-core" }
[workspace]
members = ["crunchy-cli-core"]
[patch.crates-io]
# fork of the `native-tls` crate which can use openssl as backend on every platform. this is done as `reqwest` only
# supports `rustls` and `native-tls` as tls backend
native-tls = { git = "https://github.com/crunchy-labs/rust-not-so-native-tls.git", rev = "c7ac566" }
[profile.release]
strip = true
opt-level = "z"
lto = true

80
LICENSE
View file

@ -1,61 +1,25 @@
Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
Copyright (c) 2023-NOW Crunchy Labs Team
Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
0. Additional Definitions.
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

34
Makefile
View file

@ -1,34 +0,0 @@
VERSION=2.0.1
BINARY_NAME=crunchy
VERSION_BINARY_NAME=$(BINARY_NAME)-v$(VERSION)
DESTDIR=
PREFIX=/usr
build:
cd cmd/crunchyroll-go && go build -o $(BINARY_NAME)
mv cmd/crunchyroll-go/$(BINARY_NAME) .
clean:
rm -f $(BINARY_NAME) $(VERSION_BINARY_NAME)_*
install:
install -Dm755 $(BINARY_NAME) $(DESTDIR)$(PREFIX)/bin/crunchyroll-go
ln -sf $(DESTDIR)$(PREFIX)/bin/crunchyroll-go $(DESTDIR)$(PREFIX)/bin/crunchy
install -Dm644 crunchyroll-go.1 $(DESTDIR)$(PREFIX)/share/man/man1/crunchyroll-go.1
install -Dm644 LICENSE $(DESTDIR)$(PREFIX)/share/licenses/crunchyroll-go/LICENSE
uninstall:
rm -f $(DESTDIR)$(PREFIX)/bin/crunchyroll-go
rm -f $(DESTDIR)$(PREFIX)/bin/crunchy
rm -f $(DESTDIR)$(PREFIX)/share/man/man1/crunchyroll-go.1
rm -f $(DESTDIR)$(PREFIX)/share/licenses/crunchyroll-go/LICENSE
release:
cd cmd/crunchyroll-go && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o $(VERSION_BINARY_NAME)_linux
cd cmd/crunchyroll-go && CGO_ENABLED=0 GOOS=windows GOARCH=amd64 go build -o $(VERSION_BINARY_NAME)_windows.exe
cd cmd/crunchyroll-go && CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 go build -o $(VERSION_BINARY_NAME)_darwin
strip cmd/crunchyroll-go/$(VERSION_BINARY_NAME)_linux
mv cmd/crunchyroll-go/$(VERSION_BINARY_NAME)_* .

831
README.md
View file

@ -1,211 +1,734 @@
<p align="center"><strong>Version 2 is out 🥳, see all the <a href="https://github.com/ByteDream/crunchyroll-go/releases/tag/v2.0.0">changes.</a></strong></p>
# This project has been sunset as Crunchyroll moved to a DRM-only system. See [#362](https://github.com/crunchy-labs/crunchy-cli/issues/362).
# crunchyroll-go
# crunchy-cli
A [Go](https://golang.org) library & cli for the undocumented [crunchyroll](https://www.crunchyroll.com) api. To use it, you need a crunchyroll premium account to for full (api) access.
👇 A Command-line downloader for [Crunchyroll](https://www.crunchyroll.com).
<p align="center">
<a href="https://github.com/ByteDream/crunchyroll-go">
<img src="https://img.shields.io/github/languages/code-size/ByteDream/crunchyroll-go?style=flat-square" alt="Code size">
<a href="https://github.com/crunchy-labs/crunchy-cli">
<img src="https://img.shields.io/github/languages/code-size/crunchy-labs/crunchy-cli?style=flat-square" alt="Code size">
</a>
<a href="https://github.com/ByteDream/crunchyroll-go/releases/latest">
<img src="https://img.shields.io/github/downloads/ByteDream/crunchyroll-go/total?style=flat-square" alt="Download Badge">
<a href="https://github.com/crunchy-labs/crunchy-cli/releases/latest">
<img src="https://img.shields.io/github/downloads/crunchy-labs/crunchy-cli/total?style=flat-square" alt="Download Badge">
</a>
<a href="https://github.com/ByteDream/crunchyroll-go/blob/master/LICENSE">
<img src="https://img.shields.io/github/license/ByteDream/crunchyroll-go?style=flat-square" alt="License">
<a href="https://github.com/crunchy-labs/crunchy-cli/blob/master/LICENSE">
<img src="https://img.shields.io/github/license/crunchy-labs/crunchy-cli?style=flat-square" alt="License">
</a>
<a href="https://golang.org">
<img src="https://img.shields.io/github/go-mod/go-version/ByteDream/crunchyroll-go?style=flat-square" alt="Go version">
<a href="https://github.com/crunchy-labs/crunchy-cli/releases">
<img src="https://img.shields.io/github/v/release/crunchy-labs/crunchy-cli?include_prereleases&style=flat-square" alt="Release">
</a>
<a href="https://github.com/ByteDream/crunchyroll-go/releases/latest">
<img src="https://img.shields.io/github/v/release/ByteDream/crunchyroll-go?style=flat-square" alt="Release">
<a href="https://discord.gg/PXGPGpQxgk">
<img src="https://img.shields.io/discord/994882878125121596?label=discord&style=flat-square" alt="Discord">
</a>
<a href="https://discord.gg/gUWwekeNNg">
<img src="https://img.shields.io/discord/915659846836162561?label=discord&style=flat-square" alt="Discord">
<a href="https://github.com/crunchy-labs/crunchy-cli/actions/workflows/build.yml">
<img src="https://img.shields.io/github/actions/workflow/status/crunchy-labs/crunchy-cli/build.yml?branch=master&style=flat-square" alt="Build">
</a>
</p>
<p align="center">
<a href="#%EF%B8%8F-cli">CLI 🖥️</a>
<a href="#%EF%B8%8F-usage">Usage 🖥️</a>
<a href="#-library">Library 📚</a>
<a href="#%EF%B8%8F-disclaimer">Disclaimer ☝️</a>
<a href="#-disclaimer">Disclaimer 📜</a>
<a href="#-license">License ⚖</a>
</p>
# 🖥️ CLI
> We are in no way affiliated with, maintained, authorized, sponsored, or officially associated with Crunchyroll LLC or any of its subsidiaries or affiliates.
> The official Crunchyroll website can be found at [www.crunchyroll.com](https://www.crunchyroll.com/).
## ✨ Features
- Download single videos and entire series from [crunchyroll](https://www.crunchyroll.com).
- Archive episode or seasons in an `.mkv` file with multiple subtitles and audios and compress them to gzip or zip files.
- Specify a range which episodes to download from an anime.
- Download single videos and entire series from [Crunchyroll](https://www.crunchyroll.com).
- Archive episodes or seasons in an `.mkv` file with multiple subtitles and audios.
- Specify a range of episodes to download from an anime.
- Search through the Crunchyroll collection and return metadata (title, duration, direct stream link, ...) of all media types.
## 💾 Get the executable
- 📥 Download the latest binaries [here](https://github.com/ByteDream/crunchyroll-go/releases/latest) or get it from below:
- [Linux (x64)](https://smartrelease.bytedream.org/github/ByteDream/crunchyroll-go/crunchy-{tag}_linux)
- [Windows (x64)](https://smartrelease.bytedream.org/github/ByteDream/crunchyroll-go/crunchy-{tag}_windows.exe)
- [MacOS (x64)](https://smartrelease.bytedream.org/github/ByteDream/crunchyroll-go/crunchy-{tag}_darwin)
- If you use Arch btw. or any other Linux distro which is based on Arch Linux, you can download the package via the [AUR](https://aur.archlinux.org/packages/crunchyroll-go/):
```
$ yay -S crunchyroll-go
```
- 🛠 Build it yourself
- use `make` (requires `go` to be installed):
```
$ git clone https://github.com/ByteDream/crunchyroll-go
$ cd crunchyroll-go
$ make && sudo make install
```
- use `go`:
```
$ git clone https://github.com/ByteDream/crunchyroll-go
$ cd crunchyroll-go/cmd/crunchyroll-go
$ go build -o crunchy
### 📥 Download the latest binaries
Check out the [releases](https://github.com/crunchy-labs/crunchy-cli/releases) tab and get the binary from the latest (pre-)release.
### 📦 Get it via a package manager
- [AUR](https://aur.archlinux.org/)
If you're using Arch or an Arch-based Linux distribution, you can install our [AUR](https://aur.archlinux.org/) package.
You need an [AUR helper](https://wiki.archlinux.org/title/AUR_helpers) like [yay](https://github.com/Jguer/yay) to install it.
```shell
# this package builds crunchy-cli manually (recommended)
$ yay -S crunchy-cli
# this package installs the latest pre-compiled release binary
$ yay -S crunchy-cli-bin
```
## 📝 Examples
- [Scoop](https://scoop.sh/)
_Before reading_: Because of the huge functionality not all cases can be covered in the README.
Make sure to check the [wiki](https://github.com/ByteDream/crunchyroll-go/wiki/Cli), further usages and options are described there.
For Windows users, we support the [scoop](https://scoop.sh/#/) command-line installer.
```shell
$ scoop bucket add extras
$ scoop install extras/crunchy-cli
```
- [Homebrew](https://brew.sh/)
For macOS/Linux users, we support the [brew](https://brew.sh/#/) command-line installer. Packages are compiled by the [homebrew project](https://formulae.brew.sh/formula/crunchy-cli) and also install the `openssl@3` and `ffmpeg` dependencies.
```shell
$ brew install crunchy-cli
```
Supported archs: `x86_64_linux`, `arm64_monterey`, `sonoma`, `ventura`
- [Nix](https://nixos.org/)
This requires [nix](https://nixos.org), and you'll probably need `--extra-experimental-features "nix-command flakes"`, depending on your configuration.
```shell
$ nix <run|shell|develop> github:crunchy-labs/crunchy-cli
```
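For instance, a minimal sketch (assuming the flake features mentioned above are enabled) that runs the cli once without installing it:
```shell
# run crunchy-cli straight from the flake and print its help text
$ nix run github:crunchy-labs/crunchy-cli -- --help
```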
### 🛠 Build it yourself
Since we do not support every platform and architecture, you may have to build the project yourself.
This requires [git](https://git-scm.com/) and [Cargo](https://doc.rust-lang.org/cargo).
```shell
$ git clone https://github.com/crunchy-labs/crunchy-cli
$ cd crunchy-cli
# either just build it (will be available in ./target/release/crunchy-cli)...
$ cargo build --release
# ... or install it globally
$ cargo install --force --path .
```
## 🖥️ Usage
> All shown commands are examples 🧑🏼‍🍳
### Global Flags
crunchy-cli requires you to log in.
Though you can use a non-premium account, you will not have access to premium content without a subscription.
You can authenticate with your credentials (email:password) or by using a refresh token.
- <span id="global-credentials">Credentials</span>
```shell
$ crunchy-cli --credentials "email:password" <command>
```
- <span id="global-anonymous">Stay Anonymous</span>
Login without an account (you won't be able to access premium content):
```shell
$ crunchy-cli --anonymous <command>
```
### Global settings
You can set specific settings which will be applied to every command.
- <span id="global-verbose">Verbose output</span>
If you want to include debug information in the output, use the `-v` / `--verbose` flag to show it.
```shell
$ crunchy-cli -v <command>
```
This flag can't be used in combination with `-q` / `--quiet`.
- <span id="global-quiet">Quiet output</span>
If you want to hide all output, use the `-q` / `--quiet` flag to do so.
This is especially useful if you want to pipe the output video to an external program (like a video player).
```shell
$ crunchy-cli -q <command>
```
This flag can't be used in combination with `-v` / `--verbose`.
- <span id="global-lang">Language</span>
By default, the resulting metadata, like the title or description, is shown in your system language (if Crunchyroll supports it, otherwise in English).
If you want to show the results in another language, use the `--lang` flag to set it.
```shell
$ crunchy-cli --lang de-DE <command>
```
- <span id="global-experimental-fixes">Experimental fixes</span>
Crunchyroll constantly changes and breaks its services or just delivers incorrect answers.
The `--experimental-fixes` flag tries to fix some of those issues.
As the *experimental* in `--experimental-fixes` states, these fixes may or may not break other functionality.
```shell
$ crunchy-cli --experimental-fixes <command>
```
For an overview of which parts this flag affects, see the [documentation](https://docs.rs/crunchyroll-rs/latest/crunchyroll_rs/crunchyroll/struct.CrunchyrollBuilder.html) of the underlying Crunchyroll library; all functions beginning with `stabilization_` are applied.
- <span id="global-proxy">Proxy</span>
The `--proxy` flag supports https and socks5 proxies to route all your traffic through.
This may be helpful to bypass the geo-restrictions Crunchyroll has on certain series.
You can also choose for which part of the cli the proxy should be used.
Instead of a single url you can pass `<url>:` (only proxies api requests), `:<url>` (only proxies download traffic), or `<url>:<url>` (proxies api requests through the first url and download traffic through the second); a sketch of the split form follows after this list.
```shell
$ crunchy-cli --proxy socks5://127.0.0.1:8080 <command>
```
Make sure that the proxy can either forward TLS requests, which is needed to bypass the (cloudflare) bot protection, or that it is configured to bypass the protection itself.
- <span id="global-user-agent">User Agent</span>
There might be cases where a custom user agent is necessary, e.g. to bypass the cloudflare bot protection (#104).
In such cases, the `--user-agent` flag can be used to set a custom user agent.
```shell
$ crunchy-cli --user-agent "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)" <command>
```
Default is the user agent defined in the underlying [library](https://github.com/crunchy-labs/crunchyroll-rs).
- <span id="global-speed-limit">Speed limit</span>
If you want to limit how fast requests/downloads should be, you can use the `--speed-limit` flag. Allowed units are `B` (bytes), `KB` (kilobytes) and `MB` (megabytes).
```shell
$ crunchy-cli --speed-limit 10MB <command>
```
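As mentioned in the proxy flag description above, a minimal sketch of the split proxy form, assuming a local socks5 proxy on port 8080 that should only handle the download traffic while api requests stay direct:
```shell
# a leading ':' means the proxy is only used for the download traffic
$ crunchy-cli --proxy ":socks5://127.0.0.1:8080" <command>
```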
### Login
Before you can do anything, you have to log in first.
The `login` command can store your session, so you don't have to authenticate every time you execute a command.
This can be performed via crunchyroll account email and password.
```
$ crunchy login user@example.com password
```shell
# save the refresh token which gets generated when login with credentials.
# your email and password won't be stored at any time on disk
$ crunchy-cli login --credentials "email:password"
```
or via session id
```
$ crunchy login --session-id 8e9gs135defhga790dvrf2i0eris8gts
```
With the session stored, you do not need to pass `--credentials` / `--anonymous` anymore when you want to execute a command.
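As a short sketch of that workflow (the episode url is just an example taken from the sections below):
```shell
# log in once; only the generated refresh token is stored on disk
$ crunchy-cli login --credentials "email:password"
# later invocations need no --credentials / --anonymous flag anymore
$ crunchy-cli download https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```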
### Download
By default, the cli tries to download the episode with your system language as audio.
If no streams with your system language are available, the video will be downloaded with japanese audio and hardsubbed subtitles in your system language.
**If your system language is not supported, an error message will be displayed and en-US (american english) will be chosen as language.**
The `download` command lets you download episodes with a specific audio language and optional subtitles.
```
$ crunchy download https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575
```
**Supported urls**
With `-r best` the video(s) will have the best available resolution (mostly 1920x1080 / Full HD).
```
$ crunchy download -r best https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575
```
- Single episode (with [episode filtering](#episode-filtering))
```shell
$ crunchy-cli download https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- Series (with [episode filtering](#episode-filtering))
```shell
$ crunchy-cli download https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
The file is by default saved as a `.ts` (mpeg transport stream) file.
`.ts` files may can't be played or are looking very weird (it depends on the video player you are using).
With the `-o` flag, you can change the name (and file ending) of the output file.
So if you want to save it as, for example, `mp4` file, just name it `whatever.mp4`.
**You need [ffmpeg](https://ffmpeg.org) to store the video in other file formats.**
```
$ crunchy download -o "daaaaaaaaaaaaaaaarling.ts" https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575
```
**Options**
With the `--audio` flag you can specify which audio the video should have and with `--subtitle` which subtitle it should have.
Type `crunchy help download` to see all available locales.
```
$ crunchy download --audio ja-JP --subtitle de-DE https://www.crunchyroll.com/darling-in-the-franxx
```
- <span id="download-audio">Audio language</span>
##### Flags
Set the audio language with the `-a` / `--audio` flag.
This only works if the url points to a series, since episode urls are language-specific.
The following flags can be (optional) passed to modify the [download](#download) process.
```shell
$ crunchy-cli download -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
| Short | Extended | Description |
|-------|----------------|--------------------------------------------------------------------------------|
| `-a` | `--audio` | Forces audio of the video(s). |
| `-s` | `--subtitle` | Forces subtitle of the video(s). |
| `-d` | `--directory` | Directory to download the video(s) to. |
| `-o` | `--output` | Name of the output file. |
| `-r` | `--resolution` | The resolution of the video(s). `best` for best resolution, `worst` for worst. |
| `-g` | `--goroutines` | Sets how many parallel segment downloads should be used. |
Default is your system locale. If not supported by Crunchyroll, `en-US` (American English) is the default.
- <span id="download-subtitle">Subtitle language</span>
Besides the audio, you can specify the subtitle language by using the `-s` / `--subtitle` flag.
In formats that support it (.mp4, .mov and .mkv), subtitles are stored as soft-subs. All other formats are hardsubbed: the subtitles will be burned into the video track (cf. [hardsub](https://www.urbandictionary.com/define.php?term=hardsub)) and thus cannot be turned off.
```shell
$ crunchy-cli download -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is none.
- <span id="download-output">Output template</span>
Define an output template by using the `-o` / `--output` flag.
```shell
$ crunchy-cli download -o "ditf.mp4" https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
Default is `{title}.mp4`. See the [Template Options section](#output-template-options) below for more options.
- <span id="download-output-specials">Output template for special episodes</span>
Define an output template which only gets used when the episode is a special (episode number is 0 or has non-zero decimal places) by using the `--output-specials` flag.
```shell
$ crunchy-cli download --output-specials "Special EP - {title}" https://www.crunchyroll.com/watch/GY8D975JY/veldoras-journal
```
Default is the template, set by the `-o` / `--output` flag. See the [Template Options section](#output-template-options) below for more options.
- <span id="download-universal-output">Universal output</span>
The output template options can be forced to get sanitized via the `--universal-output` flag to be valid across all supported operating systems (Windows has a lot of characters which aren't allowed in filenames...).
```shell
$ crunchy-cli download --universal-output https://www.crunchyroll.com/watch/G7PU4XD48/tales-veldoras-journal-2
```
- <span id="download-resolution">Resolution</span>
The resolution for videos can be set via the `-r` / `--resolution` flag.
```shell
$ crunchy-cli download -r worst https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
Default is `best`.
- <span id="download-language-tagging">Language tagging</span>
You can force the usage of a specific language tagging in the output file with the `--language-tagging` flag.
This might be useful as some video players don't recognize the language tagging Crunchyroll uses internally.
```shell
$ crunchy-cli download --language-tagging ietf https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- <span id="download-ffmpeg-preset">FFmpeg Preset</span>
You can use built-in presets with the `--ffmpeg-preset` flag to convert videos to a specific codec while downloading.
Multiple predefined presets for how videos should be encoded (h264, h265, av1, ...) are available; you can list them with `crunchy-cli download --help`.
If you need more specific ffmpeg customization, you can either convert the output file manually or use ffmpeg output arguments as the value for this flag.
```shell
$ crunchy-cli download --ffmpeg-preset av1-lossless https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- <span id="download-ffmpeg-threads">FFmpeg threads</span>
If you want to manually set how many threads FFmpeg should use, you can use the `--ffmpeg-threads` flag. This does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`.
```shell
$ crunchy-cli download --ffmpeg-threads 4 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- <span id="download-skip-existing">Skip existing</span>
If you re-download a series but want to skip episodes you've already downloaded, the `--skip-existing` flag skips the already existing/downloaded files.
```shell
$ crunchy-cli download --skip-existing https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- <span id="download-skip-specials">Skip specials</span>
If you don't want to download special episodes, use the `--skip-specials` flag to skip them.
```shell
$ crunchy-cli download --skip-specials https://www.crunchyroll.com/series/GYZJ43JMR/that-time-i-got-reincarnated-as-a-slime[S2]
```
- <span id="download-include-chapters">Include chapters</span>
Crunchyroll sometimes provides information about skippable events like the intro or credits.
This information can be stored as chapters in the resulting video file via the `--include-chapters` flag.
```shell
$ crunchy-cli download --include-chapters https://www.crunchyroll.com/watch/G0DUND0K2/the-journeys-end
```
- <span id="download-yes">Yes</span>
Sometimes different seasons have the same season number (e.g. Sword Art Online Alicization and Alicization War of Underworld are both marked as season 3); in such cases an interactive prompt is shown which needs further user input to decide which season to download.
The `--yes` flag suppresses this interactive prompt and just downloads all seasons.
```shell
$ crunchy-cli download --yes https://www.crunchyroll.com/series/GR49G9VP6/sword-art-online
```
If you've passed the `-q` / `--quiet` [global flag](#global-settings), this flag is automatically set.
- <span id="download-force-hardsub">Force hardsub</span>
If you want to burn in the subtitles, even if the output format/container supports soft-subs (e.g. `.mp4`), use the `--force-hardsub` flag to do so.
```shell
$ crunchy-cli download --force-hardsub -s en-US https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- <span id="download-threads">Threads</span>
To increase the download speed, video segments are downloaded simultaneously by creating multiple threads.
If you want to manually specify how many threads to use when downloading, do this with the `-t` / `--threads` flag.
```shell
$ crunchy-cli download -t 1 https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
The default thread count is the number of CPU threads your PC has. A combined sketch of several of these flags follows below.
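Putting several of the flags from this section together, a sketch of a combined invocation (all values are only illustrative and can be swapped for your own):
```shell
# Japanese audio with German subtitles, best resolution, chapters included,
# and files from a previous run are skipped
$ crunchy-cli download -a ja-JP -s de-DE -r best --include-chapters --skip-existing -o "{title}.mp4" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```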
### Archive
Archive works just like [download](#download). It downloads the given videos as `.mkv` files and stores all (soft) subtitles in it.
Default audio locales are japanese and your system language (if available) but you can set more or less with the `--language` flag.
The `archive` command lets you download episodes with multiple audios and subtitles and merges them into a `.mkv` file.
**Supported urls**
- Single episode (with [episode filtering](#episode-filtering))
```shell
$ crunchy-cli archive https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- Series (with [episode filtering](#episode-filtering))
```shell
$ crunchy-cli archive https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
**Options**
- <span id="archive-audio">Audio languages</span>
Set the audio language with the `-a` / `--audio` flag. Can be used multiple times.
```shell
$ crunchy-cli archive -a ja-JP -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is your system locale (if not supported by Crunchyroll, `en-US` (American English) and `ja-JP` (Japanese) are used).
- <span id="archive-subtitle">Subtitle languages</span>
Besides the audio, you can specify the subtitle language by using the `-s` / `--subtitle` flag.
```shell
$ crunchy-cli archive -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `all` subtitles.
- <span id="archive-output">Output template</span>
Define an output template by using the `-o` / `--output` flag.
_crunchy-cli_ exclusively uses the [`.mkv`](https://en.wikipedia.org/wiki/Matroska) container format, because of its ability to store multiple audio, video and subtitle tracks at once.
```shell
$ crunchy-cli archive -o "{title}.mkv" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `{title}.mkv`. See the [Template Options section](#output-template-options) below for more options.
- <span id="archive-output-specials">Output template for special episodes</span>
Define an output template which only gets used when the episode is a special (episode number is 0 or has non-zero decimal places) by using the `--output-specials` flag.
_crunchy-cli_ exclusively uses the [`.mkv`](https://en.wikipedia.org/wiki/Matroska) container format, because of its ability to store multiple audio, video and subtitle tracks at once.
```shell
$ crunchy-cli archive --output-specials "Special EP - {title}" https://www.crunchyroll.com/watch/GY8D975JY/veldoras-journal
```
Default is the template set by the `-o` / `--output` flag. See the [Template Options section](#output-template-options) below for more options.
- <span id="archive-universal-output">Universal output</span>
The output template can be forcibly sanitized via the `--universal-output` flag so that the resulting filenames are valid across all supported operating systems (Windows in particular disallows quite a few characters in filenames).
```shell
$ crunchy-cli archive --universal-output -o "{title}.mkv" https://www.crunchyroll.com/watch/G7PU4XD48/tales-veldoras-journal-2
```
- <span id="archive-resolution">Resolution</span>
The resolution for videos can be set via the `-r` / `--resolution` flag.
```shell
$ crunchy-cli archive -r worst https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `best`.
- <span id="archive-merge">Merge behavior</span>
Due to censorship or additional intros, some episodes have multiple lengths for different languages.
In the best case, when multiple audio & subtitle tracks are used, there is only one *video* track and all other languages can be stored as audio-only.
But, as said, this is not always the case.
With the `-m` / `--merge` flag you can define the behavior when an episode's video tracks differ in length.
Valid options are `audio` - store one video and all other languages as audio only; `video` - store the video + audio for every language; `auto` - detect if the videos differ in length: if so, behave like `video`, otherwise like `audio`; `sync` - detect if the videos differ in length: if so, try to find the offset of matching audio parts and remove it from the beginning, otherwise behave like `audio`.
Subtitles will always match the primary audio and video.
```shell
$ crunchy-cli archive -m audio https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `auto`.
- <span id="archive-merge-time-tolerance">Merge time tolerance</span>
Sometimes two video tracks are downloaded with `--merge` set to `auto` even though they differ by only a few milliseconds in length, which shouldn't be noticeable to the viewer.
To prevent this, you can specify a range in milliseconds with the `--merge-time-tolerance` flag; if the length difference is within that range, only one video is downloaded.
```shell
$ crunchy-cli archive -m auto --merge-time-tolerance 100 https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `200` milliseconds.
- <span id="archive-merge-sync-tolerance">Merge sync tolerance</span>
Sometimes two video tracks are downloaded with `--merge` set to `sync` because the audio fingerprinting fails to identify matching audio parts (e.g. the opening).
To prevent this, you can use the `--merge-sync-tolerance` flag to specify the maximum difference at which two fingerprints are still considered equal.
```shell
$ crunchy-cli archive -m sync --merge-sync-tolerance 3 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
Default is `6`.
- <span id="archive-merge-sync-precision">Merge sync precision</span>
If you use `--merge` set to `sync` and the syncing doesn't seem accurate enough or takes too long, you can use the `--merge-sync-precision` flag to specify the number of offset determination runs from which the final offset is calculated.
```shell
$ crunchy-cli archive -m sync --merge-sync-precision 3 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
Default is `4`.
- <span id="archive-language-tagging">Language tagging</span>
You can force the usage of a specific language tagging in the output file with the `--language-tagging` flag.
This might be useful as some video players don't recognize the language tagging Crunchyroll uses internally.
```shell
$ crunchy-cli archive --language-tagging ietf https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- <span id="archive-ffmpeg-preset">FFmpeg Preset</span>
You can pick one of the built-in presets with the `--ffmpeg-preset` flag to convert videos to a specific codec while downloading.
Multiple predefined presets for how videos should be encoded (h264, h265, av1, ...) are available; you can list them with `crunchy-cli archive --help`.
If you need more specific ffmpeg customizations, you can either convert the output file manually or pass ffmpeg output arguments as the value for this flag.
```shell
$ crunchy-cli archive --ffmpeg-preset av1-lossless https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- <span id="archive-ffmpeg-threads">FFmpeg threads</span>
If you want to manually set how many threads FFmpeg should use, you can use the `--ffmpeg-threads` flag. This does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`.
```shell
$ crunchy-cli archive --ffmpeg-threads 4 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- <span id="archive-default-subtitle">Default subtitle</span>
Use the `--default-subtitle` flag to set which subtitle language should be flagged as **default** and **forced**.
```shell
$ crunchy-cli archive --default-subtitle en-US https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is none.
- <span id="archive-include-fonts">Include fonts</span>
You can embed the fonts required by the subtitles directly into the output file with the `--include-fonts` flag. Video players which support this will then use the embedded fonts for the subtitles instead of the system fonts.
```shell
$ crunchy-cli archive --include-fonts https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- <span id="archive-include-chapters">Include chapters</span>
Crunchyroll sometimes provides information about skippable events like the intro or credits.
This information can be stored as chapters in the resulting video file via the `--include-chapters` flag.
This flag only works if `--merge` is set to `audio`, because chapters cannot be mapped to a specific video stream.
```shell
$ crunchy-cli archive --include-chapters https://www.crunchyroll.com/watch/G0DUND0K2/the-journeys-end
```
- <span id="archive-skip-existing">Skip existing</span>
If you re-download a series but want to skip episodes you've already downloaded, the `--skip-existing` flag skips the already existing/downloaded files.
```shell
$ crunchy-cli archive --skip-existing https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- <span id="archive-skip-existing-method">Skip existing method</span>
By default, already existing files are determined by their name and the download of the corresponding episode is skipped.
But sometimes Crunchyroll adds dubs or subs to an already existing episode; these changes aren't recognized, so `--skip-existing` just skips the episode.
This behavior can be changed with the `--skip-existing-method` flag. Valid options are `audio` and `subtitle` (if the file already exists but contains fewer audio/subtitle tracks than what should be downloaded, the episode gets downloaded again and the file is overwritten).
```shell
$ crunchy-cli archive --skip-existing-method audio --skip-existing-method subtitle https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- <span id="archive-skip-specials">Skip specials</span>
If you don't want to download special episodes, use the `--skip-specials` flag to skip them.
```shell
$ crunchy-cli archive --skip-specials https://www.crunchyroll.com/series/GYZJ43JMR/that-time-i-got-reincarnated-as-a-slime[S2]
```
- <span id="archive-yes">Yes</span>
Sometimes different seasons have the same season number (e.g. Sword Art Online Alicization and Alicization War of Underworld are both marked as season 3); in such cases an interactive prompt is shown which requires further user input to decide which season to download.
The `--yes` flag suppresses this interactive prompt and just downloads all seasons.
```shell
$ crunchy-cli archive --yes https://www.crunchyroll.com/series/GR49G9VP6/sword-art-online
```
If you've passed the `-q` / `--quiet` [global flag](#global-settings), this flag is automatically set.
- <span id="archive-threads">Threads</span>
To increase the download speed, video segments are downloaded simultaneously by creating multiple threads.
If you want to manually specify how many threads to use when downloading, do this with the `-t` / `--threads` flag.
```shell
$ crunchy-cli archive -t 1 https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
The default thread count equals the number of CPU threads your machine has.
### Search
The `search` command is a powerful tool to query the Crunchyroll library.
It behaves like the regular search on the website but is able to further process the results and return everything it can find, from the series title down to the raw stream URL.
_Using this command with the `--anonymous` flag or a non-premium account may return incomplete results._
**Supported urls/input**
- Single episode (with [episode filtering](#episode-filtering))
```shell
$ crunchy-cli search https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- Series (with [episode filtering](#episode-filtering))
```shell
$ crunchy-cli search https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- Search input
```shell
$ crunchy-cli search "darling in the franxx"
```
**Options**
- <span id="search-audio">Audio</span>
Set the audio language to search via the `--audio` flag. Can be used multiple times.
```shell
$ crunchy-cli search --audio en-US https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is your system locale.
- <span id="search-result-limit">Result limit</span>
If your input is a search term instead of a URL, you have multiple options to control which results to process.
The `--search-top-results-limit` flag sets the limit of top search results to process.
`--search-series-limit` sets the limit of only series, `--search-movie-listing-limit` of only movie listings, `--search-episode-limit` of only episodes and `--search-music-limit` of only concerts and music videos.
```shell
$ crunchy-cli search --search-top-results-limit 10 "darling in the franxx"
# only return series which have 'darling' in it. do not return top results which might also be non-series items
$ crunchy-cli search --search-top-results-limit 0 --search-series-limit 10 "darling"
# this returns 2 top results, 3 movie listings, 5 episodes and 1 music item as result
$ crunchy-cli search --search-top-results-limit 2 --search-movie-listing-limit 3 --search-episode-limit 5 --search-music-limit 1 "test"
```
Default is `5` for `--search-top-results-limit`, `0` for all others.
- Output template
The search command is designed to show only the specific information you want.
This is done with the `-o`/`--output` flag.
You can specify keywords in a specific pattern, and they will get replaced in the output text.
The required pattern for this begins with `{{`, then the keyword, and closes with `}}` (e.g. `{{episode.title}}`).
For example, if you want to get the title of an episode, you can use `Title: {{episode.title}}` and `{{episode.title}}` will be replaced with the episode title.
You can see all supported keywords with `crunchy-cli search --help`.
```shell
$ crunchy-cli search -o "{{series.title}}" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `S{{season.number}}E{{episode.number}} - {{episode.title}}`.
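Multiple keywords can be combined in a single template. A hypothetical example that uses only the keywords already shown above and in the default:
```shell
$ crunchy-cli search -o "{{series.title}} - S{{season.number}}E{{episode.number}} - {{episode.title}}" "darling in the franxx"
```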
---
#### Output Template Options
You can use various template options to change how the filename is processed. The following tags are available:
- `{title}` → Title of the video
- `{series_name}` → Name of the series
- `{season_name}` → Name of the season
- `{audio}` → Audio language of the video
- `{width}` → Width of the video
- `{height}` → Height of the video
- `{season_number}` → Number of the season
- `{episode_number}` → Number of the episode
- `{relative_episode_number}` → Number of the episode relative to its season
- `{sequence_number}` → Like `{episode_number}` but without possible non-number characters
- `{relative_sequence_number}` → Like `{relative_episode_number}` but with support for episode 0's and .5's
- `{release_year}` → Release year of the video
- `{release_month}` → Release month of the video
- `{release_day}` → Release day of the video
- `{series_id}` → ID of the series
- `{season_id}` → ID of the season
- `{episode_id}` → ID of the episode
Example:
Archive a file
```shell
$ crunchy archive https://www.crunchyroll.com/darling-in-the-franxx/darling-in-the-franxx/episode-1-alone-and-lonesome-759575
$ crunchy-cli archive -o "[S{season_number}E{episode_number}] {title}.mkv" https://www.crunchyroll.com/series/G8DHV7W21/dragon-ball
# Output file: '[S01E01] Secret of the Dragon Ball.mkv'
```
Download the first two episodes of Darling in the FranXX and store them compressed in a file.
```shell
$ crunchy archive -c "ditf.tar.gz" https://www.crunchyroll.com/darling-in-the-franxx/darling-in-the-franxx
```
#### Episode filtering
Filter patterns can be used to download a specific range of episodes from a single series.
A filter pattern may consist of either a season, an episode, or a combination of the two.
When used in combination, seasons `S` must be defined before episodes `E`.
There are many possible patterns, for example:
- `...[E5]` - Download the fifth episode.
- `...[S1]` - Download the whole first season.
- `...[-S2]` - Download the first two seasons.
- `...[S3E4-]` - Download everything from season three, episode four, onwards.
- `...[S1E4-S3]` - Download season one, starting at episode four, then download seasons two and three.
- `...[S3,S5]` - Download season three and five.
- `...[S1-S3,S4E2-S4E6]` - Download season one to three, then episodes two to six from season four.
In practice, it would look like this:
```
https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx[E1-E5]
```
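Depending on your shell, the square brackets of a filter may be interpreted as a glob pattern (zsh, for example, typically aborts with "no matches found"), so quoting the whole url is a safe habit:
```shell
# quote the url so the shell passes the [..] filter through literally
$ crunchy-cli download "https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx[S1E4-S3]"
```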
# 📜 Disclaimer
This tool is meant for private use only.
You need a [Crunchyroll Premium](https://www.crunchyroll.com/welcome#plans) subscription to access premium content.
##### Flags
The following flags can be (optionally) passed to modify the [archive](#archive) process.
| Short | Extended | Description |
|-------|----------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `-l` | `--language` | Audio locale which should be downloaded. Can be used multiple times. |
| `-d` | `--directory` | Directory to download the video(s) to. |
| `-o` | `--output` | Name of the output file. |
| `-m` | `--merge` | Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio', 'video'. See the [wiki](https://github.com/ByteDream/crunchyroll-go/wiki/Cli#archive) for more information. |
| `-c` | `--compress` | If set, all output will be compressed into an archive. This flag sets the name of the compressed output file, and the file ending specifies the compression algorithm (gzip, tar, zip are supported). |
| `-r` | `--resolution` | The resolution of the video(s). `best` for best resolution, `worst` for worst. |
| `-g` | `--goroutines` | Sets how many parallel segment downloads should be used. |
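Combining several of these flags might look like the following sketch (it uses only flags from the table above and the older `crunchy` binary name this part of the documentation refers to):
```shell
$ crunchy archive -l de-DE -r 720p -c "ditf.zip" https://www.crunchyroll.com/darling-in-the-franxx/darling-in-the-franxx
```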
### Help
- General help
```shell
$ crunchy help
```
- Login help
```shell
$ crunchy help login
```
- Download help
```shell
$ crunchy help download
```
- Archive help
```shell
$ crunchy help archive
```
### Global flags
These flags can be used across every sub-command.
| Flag | Description |
|------|------------------------------------------------------|
| `-q` | Disables all output. |
| `-v` | Shows additional debug output. |
| `-p` | Use a proxy to hide your IP / redirect your traffic. |
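For example, a download with verbose output routed through a local proxy (the proxy address is a placeholder) could look like this:
```shell
$ crunchy -v -p "http://localhost:8080" download https://www.crunchyroll.com/darling-in-the-franxx/darling-in-the-franxx/episode-1-alone-and-lonesome-759575
```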
# 📚 Library
Download the library via `go get`
```shell
$ go get github.com/ByteDream/crunchyroll-go
```
The documentation is available on [pkg.go.dev](https://pkg.go.dev/github.com/ByteDream/crunchyroll-go).
Examples of how to use the library and some of its features are described in the [wiki](https://github.com/ByteDream/crunchyroll-go/wiki/Library).
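For orientation, the following is a minimal sketch of direct library usage. It is pieced together only from calls that appear in the removed `cmd` sources further down in this diff (`LoginWithCredentials`, `Episode.GetFormat`, `NewDownloader`, `Format.Download`); the credentials, locale and thread count are placeholders, and the documentation on pkg.go.dev remains authoritative for the actual API.
```go
package main

import (
	"context"
	"log"
	"net/http"
	"os"

	"github.com/ByteDream/crunchyroll-go"
	"github.com/grafov/m3u8"
)

// downloadEpisode writes the best available format of an already-resolved
// episode into dst, without hardsubs. Sketch only; the calls mirror the ones
// used in the removed cmd package shown in this diff.
func downloadEpisode(ctx context.Context, episode *crunchyroll.Episode, dst *os.File) error {
	format, err := episode.GetFormat("best", "", false)
	if err != nil {
		return err
	}
	// 4 parallel segment downloads; the callback just logs the progress.
	downloader := crunchyroll.NewDownloader(ctx, dst, 4, func(segment *m3u8.MediaSegment, current, total int, file *os.File) error {
		log.Printf("segment %d/%d", current, total)
		return nil
	})
	return format.Download(downloader)
}

func main() {
	// Placeholder credentials and locale.
	crunchy, err := crunchyroll.LoginWithCredentials("user@example.com", "password", crunchyroll.LOCALE("en-US"), http.DefaultClient)
	if err != nil {
		log.Fatal(err)
	}
	_ = crunchy
	// Resolving a *crunchyroll.Episode from a url or search query is not shown
	// in this diff; see the wiki for the helpers the library provides for that.
}
```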
# ☝️ Disclaimer
This tool is **ONLY** meant to be used for private purposes.
To use this tool you need Crunchyroll Premium anyway, so there is no reason to rip and share the episodes.
**The responsibility for what happens to the downloaded videos lies entirely with the user who downloaded them.**
**You are entirely responsible for what happens when you use crunchy-cli.**
# ⚖ License
This project is licensed under the GNU Lesser General Public License v3.0 (LGPL-3.0) - see the [LICENSE](LICENSE) file for more details.
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for more details.

122
build.rs Normal file

@ -0,0 +1,122 @@
use clap::{Command, CommandFactory};
use clap_complete::shells;
use std::path::{Path, PathBuf};
fn main() -> std::io::Result<()> {
let rustls_tls = cfg!(feature = "rustls-tls");
let native_tls = cfg!(feature = "native-tls");
let openssl_tls = cfg!(any(feature = "openssl-tls", feature = "openssl-tls-static"));
if rustls_tls as u8 + native_tls as u8 + openssl_tls as u8 > 1 {
let active_tls_backend = if openssl_tls {
"openssl"
} else if native_tls {
"native tls"
} else {
"rustls"
};
println!("cargo:warning=Multiple tls backends are activated (through the '*-tls' features). Consider to activate only one as it is not possible to change the backend during runtime. The active backend for this build will be '{}'.", active_tls_backend)
}
// note that we're using an anti-pattern here / violating the rust conventions. build scripts are
// not supposed to write outside of 'OUT_DIR'. to have the generated files in the build "root"
// (the same directory where the output binary lives) is much simpler than in 'OUT_DIR' since
// it's nested in sub directories and is difficult to find (at least more difficult than in the
// build root)
let unconventional_out_dir =
std::path::PathBuf::from(std::env::var_os("OUT_DIR").ok_or(std::io::ErrorKind::NotFound)?)
.parent()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap()
.to_path_buf();
let completions_dir = exist_or_create_dir(unconventional_out_dir.join("completions"))?;
let manpage_dir = exist_or_create_dir(unconventional_out_dir.join("manpages"))?;
generate_completions(completions_dir)?;
generate_manpages(manpage_dir)?;
Ok(())
}
fn exist_or_create_dir(path: PathBuf) -> std::io::Result<PathBuf> {
if !path.exists() {
std::fs::create_dir(path.clone())?
}
Ok(path)
}
fn generate_completions(out_dir: PathBuf) -> std::io::Result<()> {
let mut command: Command = crunchy_cli_core::Cli::command();
clap_complete::generate_to(
shells::Bash,
&mut command.clone(),
"crunchy-cli",
out_dir.clone(),
)?;
clap_complete::generate_to(
shells::Elvish,
&mut command.clone(),
"crunchy-cli",
out_dir.clone(),
)?;
println!(
"{}",
clap_complete::generate_to(
shells::Fish,
&mut command.clone(),
"crunchy-cli",
out_dir.clone(),
)?
.to_string_lossy()
);
clap_complete::generate_to(
shells::PowerShell,
&mut command.clone(),
"crunchy-cli",
out_dir.clone(),
)?;
clap_complete::generate_to(shells::Zsh, &mut command, "crunchy-cli", out_dir)?;
Ok(())
}
fn generate_manpages(out_dir: PathBuf) -> std::io::Result<()> {
fn generate_command_manpage(
mut command: Command,
base_path: &Path,
sub_name: &str,
) -> std::io::Result<()> {
let (file_name, title) = if sub_name.is_empty() {
command = command.name("crunchy-cli");
("crunchy-cli.1".to_string(), "crunchy-cli".to_string())
} else {
command = command.name(format!("crunchy-cli {}", sub_name));
(
format!("crunchy-cli-{}.1", sub_name),
format!("crunchy-cli-{}", sub_name),
)
};
let mut command_buf = vec![];
let man = clap_mangen::Man::new(command)
.title(title)
.date(chrono::Utc::now().format("%b %d, %Y").to_string());
man.render(&mut command_buf)?;
std::fs::write(base_path.join(file_name), command_buf)
}
generate_command_manpage(crunchy_cli_core::Cli::command(), &out_dir, "")?;
generate_command_manpage(crunchy_cli_core::Archive::command(), &out_dir, "archive")?;
generate_command_manpage(crunchy_cli_core::Download::command(), &out_dir, "download")?;
generate_command_manpage(crunchy_cli_core::Login::command(), &out_dir, "login")?;
generate_command_manpage(crunchy_cli_core::Search::command(), &out_dir, "search")?;
Ok(())
}


@ -1,805 +0,0 @@
package cmd
import (
"archive/tar"
"archive/zip"
"bufio"
"bytes"
"compress/gzip"
"context"
"fmt"
"github.com/ByteDream/crunchyroll-go"
"github.com/ByteDream/crunchyroll-go/utils"
"github.com/grafov/m3u8"
"github.com/spf13/cobra"
"io"
"os"
"os/exec"
"os/signal"
"path/filepath"
"regexp"
"runtime"
"sort"
"strconv"
"strings"
"sync"
"time"
)
var (
archiveLanguagesFlag []string
archiveDirectoryFlag string
archiveOutputFlag string
archiveMergeFlag string
archiveCompressFlag string
archiveResolutionFlag string
archiveGoroutinesFlag int
)
var archiveCmd = &cobra.Command{
Use: "archive",
Short: "Stores the given videos with all subtitles and multiple audios in a .mkv file",
Args: cobra.MinimumNArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
out.Debug("Validating arguments")
if !hasFFmpeg() {
return fmt.Errorf("ffmpeg is needed to run this command correctly")
}
out.Debug("FFmpeg detected")
if filepath.Ext(archiveOutputFlag) != ".mkv" {
return fmt.Errorf("currently only matroska / .mkv files are supported")
}
for _, locale := range archiveLanguagesFlag {
if !utils.ValidateLocale(crunchyroll.LOCALE(locale)) {
// if locale is 'all', match all known locales
if locale == "all" {
archiveLanguagesFlag = allLocalesAsStrings()
break
}
return fmt.Errorf("%s is not a valid locale. Choose from: %s", locale, strings.Join(allLocalesAsStrings(), ", "))
}
}
out.Debug("Using following audio locales: %s", strings.Join(archiveLanguagesFlag, ", "))
var found bool
for _, mode := range []string{"auto", "audio", "video"} {
if archiveMergeFlag == mode {
out.Debug("Using %s merge behavior", archiveMergeFlag)
found = true
break
}
}
if !found {
return fmt.Errorf("'%s' is no valid merge flag. Use 'auto', 'audio' or 'video'", archiveMergeFlag)
}
if archiveCompressFlag != "" {
found = false
for _, algo := range []string{".tar", ".tar.gz", ".tgz", ".zip"} {
if strings.HasSuffix(archiveCompressFlag, algo) {
out.Debug("Using %s compression", algo)
found = true
break
}
}
if !found {
return fmt.Errorf("'%s' is no valid compress algorithm. Valid algorithms / file endings are '.tar', '.tar.gz', '.zip'",
archiveCompressFlag)
}
}
switch archiveResolutionFlag {
case "1080p", "720p", "480p", "360p", "240p":
intRes, _ := strconv.ParseFloat(strings.TrimSuffix(archiveResolutionFlag, "p"), 84)
archiveResolutionFlag = fmt.Sprintf("%dx%s", int(intRes*(16/9)), strings.TrimSuffix(archiveResolutionFlag, "p"))
case "1920x1080", "1280x720", "640x480", "480x360", "428x240", "best", "worst":
default:
return fmt.Errorf("'%s' is not a valid resolution", archiveResolutionFlag)
}
out.Debug("Using resolution '%s'", archiveResolutionFlag)
return nil
},
RunE: func(cmd *cobra.Command, args []string) error {
loadCrunchy()
return archive(args)
},
}
func init() {
archiveCmd.Flags().StringSliceVarP(&archiveLanguagesFlag,
"language",
"l",
[]string{string(systemLocale(false)), string(crunchyroll.JP)},
"Audio locale which should be downloaded. Can be used multiple times")
cwd, _ := os.Getwd()
archiveCmd.Flags().StringVarP(&archiveDirectoryFlag,
"directory",
"d",
cwd,
"The directory to store the files into")
archiveCmd.Flags().StringVarP(&archiveOutputFlag,
"output",
"o",
"{title}.mkv",
"Name of the output file. If you use the following things in the name, the will get replaced:\n"+
"\t{title} » Title of the video\n"+
"\t{series_name} » Name of the series\n"+
"\t{season_name} » Name of the season\n"+
"\t{season_number} » Number of the season\n"+
"\t{episode_number} » Number of the episode\n"+
"\t{resolution} » Resolution of the video\n"+
"\t{fps} » Frame Rate of the video\n"+
"\t{audio} » Audio locale of the video\n"+
"\t{subtitle} » Subtitle locale of the video")
archiveCmd.Flags().StringVarP(&archiveMergeFlag,
"merge",
"m",
"auto",
"Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio', 'video'")
archiveCmd.Flags().StringVarP(&archiveCompressFlag,
"compress",
"c",
"",
"If is set, all output will be compresses into an archive (every url generates a new one). "+
"This flag sets the name of the compressed output file. The file ending specifies the compression algorithm. "+
"The following algorithms are supported: gzip, tar, zip")
archiveCmd.Flags().StringVarP(&archiveResolutionFlag,
"resolution",
"r",
"best",
"The video resolution. Can either be specified via the pixels, the abbreviation for pixels, or 'common-use' words\n"+
"\tAvailable pixels: 1920x1080, 1280x720, 640x480, 480x360, 428x240\n"+
"\tAvailable abbreviations: 1080p, 720p, 480p, 360p, 240p\n"+
"\tAvailable common-use words: best (best available resolution), worst (worst available resolution)")
archiveCmd.Flags().IntVarP(&archiveGoroutinesFlag,
"goroutines",
"g",
runtime.NumCPU(),
"Number of parallel segment downloads")
rootCmd.AddCommand(archiveCmd)
}
func archive(urls []string) error {
for i, url := range urls {
out.SetProgress("Parsing url %d", i+1)
episodes, err := archiveExtractEpisodes(url)
if err != nil {
out.StopProgress("Failed to parse url %d", i+1)
return err
}
out.StopProgress("Parsed url %d", i+1)
var compressFile *os.File
var c compress
if archiveCompressFlag != "" {
compressFile, err = os.Create(generateFilename(archiveCompressFlag, ""))
if err != nil {
return fmt.Errorf("failed to create archive file: %v", err)
}
if strings.HasSuffix(archiveCompressFlag, ".tar") {
c = newTarCompress(compressFile)
} else if strings.HasSuffix(archiveCompressFlag, ".tar.gz") || strings.HasSuffix(archiveCompressFlag, ".tgz") {
c = newGzipCompress(compressFile)
} else if strings.HasSuffix(archiveCompressFlag, ".zip") {
c = newZipCompress(compressFile)
}
}
for _, season := range episodes {
out.Info("%s Season %d", season[0].SeriesName, season[0].SeasonNumber)
for j, info := range season {
out.Info("\t%d. %s » %spx, %.2f FPS (S%02dE%02d)",
j+1,
info.Title,
info.Resolution,
info.FPS,
info.SeasonNumber,
info.EpisodeNumber)
}
}
out.Empty()
for _, season := range episodes {
for _, info := range season {
var filename string
var writeCloser io.WriteCloser
if c != nil {
filename = info.Format(archiveOutputFlag)
writeCloser, err = c.NewFile(info)
if err != nil {
return fmt.Errorf("failed to pre generate new archive file: %v", err)
}
} else {
dir := info.Format(downloadDirectoryFlag)
if _, err = os.Stat(dir); os.IsNotExist(err) {
if err = os.MkdirAll(dir, 0777); err != nil {
return fmt.Errorf("error while creating directory: %v", err)
}
}
filename = generateFilename(info.Format(archiveOutputFlag), dir)
writeCloser, err = os.Create(filename)
if err != nil {
return fmt.Errorf("failed to create new file: %v", err)
}
}
if err = archiveInfo(info, writeCloser, filename); err != nil {
writeCloser.Close()
if f, ok := writeCloser.(*os.File); ok {
os.Remove(f.Name())
} else {
c.Close()
compressFile.Close()
os.RemoveAll(compressFile.Name())
}
return err
}
writeCloser.Close()
}
}
if c != nil {
c.Close()
}
if compressFile != nil {
compressFile.Close()
}
}
return nil
}
func archiveInfo(info formatInformation, writeCloser io.WriteCloser, filename string) error {
out.Debug("Entering season %d, episode %d with %d additional formats", info.SeasonNumber, info.EpisodeNumber, len(info.additionalFormats))
downloadProgress, err := createArchiveProgress(info)
if err != nil {
return fmt.Errorf("error while setting up downloader: %v", err)
}
defer func() {
if downloadProgress.Total != downloadProgress.Current {
fmt.Println()
}
}()
rootFile, err := os.CreateTemp("", fmt.Sprintf("%s_*.ts", strings.TrimSuffix(filepath.Base(filename), filepath.Ext(filename))))
if err != nil {
return fmt.Errorf("failed to create temp file: %v", err)
}
defer os.Remove(rootFile.Name())
defer rootFile.Close()
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
downloader := crunchyroll.NewDownloader(ctx, rootFile, downloadGoroutinesFlag, func(segment *m3u8.MediaSegment, current, total int, file *os.File) error {
// check if the context was cancelled.
// must be done in order to not print any progress messages if ctrl+c was pressed
if ctx.Err() != nil {
return nil
}
if out.IsDev() {
downloadProgress.UpdateMessage(fmt.Sprintf("Downloading %d/%d (%.2f%%) » %s", current, total, float32(current)/float32(total)*100, segment.URI), false)
} else {
downloadProgress.Update()
}
if current == total {
downloadProgress.UpdateMessage("Merging segments", false)
}
return nil
})
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
go func() {
select {
case <-sig:
signal.Stop(sig)
out.Exit("Exiting... (may take a few seconds)")
out.Exit("To force exit press ctrl+c (again)")
cancel()
// os.Exit(1) is not called since an immediate exit after the cancel function does not give
// the download process enough time to stop gracefully. A result of this is that the temporary
// directory where the segments are downloaded to will not be deleted
case <-ctx.Done():
// this is just here to end the goroutine and prevent it from running forever without a reason
}
}()
out.Debug("Set up signal catcher")
var additionalDownloaderOpts []string
var mergeMessage string
switch archiveMergeFlag {
case "auto":
additionalDownloaderOpts = []string{"-vn"}
for _, format := range info.additionalFormats {
if format.Video.Bandwidth != info.format.Video.Bandwidth {
// revoke the changed FFmpegOpts above
additionalDownloaderOpts = []string{}
break
}
}
if len(additionalDownloaderOpts) > 0 {
mergeMessage = "merging audio for additional formats"
} else {
mergeMessage = "merging video for additional formats"
}
case "audio":
additionalDownloaderOpts = []string{"-vn"}
mergeMessage = "merging audio for additional formats"
case "video":
mergeMessage = "merging video for additional formats"
}
out.Info("Downloading episode `%s` to `%s` (%s)", info.Title, filepath.Base(filename), mergeMessage)
out.Info("\tEpisode: S%02dE%02d", info.SeasonNumber, info.EpisodeNumber)
out.Info("\tAudio: %s", info.Audio)
out.Info("\tSubtitle: %s", info.Subtitle)
out.Info("\tResolution: %spx", info.Resolution)
out.Info("\tFPS: %.2f", info.FPS)
var videoFiles, audioFiles, subtitleFiles []string
defer func() {
for _, f := range append(append(videoFiles, audioFiles...), subtitleFiles...) {
os.RemoveAll(f)
}
}()
var f []string
if f, err = archiveDownloadVideos(downloader, filepath.Base(filename), true, info.format); err != nil {
if err != ctx.Err() {
return fmt.Errorf("error while downloading: %v", err)
}
return err
}
videoFiles = append(videoFiles, f[0])
if len(additionalDownloaderOpts) == 0 {
var videos []string
downloader.FFmpegOpts = additionalDownloaderOpts
if videos, err = archiveDownloadVideos(downloader, filepath.Base(filename), true, info.additionalFormats...); err != nil {
return fmt.Errorf("error while downloading additional videos: %v", err)
}
downloader.FFmpegOpts = []string{}
videoFiles = append(videoFiles, videos...)
} else {
var audios []string
if audios, err = archiveDownloadVideos(downloader, filepath.Base(filename), false, info.additionalFormats...); err != nil {
return fmt.Errorf("error while downloading additional videos: %v", err)
}
audioFiles = append(audioFiles, audios...)
}
sort.Sort(utils.SubtitlesByLocale(info.format.Subtitles))
sortSubtitles, _ := strconv.ParseBool(os.Getenv("SORT_SUBTITLES"))
if sortSubtitles && len(archiveLanguagesFlag) > 0 {
// this sort the subtitle locales after the languages which were specified
// with the `archiveLanguagesFlag` flag
for _, language := range archiveLanguagesFlag {
for i, subtitle := range info.format.Subtitles {
if subtitle.Locale == crunchyroll.LOCALE(language) {
info.format.Subtitles = append([]*crunchyroll.Subtitle{subtitle}, append(info.format.Subtitles[:i], info.format.Subtitles[i+1:]...)...)
break
}
}
}
}
var subtitles []string
if subtitles, err = archiveDownloadSubtitles(filepath.Base(filename), info.format.Subtitles...); err != nil {
return fmt.Errorf("error while downloading subtitles: %v", err)
}
subtitleFiles = append(subtitleFiles, subtitles...)
if err = archiveFFmpeg(ctx, writeCloser, videoFiles, audioFiles, subtitleFiles); err != nil {
return fmt.Errorf("failed to merge files: %v", err)
}
downloadProgress.UpdateMessage("Download finished", false)
signal.Stop(sig)
out.Debug("Stopped signal catcher")
out.Empty()
out.Empty()
return nil
}
func createArchiveProgress(info formatInformation) (*downloadProgress, error) {
var progressCount int
if err := info.format.InitVideo(); err != nil {
return nil, fmt.Errorf("error while initializing a video: %v", err)
}
// + number of segments a video has +1 is for merging
progressCount += int(info.format.Video.Chunklist.Count()) + 1
for _, f := range info.additionalFormats {
if f == info.format {
continue
}
if err := f.InitVideo(); err != nil {
return nil, err
}
// + number of segments a video has +1 is for merging
progressCount += int(f.Video.Chunklist.Count()) + 1
}
downloadProgress := &downloadProgress{
Prefix: out.InfoLog.Prefix(),
Message: "Downloading video",
// number of segments a video +1 is for the success message
Total: progressCount + 1,
Dev: out.IsDev(),
Quiet: out.IsQuiet(),
}
if out.IsDev() {
downloadProgress.Prefix = out.DebugLog.Prefix()
}
return downloadProgress, nil
}
func archiveDownloadVideos(downloader crunchyroll.Downloader, filename string, video bool, formats ...*crunchyroll.Format) ([]string, error) {
var files []string
for _, format := range formats {
var name string
if video {
name = fmt.Sprintf("%s_%s_video_*.ts", filename, format.AudioLocale)
} else {
name = fmt.Sprintf("%s_%s_audio_*.aac", filename, format.AudioLocale)
}
f, err := os.CreateTemp("", name)
if err != nil {
return nil, err
}
files = append(files, f.Name())
downloader.Writer = f
if err = format.Download(downloader); err != nil {
f.Close()
for _, file := range files {
os.Remove(file)
}
return nil, err
}
f.Close()
out.Debug("Downloaded '%s' video", format.AudioLocale)
}
return files, nil
}
func archiveDownloadSubtitles(filename string, subtitles ...*crunchyroll.Subtitle) ([]string, error) {
var files []string
for _, subtitle := range subtitles {
f, err := os.CreateTemp("", fmt.Sprintf("%s_%s_subtitle_*.ass", filename, subtitle.Locale))
if err != nil {
return nil, err
}
files = append(files, f.Name())
if err := subtitle.Save(f); err != nil {
f.Close()
for _, file := range files {
os.Remove(file)
}
return nil, err
}
f.Close()
out.Debug("Downloaded '%s' subtitles", subtitle.Locale)
}
return files, nil
}
func archiveFFmpeg(ctx context.Context, dst io.Writer, videoFiles, audioFiles, subtitleFiles []string) error {
var input, maps, metadata []string
re := regexp.MustCompile(`(?m)_([a-z]{2}-([A-Z]{2}|[0-9]{3}))_(video|audio|subtitle)`)
for i, video := range videoFiles {
input = append(input, "-i", video)
maps = append(maps, "-map", strconv.Itoa(i))
locale := crunchyroll.LOCALE(re.FindStringSubmatch(video)[1])
metadata = append(metadata, fmt.Sprintf("-metadata:s:v:%d", i), fmt.Sprintf("language=%s", utils.LocaleLanguage(locale)))
metadata = append(metadata, fmt.Sprintf("-metadata:s:a:%d", i), fmt.Sprintf("language=%s", utils.LocaleLanguage(locale)))
}
for i, audio := range audioFiles {
input = append(input, "-i", audio)
maps = append(maps, "-map", strconv.Itoa(i+len(videoFiles)))
locale := crunchyroll.LOCALE(re.FindStringSubmatch(audio)[1])
metadata = append(metadata, fmt.Sprintf("-metadata:s:a:%d", i), fmt.Sprintf("language=%s", utils.LocaleLanguage(locale)))
}
for i, subtitle := range subtitleFiles {
input = append(input, "-i", subtitle)
maps = append(maps, "-map", strconv.Itoa(i+len(videoFiles)+len(audioFiles)))
locale := crunchyroll.LOCALE(re.FindStringSubmatch(subtitle)[1])
metadata = append(metadata, fmt.Sprintf("-metadata:s:s:%d", i), fmt.Sprintf("title=%s", utils.LocaleLanguage(locale)))
}
commandOptions := []string{"-y"}
commandOptions = append(commandOptions, input...)
commandOptions = append(commandOptions, maps...)
commandOptions = append(commandOptions, metadata...)
// we have to create a temporary file here because it must be seekable
// for ffmpeg.
// ffmpeg could write to dst too, but this would require to re-encode
// the audio which results in much higher time and resource consumption
// (0-1 second with the temp file, ~20 seconds with re-encoding on my system)
file, err := os.CreateTemp("", "")
if err != nil {
return err
}
file.Close()
defer os.Remove(file.Name())
commandOptions = append(commandOptions, "-c", "copy", "-f", "matroska", file.Name())
// just a little nicer debug output to copy and paste the ffmpeg for debug reasons
if out.IsDev() {
var debugOptions []string
for _, option := range commandOptions {
if strings.HasPrefix(option, "title=") {
debugOptions = append(debugOptions, "title=\""+strings.TrimPrefix(option, "title=")+"\"")
} else if strings.HasPrefix(option, "language=") {
debugOptions = append(debugOptions, "language=\""+strings.TrimPrefix(option, "language=")+"\"")
} else if strings.Contains(option, " ") {
debugOptions = append(debugOptions, "\""+option+"\"")
} else {
debugOptions = append(debugOptions, option)
}
}
out.Debug("FFmpeg merge command: ffmpeg %s", strings.Join(debugOptions, " "))
}
var errBuf bytes.Buffer
cmd := exec.CommandContext(ctx, "ffmpeg", commandOptions...)
cmd.Stderr = &errBuf
if err = cmd.Run(); err != nil {
return fmt.Errorf(errBuf.String())
}
file, err = os.Open(file.Name())
if err != nil {
return err
}
defer file.Close()
_, err = bufio.NewWriter(dst).ReadFrom(file)
return err
}
func archiveExtractEpisodes(url string) ([][]formatInformation, error) {
var hasJapanese bool
languagesAsLocale := []crunchyroll.LOCALE{crunchyroll.JP}
for _, language := range archiveLanguagesFlag {
locale := crunchyroll.LOCALE(language)
if locale == crunchyroll.JP {
hasJapanese = true
} else {
languagesAsLocale = append(languagesAsLocale, locale)
}
}
episodes, err := extractEpisodes(url, languagesAsLocale...)
if err != nil {
return nil, err
}
if !hasJapanese && len(episodes[1:]) == 0 {
return nil, fmt.Errorf("no episodes found")
}
for i, eps := range episodes {
if len(eps) == 0 {
out.SetProgress("%s has no matching episodes", languagesAsLocale[i])
} else if len(episodes[0]) > len(eps) {
out.SetProgress("%s has %d less episodes than existing in japanese (%d)", languagesAsLocale[i], len(episodes[0])-len(eps), len(episodes[0]))
}
}
if !hasJapanese {
episodes = episodes[1:]
}
eps := make(map[int]map[int]*formatInformation)
for _, lang := range episodes {
for _, season := range utils.SortEpisodesBySeason(lang) {
if _, ok := eps[season[0].SeasonNumber]; !ok {
eps[season[0].SeasonNumber] = map[int]*formatInformation{}
}
for _, episode := range season {
format, err := episode.GetFormat(archiveResolutionFlag, "", false)
if err != nil {
return nil, fmt.Errorf("error while receiving format for %s: %v", episode.Title, err)
}
if _, ok := eps[episode.SeasonNumber][episode.EpisodeNumber]; !ok {
eps[episode.SeasonNumber][episode.EpisodeNumber] = &formatInformation{
format: format,
additionalFormats: make([]*crunchyroll.Format, 0),
Title: episode.Title,
SeriesName: episode.SeriesTitle,
SeasonName: episode.SeasonTitle,
SeasonNumber: episode.SeasonNumber,
EpisodeNumber: episode.EpisodeNumber,
Resolution: format.Video.Resolution,
FPS: format.Video.FrameRate,
Audio: format.AudioLocale,
}
} else {
eps[episode.SeasonNumber][episode.EpisodeNumber].additionalFormats = append(eps[episode.SeasonNumber][episode.EpisodeNumber].additionalFormats, format)
}
}
}
}
var infoFormat [][]formatInformation
for _, e := range eps {
var tmpFormatInfo []formatInformation
var keys []int
for episodeNumber := range e {
keys = append(keys, episodeNumber)
}
sort.Ints(keys)
for _, key := range keys {
tmpFormatInfo = append(tmpFormatInfo, *e[key])
}
infoFormat = append(infoFormat, tmpFormatInfo)
}
return infoFormat, nil
}
type compress interface {
io.Closer
NewFile(information formatInformation) (io.WriteCloser, error)
}
func newGzipCompress(file *os.File) *tarCompress {
gw := gzip.NewWriter(file)
return &tarCompress{
parent: gw,
dst: tar.NewWriter(gw),
}
}
func newTarCompress(file *os.File) *tarCompress {
return &tarCompress{
dst: tar.NewWriter(file),
}
}
type tarCompress struct {
compress
wg sync.WaitGroup
parent *gzip.Writer
dst *tar.Writer
}
func (tc *tarCompress) Close() error {
// we have to wait here in case the actual content isn't copied completely into the
// writer yet
tc.wg.Wait()
var err, err2 error
if tc.parent != nil {
err2 = tc.parent.Close()
}
err = tc.dst.Close()
if err != nil && err2 != nil {
// best way to show double errors at once that i've found
return fmt.Errorf("%v\n%v", err, err2)
} else if err == nil && err2 != nil {
err = err2
}
return err
}
func (tc *tarCompress) NewFile(information formatInformation) (io.WriteCloser, error) {
rp, wp := io.Pipe()
go func() {
tc.wg.Add(1)
defer tc.wg.Done()
var buf bytes.Buffer
io.Copy(&buf, rp)
header := &tar.Header{
Name: filepath.Join(fmt.Sprintf("S%2d", information.SeasonNumber), information.Title),
ModTime: time.Now(),
Mode: 0644,
Typeflag: tar.TypeReg,
// fun fact: i did not set the size for quite some time because i thought that it isn't
// required. well because of this i debugged this part for multiple hours because without
// proper size information only a tiny amount gets copied into the tar (or zip) writer.
// this is also the reason why the file content is completely copied into a buffer before
// writing it to the writer. i could bypass this and save some memory but this requires
// some rewriting and im nearly at the (planned) finish for version 2 so nah in the future
// maybe
Size: int64(buf.Len()),
}
tc.dst.WriteHeader(header)
io.Copy(tc.dst, &buf)
}()
return wp, nil
}
func newZipCompress(file *os.File) *zipCompress {
return &zipCompress{
dst: zip.NewWriter(file),
}
}
type zipCompress struct {
compress
wg sync.WaitGroup
dst *zip.Writer
}
func (zc *zipCompress) Close() error {
zc.wg.Wait()
return zc.dst.Close()
}
func (zc *zipCompress) NewFile(information formatInformation) (io.WriteCloser, error) {
rp, wp := io.Pipe()
go func() {
zc.wg.Add(1)
defer zc.wg.Done()
var buf bytes.Buffer
io.Copy(&buf, rp)
header := &zip.FileHeader{
Name: filepath.Join(fmt.Sprintf("S%2d", information.SeasonNumber), information.Title),
Modified: time.Now(),
Method: zip.Deflate,
UncompressedSize64: uint64(buf.Len()),
}
header.SetMode(0644)
hw, _ := zc.dst.CreateHeader(header)
io.Copy(hw, &buf)
}()
return wp, nil
}


@ -1,325 +0,0 @@
package cmd
import (
"context"
"fmt"
"github.com/ByteDream/crunchyroll-go"
"github.com/ByteDream/crunchyroll-go/utils"
"github.com/grafov/m3u8"
"github.com/spf13/cobra"
"os"
"os/signal"
"path/filepath"
"runtime"
"sort"
"strconv"
"strings"
)
var (
downloadAudioFlag string
downloadSubtitleFlag string
downloadDirectoryFlag string
downloadOutputFlag string
downloadResolutionFlag string
downloadGoroutinesFlag int
)
var getCmd = &cobra.Command{
Use: "download",
Short: "Download a video",
Args: cobra.MinimumNArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
out.Debug("Validating arguments")
if filepath.Ext(downloadOutputFlag) != ".ts" {
if !hasFFmpeg() {
return fmt.Errorf("the file ending for the output file (%s) is not `.ts`. "+
"Install ffmpeg (https://ffmpeg.org/download.html) to use other media file endings (e.g. `.mp4`)", downloadOutputFlag)
} else {
out.Debug("Custom file ending '%s' (ffmpeg is installed)", filepath.Ext(downloadOutputFlag))
}
}
if !utils.ValidateLocale(crunchyroll.LOCALE(downloadAudioFlag)) {
return fmt.Errorf("%s is not a valid audio locale. Choose from: %s", downloadAudioFlag, strings.Join(allLocalesAsStrings(), ", "))
} else if downloadSubtitleFlag != "" && !utils.ValidateLocale(crunchyroll.LOCALE(downloadSubtitleFlag)) {
return fmt.Errorf("%s is not a valid subtitle locale. Choose from: %s", downloadSubtitleFlag, strings.Join(allLocalesAsStrings(), ", "))
}
out.Debug("Locales: audio: %s / subtitle: %s", downloadAudioFlag, downloadSubtitleFlag)
switch downloadResolutionFlag {
case "1080p", "720p", "480p", "360p", "240p":
intRes, _ := strconv.ParseFloat(strings.TrimSuffix(downloadResolutionFlag, "p"), 84)
downloadResolutionFlag = fmt.Sprintf("%dx%s", int(intRes*(16/9)), strings.TrimSuffix(downloadResolutionFlag, "p"))
case "1920x1080", "1280x720", "640x480", "480x360", "428x240", "best", "worst":
default:
return fmt.Errorf("'%s' is not a valid resolution", downloadResolutionFlag)
}
out.Debug("Using resolution '%s'", downloadResolutionFlag)
return nil
},
RunE: func(cmd *cobra.Command, args []string) error {
loadCrunchy()
return download(args)
},
}
func init() {
getCmd.Flags().StringVarP(&downloadAudioFlag, "audio",
"a",
string(systemLocale(false)),
"The locale of the audio. Available locales: "+strings.Join(allLocalesAsStrings(), ", "))
getCmd.Flags().StringVarP(&downloadSubtitleFlag,
"subtitle",
"s",
"",
"The locale of the subtitle. Available locales: "+strings.Join(allLocalesAsStrings(), ", "))
cwd, _ := os.Getwd()
getCmd.Flags().StringVarP(&downloadDirectoryFlag,
"directory",
"d",
cwd,
"The directory to download the file(s) into")
getCmd.Flags().StringVarP(&downloadOutputFlag,
"output",
"o",
"{title}.ts",
"Name of the output file. "+
"If you use the following things in the name, the will get replaced:\n"+
"\t{title} » Title of the video\n"+
"\t{series_name} » Name of the series\n"+
"\t{season_name} » Name of the season\n"+
"\t{season_number} » Number of the season\n"+
"\t{episode_number} » Number of the episode\n"+
"\t{resolution} » Resolution of the video\n"+
"\t{fps} » Frame Rate of the video\n"+
"\t{audio} » Audio locale of the video\n"+
"\t{subtitle} » Subtitle locale of the video")
getCmd.Flags().StringVarP(&downloadResolutionFlag,
"resolution",
"r",
"best",
"The video resolution. Can either be specified via the pixels, the abbreviation for pixels, or 'common-use' words\n"+
"\tAvailable pixels: 1920x1080, 1280x720, 640x480, 480x360, 428x240\n"+
"\tAvailable abbreviations: 1080p, 720p, 480p, 360p, 240p\n"+
"\tAvailable common-use words: best (best available resolution), worst (worst available resolution)")
getCmd.Flags().IntVarP(&downloadGoroutinesFlag,
"goroutines",
"g",
runtime.NumCPU(),
"Sets how many parallel segment downloads should be used")
rootCmd.AddCommand(getCmd)
}
func download(urls []string) error {
for i, url := range urls {
out.SetProgress("Parsing url %d", i+1)
episodes, err := downloadExtractEpisodes(url)
if err != nil {
out.StopProgress("Failed to parse url %d", i+1)
return err
}
out.StopProgress("Parsed url %d", i+1)
for _, season := range episodes {
out.Info("%s Season %d", season[0].SeriesName, season[0].SeasonNumber)
for j, info := range season {
out.Info("\t%d. %s » %spx, %.2f FPS (S%02dE%02d)",
j+1,
info.Title,
info.Resolution,
info.FPS,
info.SeasonNumber,
info.EpisodeNumber)
}
}
out.Empty()
for _, season := range episodes {
for _, info := range season {
dir := info.Format(downloadDirectoryFlag)
if _, err = os.Stat(dir); os.IsNotExist(err) {
if err = os.MkdirAll(dir, 0777); err != nil {
return fmt.Errorf("error while creating directory: %v", err)
}
}
file, err := os.Create(generateFilename(info.Format(downloadOutputFlag), dir))
if err != nil {
return fmt.Errorf("failed to create output file: %v", err)
}
if err = downloadInfo(info, file); err != nil {
file.Close()
os.Remove(file.Name())
return err
}
file.Close()
}
}
}
return nil
}
func downloadInfo(info formatInformation, file *os.File) error {
out.Debug("Entering season %d, episode %d", info.SeasonNumber, info.EpisodeNumber)
if err := info.format.InitVideo(); err != nil {
return fmt.Errorf("error while initializing the video: %v", err)
}
downloadProgress := &downloadProgress{
Prefix: out.InfoLog.Prefix(),
Message: "Downloading video",
// number of segments a video has +2 is for merging and the success message
Total: int(info.format.Video.Chunklist.Count()) + 2,
Dev: out.IsDev(),
Quiet: out.IsQuiet(),
}
if out.IsDev() {
downloadProgress.Prefix = out.DebugLog.Prefix()
}
defer func() {
if downloadProgress.Total != downloadProgress.Current {
fmt.Println()
}
}()
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
downloader := crunchyroll.NewDownloader(ctx, file, downloadGoroutinesFlag, func(segment *m3u8.MediaSegment, current, total int, file *os.File) error {
// check if the context was cancelled.
// must be done in order to not print any progress messages if ctrl+c was pressed
if ctx.Err() != nil {
return nil
}
if out.IsDev() {
downloadProgress.UpdateMessage(fmt.Sprintf("Downloading %d/%d (%.2f%%) » %s", current, total, float32(current)/float32(total)*100, segment.URI), false)
} else {
downloadProgress.Update()
}
if current == total {
downloadProgress.UpdateMessage("Merging segments", false)
}
return nil
})
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
go func() {
select {
case <-sig:
signal.Stop(sig)
out.Exit("Exiting... (may take a few seconds)")
out.Exit("To force exit press ctrl+c (again)")
cancel()
// os.Exit(1) is not called because an immediate exit after the cancel function does not give
// the download process enough time to stop gracefully. A result of this is that the temporary
// directory where the segments are downloaded to will not be deleted
case <-ctx.Done():
// this is just here to end the goroutine and prevent it from running forever without a reason
}
}()
out.Debug("Set up signal catcher")
out.Info("Downloading episode `%s` to `%s`", info.Title, filepath.Base(file.Name()))
out.Info("\tEpisode: S%02dE%02d", info.SeasonNumber, info.EpisodeNumber)
out.Info("\tAudio: %s", info.Audio)
out.Info("\tSubtitle: %s", info.Subtitle)
out.Info("\tResolution: %spx", info.Resolution)
out.Info("\tFPS: %.2f", info.FPS)
if err := info.format.Download(downloader); err != nil {
return fmt.Errorf("error while downloading: %v", err)
}
downloadProgress.UpdateMessage("Download finished", false)
signal.Stop(sig)
out.Debug("Stopped signal catcher")
out.Empty()
out.Empty()
return nil
}
func downloadExtractEpisodes(url string) ([][]formatInformation, error) {
episodes, err := extractEpisodes(url, crunchyroll.JP, crunchyroll.LOCALE(downloadAudioFlag))
if err != nil {
return nil, err
}
japanese := episodes[0]
custom := episodes[1]
sort.Sort(utils.EpisodesByNumber(japanese))
sort.Sort(utils.EpisodesByNumber(custom))
var errMessages []string
var final []*crunchyroll.Episode
if len(japanese) == 0 || len(japanese) == len(custom) {
final = custom
} else {
for _, jp := range japanese {
before := len(final)
for _, episode := range custom {
if jp.SeasonNumber == episode.SeasonNumber && jp.EpisodeNumber == episode.EpisodeNumber {
final = append(final, episode)
}
}
if before == len(final) {
errMessages = append(errMessages, fmt.Sprintf("%s has no %s audio, using %s as fallback", jp.Title, crunchyroll.LOCALE(downloadAudioFlag), crunchyroll.JP))
final = append(final, jp)
}
}
}
if len(errMessages) > 10 {
for _, msg := range errMessages[:10] {
out.SetProgress(msg)
}
out.SetProgress("... and %d more", len(errMessages)-10)
} else {
for _, msg := range errMessages {
out.SetProgress(msg)
}
}
var infoFormat [][]formatInformation
for _, season := range utils.SortEpisodesBySeason(final) {
tmpFormatInformation := make([]formatInformation, 0)
for _, episode := range season {
format, err := episode.GetFormat(downloadResolutionFlag, crunchyroll.LOCALE(downloadSubtitleFlag), true)
if err != nil {
return nil, fmt.Errorf("error while receiving format for %s: %v", episode.Title, err)
}
tmpFormatInformation = append(tmpFormatInformation, formatInformation{
format: format,
Title: episode.Title,
SeriesName: episode.SeriesTitle,
SeasonName: episode.SeasonTitle,
SeasonNumber: episode.SeasonNumber,
EpisodeNumber: episode.EpisodeNumber,
Resolution: format.Video.Resolution,
FPS: format.Video.FrameRate,
Audio: format.AudioLocale,
})
}
infoFormat = append(infoFormat, tmpFormatInformation)
}
return infoFormat, nil
}


@ -1,193 +0,0 @@
package cmd
import (
"fmt"
"io"
"log"
"os"
"runtime"
"strings"
"sync"
"time"
)
var prefix, progressDown, progressDownFinish string
func initPrefixBecauseWindowsSucksBallsHard() {
// dear windows user, please change to a good OS, linux in the best case.
// MICROSHIT DOES NOT GET IT DONE TO SHOW THE SYMBOLS IN THE ELSE CLAUSE
// CORRECTLY. NOT IN THE CMD NOR POWERSHELL. WHY TF, IT IS ONE OF THE MOST
// PROFITABLE COMPANIES ON THIS PLANET AND CANNOT SHOW A PROPER UTF-8 SYMBOL
// IN THEIR OWN PRODUCT WHICH GETS USED MILLION TIMES A DAY
if runtime.GOOS == "windows" {
prefix = "=>"
progressDown = "|"
progressDownFinish = "->"
} else {
prefix = "➞"
progressDown = "↓"
progressDownFinish = "↳"
}
}
type progress struct {
message string
stop bool
}
type logger struct {
DebugLog *log.Logger
InfoLog *log.Logger
ErrLog *log.Logger
devView bool
progress chan progress
done chan interface{}
lock sync.Mutex
}
func newLogger(debug, info, err bool) *logger {
initPrefixBecauseWindowsSucksBallsHard()
debugLog, infoLog, errLog := log.New(io.Discard, prefix+" ", 0), log.New(io.Discard, prefix+" ", 0), log.New(io.Discard, prefix+" ", 0)
if debug {
debugLog.SetOutput(os.Stdout)
}
if info {
infoLog.SetOutput(os.Stdout)
}
if err {
errLog.SetOutput(os.Stderr)
}
if debug {
debugLog = log.New(debugLog.Writer(), "[debug] ", 0)
infoLog = log.New(infoLog.Writer(), "[info] ", 0)
errLog = log.New(errLog.Writer(), "[err] ", 0)
}
return &logger{
DebugLog: debugLog,
InfoLog: infoLog,
ErrLog: errLog,
devView: debug,
}
}
func (l *logger) IsDev() bool {
return l.devView
}
func (l *logger) IsQuiet() bool {
return l.DebugLog.Writer() == io.Discard && l.InfoLog.Writer() == io.Discard && l.ErrLog.Writer() == io.Discard
}
func (l *logger) Debug(format string, v ...interface{}) {
l.DebugLog.Printf(format, v...)
}
func (l *logger) Info(format string, v ...interface{}) {
l.InfoLog.Printf(format, v...)
}
func (l *logger) Err(format string, v ...interface{}) {
l.ErrLog.Printf(format, v...)
}
func (l *logger) Exit(format string, v ...interface{}) {
fmt.Fprintln(l.ErrLog.Writer(), fmt.Sprintf(format, v...))
}
func (l *logger) Empty() {
if !l.devView && l.InfoLog.Writer() != io.Discard {
fmt.Println("")
}
}
func (l *logger) SetProgress(format string, v ...interface{}) {
if out.InfoLog.Writer() == io.Discard {
return
} else if l.devView {
l.Debug(format, v...)
return
}
initialMessage := fmt.Sprintf(format, v...)
p := progress{
message: initialMessage,
}
l.lock.Lock()
if l.done != nil {
l.progress <- p
return
} else {
l.progress = make(chan progress, 1)
l.progress <- p
l.done = make(chan interface{})
}
go func() {
states := []string{"-", "\\", "|", "/"}
var count int
for i := 0; ; i++ {
select {
case p := <-l.progress:
if p.stop {
fmt.Printf("\r" + strings.Repeat(" ", len(prefix)+len(initialMessage)))
if count > 1 {
fmt.Printf("\r%s %s\n", progressDownFinish, p.message)
} else {
fmt.Printf("\r%s %s\n", prefix, p.message)
}
if l.done != nil {
l.done <- nil
}
l.progress = nil
l.lock.Unlock()
return
} else {
if count > 0 {
fmt.Printf("\r%s %s\n", progressDown, p.message)
}
l.progress = make(chan progress, 1)
count++
fmt.Printf("\r%s %s", states[i/10%4], initialMessage)
l.lock.Unlock()
}
default:
if i%10 == 0 {
fmt.Printf("\r%s %s", states[i/10%4], initialMessage)
}
time.Sleep(35 * time.Millisecond)
}
}
}()
}
func (l *logger) StopProgress(format string, v ...interface{}) {
if out.InfoLog.Writer() == io.Discard {
return
} else if l.devView {
l.Debug(format, v...)
return
}
l.lock.Lock()
l.progress <- progress{
message: fmt.Sprintf(format, v...),
stop: true,
}
<-l.done
l.done = nil
}


@ -1,81 +0,0 @@
package cmd
import (
"fmt"
"github.com/ByteDream/crunchyroll-go"
"github.com/spf13/cobra"
"os"
"os/user"
"path/filepath"
"runtime"
)
var (
loginPersistentFlag bool
loginSessionIDFlag bool
)
var loginCmd = &cobra.Command{
Use: "login",
Short: "Login to crunchyroll",
Args: cobra.RangeArgs(1, 2),
Run: func(cmd *cobra.Command, args []string) {
if loginSessionIDFlag {
loginSessionID(args[0])
} else {
loginCredentials(args[0], args[1])
}
},
}
func init() {
loginCmd.Flags().BoolVar(&loginPersistentFlag,
"persistent",
false,
"If the given credential should be stored persistent")
loginCmd.Flags().BoolVar(&loginSessionIDFlag,
"session-id",
false,
"Use a session id to login instead of username and password")
rootCmd.AddCommand(loginCmd)
}
func loginCredentials(user, password string) error {
out.Debug("Logging in via credentials")
if _, err := crunchyroll.LoginWithCredentials(user, password, systemLocale(false), client); err != nil {
out.Err(err.Error())
os.Exit(1)
}
return os.WriteFile(loginStorePath(), []byte(fmt.Sprintf("%s\n%s", user, password)), 0600)
}
func loginSessionID(sessionID string) error {
out.Debug("Logging in via session id")
if _, err := crunchyroll.LoginWithSessionID(sessionID, systemLocale(false), client); err != nil {
out.Err(err.Error())
os.Exit(1)
}
return os.WriteFile(loginStorePath(), []byte(sessionID), 0600)
}
func loginStorePath() string {
path := filepath.Join(os.TempDir(), ".crunchy")
if loginPersistentFlag {
if runtime.GOOS != "windows" {
usr, _ := user.Current()
path = filepath.Join(usr.HomeDir, ".config/crunchy")
}
out.Info("The login information will be stored permanently UNENCRYPTED on your drive (%s)", path)
} else if runtime.GOOS != "windows" {
out.Info("Due to security reasons, you have to login again on the next reboot")
}
return path
}

@@ -1,68 +0,0 @@
package cmd
import (
"context"
"github.com/ByteDream/crunchyroll-go"
"github.com/spf13/cobra"
"net/http"
"os"
"runtime/debug"
"strings"
)
var (
client *http.Client
crunchy *crunchyroll.Crunchyroll
out = newLogger(false, true, true)
quietFlag bool
verboseFlag bool
proxyFlag string
)
var rootCmd = &cobra.Command{
Use: "crunchyroll",
Short: "Download crunchyroll videos with ease. See the wiki for details about the cli and library: https://github.com/ByteDream/crunchyroll-go/wiki",
SilenceErrors: true,
SilenceUsage: true,
PersistentPreRunE: func(cmd *cobra.Command, args []string) (err error) {
if verboseFlag {
out = newLogger(true, true, true)
} else if quietFlag {
out = newLogger(false, false, false)
}
out.DebugLog.Printf("Executing `%s` command with %d arg(s)\n", cmd.Name(), len(args))
client, err = createOrDefaultClient(proxyFlag)
return
},
}
func init() {
rootCmd.PersistentFlags().BoolVarP(&quietFlag, "quiet", "q", false, "Disable all output")
rootCmd.PersistentFlags().BoolVarP(&verboseFlag, "verbose", "v", false, "Adds debug messages to the normal output")
rootCmd.PersistentFlags().StringVarP(&proxyFlag, "proxy", "p", "", "Proxy to use")
}
func Execute() {
rootCmd.CompletionOptions.DisableDefaultCmd = true
defer func() {
if r := recover(); r != nil {
if out.IsDev() {
out.Err("%v: %s", r, debug.Stack())
} else {
out.Err("Unexpected error: %v", r)
}
os.Exit(1)
}
}()
if err := rootCmd.Execute(); err != nil {
if !strings.HasSuffix(err.Error(), context.Canceled.Error()) {
out.Exit("An error occurred: %v", err)
}
os.Exit(1)
}
}

@@ -1,407 +0,0 @@
package cmd
import (
"fmt"
"github.com/ByteDream/crunchyroll-go"
"github.com/ByteDream/crunchyroll-go/utils"
"net/http"
"net/url"
"os"
"os/exec"
"os/user"
"path/filepath"
"reflect"
"regexp"
"runtime"
"sort"
"strconv"
"strings"
"sync"
"time"
)
var (
// ahh i love windows :)))
invalidWindowsChars = []string{"\\", "<", ">", ":", "\"", "/", "|", "?", "*"}
invalidNotWindowsChars = []string{"/"}
)
var urlFilter = regexp.MustCompile(`(S(\d+))?(E(\d+))?((-)(S(\d+))?(E(\d+))?)?(,|$)`)
// systemLocale returns the system locale
// https://stackoverflow.com/questions/51829386/golang-get-system-language/51831590#51831590
func systemLocale(verbose bool) crunchyroll.LOCALE {
if runtime.GOOS != "windows" {
if lang, ok := os.LookupEnv("LANG"); ok {
prefix := strings.Split(lang, "_")[0]
suffix := strings.Split(strings.Split(lang, ".")[0], "_")[1]
l := crunchyroll.LOCALE(fmt.Sprintf("%s-%s", prefix, suffix))
if !utils.ValidateLocale(l) {
if verbose {
out.Err("%s is not a supported locale, using %s as fallback", l, crunchyroll.US)
}
l = crunchyroll.US
}
return l
}
} else {
cmd := exec.Command("powershell", "Get-Culture | select -exp Name")
if output, err := cmd.Output(); err == nil {
l := crunchyroll.LOCALE(strings.Trim(string(output), "\r\n"))
if !utils.ValidateLocale(l) {
if verbose {
out.Err("%s is not a supported locale, using %s as fallback", l, crunchyroll.US)
}
l = crunchyroll.US
}
return l
}
}
if verbose {
out.Err("Failed to get locale, using %s", crunchyroll.US)
}
return crunchyroll.US
}
func allLocalesAsStrings() (locales []string) {
for _, locale := range utils.AllLocales {
locales = append(locales, string(locale))
}
sort.Strings(locales)
return
}
func createOrDefaultClient(proxy string) (*http.Client, error) {
if proxy == "" {
return http.DefaultClient, nil
} else {
out.Info("Using custom proxy %s", proxy)
proxyURL, err := url.Parse(proxy)
if err != nil {
return nil, err
}
client := &http.Client{
Transport: &http.Transport{
DisableCompression: true,
Proxy: http.ProxyURL(proxyURL),
},
Timeout: 30 * time.Second,
}
return client, nil
}
}
func freeFileName(filename string) (string, bool) {
ext := filepath.Ext(filename)
base := strings.TrimSuffix(filename, ext)
// checks whether a .tar precedes the "actual" file extension
if extraExt := filepath.Ext(base); extraExt == ".tar" {
ext = extraExt + ext
base = strings.TrimSuffix(base, extraExt)
}
j := 0
for ; ; j++ {
if _, stat := os.Stat(filename); stat != nil && !os.IsExist(stat) {
break
}
filename = fmt.Sprintf("%s (%d)%s", base, j+1, ext)
}
return filename, j != 0
}
func loadCrunchy() {
out.SetProgress("Logging in")
files := []string{filepath.Join(os.TempDir(), ".crunchy")}
if runtime.GOOS != "windows" {
usr, _ := user.Current()
files = append(files, filepath.Join(usr.HomeDir, ".config/crunchy"))
}
var body []byte
var err error
for _, file := range files {
if _, err = os.Stat(file); os.IsNotExist(err) {
continue
}
body, err = os.ReadFile(file)
break
}
if body == nil {
out.StopProgress("To use this command, login first. Type `%s login -h` to get help", os.Args[0])
os.Exit(1)
} else if err != nil {
out.StopProgress("Failed to read login information: %v", err)
os.Exit(1)
}
split := strings.SplitN(string(body), "\n", 2)
if len(split) == 1 || split[1] == "" {
if crunchy, err = crunchyroll.LoginWithSessionID(split[0], systemLocale(true), client); err != nil {
out.StopProgress(err.Error())
os.Exit(1)
}
out.Debug("Logged in with session id %s. BLANK THIS LINE OUT IF YOU'RE ASKED TO POST THE DEBUG OUTPUT SOMEWHERE", split[0])
} else {
if crunchy, err = crunchyroll.LoginWithCredentials(split[0], split[1], systemLocale(true), client); err != nil {
out.StopProgress(err.Error())
os.Exit(1)
}
out.Debug("Logged in with username '%s' and password '%s'. BLANK THIS LINE OUT IF YOU'RE ASKED TO POST THE DEBUG OUTPUT SOMEWHERE", split[0], split[1])
}
out.StopProgress("Logged in")
}
func hasFFmpeg() bool {
return exec.Command("ffmpeg", "-h").Run() == nil
}
func terminalWidth() int {
if runtime.GOOS != "windows" {
cmd := exec.Command("stty", "size")
cmd.Stdin = os.Stdin
res, err := cmd.Output()
if err != nil {
return 60
}
// on alpine linux the command `stty size` does not return the terminal size
// but prints something like "stty: standard input". this may also apply to other systems
splitOutput := strings.SplitN(strings.ReplaceAll(string(res), "\n", ""), " ", 2)
if len(splitOutput) == 1 {
return 60
}
width, err := strconv.Atoi(splitOutput[1])
if err != nil {
return 60
}
return width
}
return 60
}
func generateFilename(name, directory string) string {
if runtime.GOOS != "windows" {
for _, char := range invalidNotWindowsChars {
name = strings.ReplaceAll(name, char, "")
}
out.Debug("Replaced invalid characters (not windows)")
} else {
for _, char := range invalidWindowsChars {
name = strings.ReplaceAll(name, char, "")
}
out.Debug("Replaced invalid characters (windows)")
}
filename, changed := freeFileName(filepath.Join(directory, name))
if changed {
out.Debug("File `%s` already exists, changing name to `%s`", filepath.Base(name), filepath.Base(filename))
}
return filename
}
func extractEpisodes(url string, locales ...crunchyroll.LOCALE) ([][]*crunchyroll.Episode, error) {
var matches [][]string
lastOpen := strings.LastIndex(url, "[")
if strings.HasSuffix(url, "]") && lastOpen != -1 && lastOpen < len(url) {
matches = urlFilter.FindAllStringSubmatch(url[lastOpen+1:len(url)-1], -1)
var all string
for _, match := range matches {
all += match[0]
}
if all != url[lastOpen+1:len(url)-1] {
return nil, fmt.Errorf("invalid episode filter")
}
url = url[:lastOpen]
}
final := make([][]*crunchyroll.Episode, len(locales))
episodes, err := crunchy.ExtractEpisodesFromUrl(url, locales...)
if err != nil {
return nil, fmt.Errorf("failed to get episodes: %v", err)
}
if len(episodes) == 0 {
return nil, fmt.Errorf("no episodes found")
}
if matches != nil {
for _, match := range matches {
fromSeason, fromEpisode, toSeason, toEpisode := -1, -1, -1, -1
if match[2] != "" {
fromSeason, _ = strconv.Atoi(match[2])
}
if match[4] != "" {
fromEpisode, _ = strconv.Atoi(match[4])
}
if match[8] != "" {
toSeason, _ = strconv.Atoi(match[8])
}
if match[10] != "" {
toEpisode, _ = strconv.Atoi(match[10])
}
if match[6] != "-" {
toSeason = fromSeason
toEpisode = fromEpisode
}
tmpEps := make([]*crunchyroll.Episode, 0)
for _, episode := range episodes {
if fromSeason != -1 && (episode.SeasonNumber < fromSeason || (fromEpisode != -1 && episode.EpisodeNumber < fromEpisode)) {
continue
} else if fromSeason == -1 && fromEpisode != -1 && episode.EpisodeNumber < fromEpisode {
continue
} else if toSeason != -1 && (episode.SeasonNumber > toSeason || (toEpisode != -1 && episode.EpisodeNumber > toEpisode)) {
continue
} else if toSeason == -1 && toEpisode != -1 && episode.EpisodeNumber > toEpisode {
continue
} else {
tmpEps = append(tmpEps, episode)
}
}
if len(tmpEps) == 0 {
return nil, fmt.Errorf("no episodes are matching the given filter")
}
episodes = tmpEps
}
}
localeSorted, err := utils.SortEpisodesByAudio(episodes)
if err != nil {
return nil, fmt.Errorf("failed to get audio locale: %v", err)
}
for i, locale := range locales {
final[i] = append(final[i], localeSorted[locale]...)
}
return final, nil
}
type formatInformation struct {
// the format to download
format *crunchyroll.Format
// additional formats which are only used by archive.go
additionalFormats []*crunchyroll.Format
Title string `json:"title"`
SeriesName string `json:"series_name"`
SeasonName string `json:"season_name"`
SeasonNumber int `json:"season_number"`
EpisodeNumber int `json:"episode_number"`
Resolution string `json:"resolution"`
FPS float64 `json:"fps"`
Audio crunchyroll.LOCALE `json:"audio"`
Subtitle crunchyroll.LOCALE `json:"subtitle"`
}
func (fi formatInformation) Format(source string) string {
fields := reflect.TypeOf(fi)
values := reflect.ValueOf(fi)
for i := 0; i < fields.NumField(); i++ {
var valueAsString string
switch value := values.Field(i); value.Kind() {
case reflect.String:
valueAsString = value.String()
case reflect.Int:
valueAsString = fmt.Sprintf("%02d", value.Int())
case reflect.Float64:
valueAsString = fmt.Sprintf("%.2f", value.Float())
case reflect.Bool:
valueAsString = fields.Field(i).Tag.Get("json")
if !value.Bool() {
valueAsString = "no " + valueAsString
}
}
if runtime.GOOS != "windows" {
for _, char := range invalidNotWindowsChars {
valueAsString = strings.ReplaceAll(valueAsString, char, "")
}
out.Debug("Replaced invalid characters (not windows)")
} else {
for _, char := range invalidWindowsChars {
valueAsString = strings.ReplaceAll(valueAsString, char, "")
}
out.Debug("Replaced invalid characters (windows)")
}
source = strings.ReplaceAll(source, "{"+fields.Field(i).Tag.Get("json")+"}", valueAsString)
}
return source
}
type downloadProgress struct {
Prefix string
Message string
Total int
Current int
Dev bool
Quiet bool
lock sync.Mutex
}
func (dp *downloadProgress) Update() {
dp.update("", false)
}
func (dp *downloadProgress) UpdateMessage(msg string, permanent bool) {
dp.update(msg, permanent)
}
func (dp *downloadProgress) update(msg string, permanent bool) {
if dp.Quiet {
return
}
if dp.Current >= dp.Total {
return
}
dp.lock.Lock()
defer dp.lock.Unlock()
dp.Current++
if msg == "" {
msg = dp.Message
}
if permanent {
dp.Message = msg
}
if dp.Dev {
fmt.Printf("%s%s\n", dp.Prefix, msg)
return
}
percentage := float32(dp.Current) / float32(dp.Total) * 100
pre := fmt.Sprintf("%s%s [", dp.Prefix, msg)
post := fmt.Sprintf("]%4d%% %8d/%d", int(percentage), dp.Current, dp.Total)
// i don't really know why +2 is needed here but without it the Printf below would not print to the line end
progressWidth := terminalWidth() - len(pre) - len(post) + 2
repeatCount := int(percentage / float32(100) * float32(progressWidth))
// it can be lower than zero when the terminal is very tiny
if repeatCount < 0 {
repeatCount = 0
}
progressPercentage := strings.Repeat("=", repeatCount)
if dp.Current != dp.Total {
progressPercentage += ">"
}
fmt.Printf("\r%s%-"+fmt.Sprint(progressWidth)+"s%s", pre, progressPercentage, post)
}

@@ -1,9 +0,0 @@
package main
import (
"github.com/ByteDream/crunchyroll-go/cmd/crunchyroll-go/cmd"
)
func main() {
cmd.Execute()
}

@@ -0,0 +1,51 @@
[package]
name = "crunchy-cli-core"
authors = ["Crunchy Labs Maintainers"]
version = "3.6.7"
edition = "2021"
license = "MIT"
[features]
rustls-tls = ["reqwest/rustls-tls"]
native-tls = ["reqwest/native-tls", "reqwest/native-tls-alpn"]
openssl-tls = ["reqwest/native-tls", "reqwest/native-tls-alpn", "dep:rustls-native-certs"]
openssl-tls-static = ["reqwest/native-tls", "reqwest/native-tls-alpn", "reqwest/native-tls-vendored", "dep:rustls-native-certs"]
[dependencies]
anyhow = "1.0"
async-speed-limit = "0.4"
clap = { version = "4.5", features = ["derive", "string"] }
chrono = "0.4"
crunchyroll-rs = { version = "0.11.4", features = ["experimental-stabilizations", "tower"] }
ctrlc = "3.4"
dialoguer = { version = "0.11", default-features = false }
dirs = "5.0"
derive_setters = "0.1"
futures-util = { version = "0.3", features = ["io"] }
fs2 = "0.4"
http = "1.1"
indicatif = "0.17"
lazy_static = "1.4"
log = { version = "0.4", features = ["std"] }
num_cpus = "1.16"
regex = "1.10"
reqwest = { version = "0.12", features = ["socks", "stream"] }
rsubs-lib = "~0.3.2"
rusty-chromaprint = "0.2"
serde = "1.0"
serde_json = "1.0"
serde_plain = "1.0"
shlex = "1.3"
sys-locale = "0.3"
tempfile = "3.10"
time = "0.3"
tokio = { version = "1.38", features = ["io-util", "macros", "net", "rt-multi-thread", "time"] }
tokio-util = "0.7"
tower-service = "0.3"
rustls-native-certs = { version = "0.7", optional = true }
[target.'cfg(not(target_os = "windows"))'.dependencies]
nix = { version = "0.28", features = ["fs"] }
[build-dependencies]
chrono = "0.4"

crunchy-cli-core/build.rs Normal file

@@ -0,0 +1,34 @@
fn main() -> std::io::Result<()> {
println!(
"cargo:rustc-env=GIT_HASH={}",
std::env::var("CRUNCHY_CLI_GIT_HASH")
.or::<std::io::Error>(Ok(get_short_commit_hash()?.unwrap_or_default()))?
);
println!(
"cargo:rustc-env=BUILD_DATE={}",
chrono::Utc::now().format("%F")
);
Ok(())
}
fn get_short_commit_hash() -> std::io::Result<Option<String>> {
let git = std::process::Command::new("git")
.arg("rev-parse")
.arg("--short")
.arg("HEAD")
.output();
match git {
Ok(cmd) => Ok(Some(
String::from_utf8_lossy(cmd.stdout.as_slice()).to_string(),
)),
Err(e) => {
if e.kind() != std::io::ErrorKind::NotFound {
Err(e)
} else {
Ok(None)
}
}
}
}
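
The two `cargo:rustc-env` lines above turn GIT_HASH and BUILD_DATE into compile-time environment variables of the crate being built, readable with the `env!` macro. A minimal standalone sketch of the consuming side (illustrative only; in this repository the values are actually read by `version()` in crunchy-cli-core/src/lib.rs further below):

// Both values are baked in at compile time by the build script; if the build
// script did not emit them, `env!` fails at compile time rather than at runtime.
const GIT_HASH: &str = env!("GIT_HASH");
const BUILD_DATE: &str = env!("BUILD_DATE");

fn main() {
    println!("built from commit {GIT_HASH} on {BUILD_DATE}");
}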

@@ -0,0 +1,692 @@
use crate::utils::context::Context;
use crate::utils::download::{
DownloadBuilder, DownloadFormat, DownloadFormatMetadata, MergeBehavior,
};
use crate::utils::ffmpeg::FFmpegPreset;
use crate::utils::filter::{Filter, FilterMediaScope};
use crate::utils::format::{Format, SingleFormat};
use crate::utils::locale::{all_locale_in_locales, resolve_locales, LanguageTagging};
use crate::utils::log::progress;
use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
use crate::utils::parse::parse_url;
use crate::utils::video::stream_data_from_stream;
use crate::Execute;
use anyhow::bail;
use anyhow::Result;
use chrono::Duration;
use crunchyroll_rs::media::{Resolution, Subtitle};
use crunchyroll_rs::Locale;
use log::{debug, warn};
use regex::Regex;
use std::fmt::{Display, Formatter};
use std::iter::zip;
use std::ops::Sub;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
#[derive(Clone, Debug, clap::Parser)]
#[clap(about = "Archive a video")]
#[command(arg_required_else_help(true))]
pub struct Archive {
#[arg(help = format!("Audio languages. Can be used multiple times. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Audio languages. Can be used multiple times. \
Available languages are:\n {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| format!("{:<6} {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
#[arg(short, long, default_values_t = vec![Locale::ja_JP, crate::utils::locale::system_locale()])]
pub(crate) audio: Vec<Locale>,
#[arg(skip)]
output_audio_locales: Vec<String>,
#[arg(help = format!("Subtitle languages. Can be used multiple times. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Subtitle languages. Can be used multiple times. \
Available languages are: {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(short, long, default_values_t = Locale::all())]
pub(crate) subtitle: Vec<Locale>,
#[arg(skip)]
output_subtitle_locales: Vec<String>,
#[arg(help = "Name of the output file")]
#[arg(long_help = "Name of the output file. \
If you use one of the following patterns they will get replaced:\n \
{title} Title of the video\n \
{series_name} Name of the series\n \
{season_name} Name of the season\n \
{audio} Audio language of the video\n \
{width} Width of the video\n \
{height} Height of the video\n \
{season_number} Number of the season\n \
{episode_number} Number of the episode\n \
{relative_episode_number} Number of the episode relative to its season\n \
{sequence_number} Like '{episode_number}' but without possible non-number characters\n \
{relative_sequence_number} Like '{relative_episode_number}' but with support for episode 0's and .5's\n \
{release_year} Release year of the video\n \
{release_month} Release month of the video\n \
{release_day} Release day of the video\n \
{series_id} ID of the series\n \
{season_id} ID of the season\n \
{episode_id} ID of the episode")]
#[arg(short, long, default_value = "{title}.mkv")]
pub(crate) output: String,
#[arg(help = "Name of the output file if the episode is a special")]
#[arg(long_help = "Name of the output file if the episode is a special. \
If not set, the '-o'/'--output' flag will be used as name template")]
#[arg(long)]
pub(crate) output_specials: Option<String>,
#[arg(help = "Sanitize the output file for use with all operating systems. \
This option only affects template options and not static characters.")]
#[arg(long, default_value_t = false)]
pub(crate) universal_output: bool,
#[arg(help = "Video resolution")]
#[arg(long_help = "The video resolution. \
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
Specifying the exact pixels is not recommended, use one of the other options instead. \
Crunchyroll lets you choose the quality via pixel abbreviations in its clients, so you might already be familiar with the available options. \
The available common-use words are 'best' (choose the best resolution available) and 'worst' (worst resolution available)")]
#[arg(short, long, default_value = "best")]
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
pub(crate) resolution: Resolution,
#[arg(
help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'sync', 'audio' and 'video'"
)]
#[arg(
long_help = "Because of local restrictions (or other reasons) some episodes with different languages do not have the same length (e.g. when some scenes were cut out). \
With this flag you can set the behavior when handling multiple languages.
Valid options are 'audio' (stores one video and all other languages as audio only), 'video' (stores the video + audio for every language), 'auto' (detects if videos differ in length: if so, behave like 'video' else like 'audio') and 'sync' (detects if videos differ in length: if so, tries to find the offset of matching audio parts and removes it from the beginning, otherwise it behaves like 'audio')"
)]
#[arg(short, long, default_value = "auto")]
#[arg(value_parser = MergeBehavior::parse)]
pub(crate) merge: MergeBehavior,
#[arg(
help = "If the merge behavior is 'auto' or 'sync', consider videos to be of equal lengths if the difference in length is smaller than the specified milliseconds"
)]
#[arg(long, default_value_t = 200)]
pub(crate) merge_time_tolerance: u32,
#[arg(
help = "If the merge behavior is 'sync', specify the difference by which two fingerprints are considered equal, higher values can help when the algorithm fails"
)]
#[arg(long, default_value_t = 6)]
pub(crate) merge_sync_tolerance: u32,
#[arg(
help = "If the merge behavior is 'sync', specify the number of offset determination runs from which the final offset is calculated; higher values will increase the time required but lead to more precise offsets"
)]
#[arg(long, default_value_t = 4)]
pub(crate) merge_sync_precision: u32,
#[arg(
help = "Specifies which language tagging the audio and subtitle tracks and language-specific format options should use. \
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard)"
)]
#[arg(
long_help = "Specifies which language tagging the audio and subtitle tracks and language-specific format options should use. \
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard; you might run into issues as there are multiple locales which resolve to the same IETF language code, e.g. 'es-LA' and 'es-ES' both resolve to 'es')"
)]
#[arg(long)]
#[arg(value_parser = LanguageTagging::parse)]
pub(crate) language_tagging: Option<LanguageTagging>,
#[arg(help = format!("Presets for converting the video to a specific coding format. \
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
#[arg(long_help = format!("Presets for converting the video to a specific coding format. \
If you need more specific ffmpeg customizations you can pass ffmpeg output arguments instead of a preset as value. \
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
#[arg(long)]
#[arg(value_parser = FFmpegPreset::parse)]
pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
#[arg(
help = "The number of threads used by ffmpeg to generate the output file. Does not work with every codec/preset"
)]
#[arg(
long_help = "The number of threads used by ffmpeg to generate the output file. \
Does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`. \
By default, ffmpeg chooses the thread count which works best for the output codec"
)]
#[arg(long)]
pub(crate) ffmpeg_threads: Option<usize>,
#[arg(
help = "Set which subtitle language should be set as default / auto shown when starting a video"
)]
#[arg(long)]
pub(crate) default_subtitle: Option<Locale>,
#[arg(help = "Include fonts in the downloaded file")]
#[arg(long)]
pub(crate) include_fonts: bool,
#[arg(
help = "Includes chapters (e.g. intro, credits, ...). Only works if `--merge` is set to 'audio' or 'sync'"
)]
#[arg(
long_help = "Includes chapters (e.g. intro, credits, ...). Only works if `--merge` is set to 'audio' or 'sync'. \
Because chapters are essentially only special timeframes in episodes like the intro, most of the video timeline isn't covered by a chapter.
These \"gaps\" are filled with an 'Episode' chapter because many video players ignore those gaps and just assume that a chapter ends when the next chapter's start is reached, even if a specific end time is set.
Also, chapters aren't always available; in that case just a big 'Episode' chapter from start to end will be created"
)]
#[arg(long, default_value_t = false)]
pub(crate) include_chapters: bool,
#[arg(help = "Omit closed caption subtitles in the downloaded file")]
#[arg(long, default_value_t = false)]
pub(crate) no_closed_caption: bool,
#[arg(help = "Skip files which are already existing by their name")]
#[arg(long, default_value_t = false)]
pub(crate) skip_existing: bool,
#[arg(
help = "Only works in combination with `--skip-existing`. Sets the method by which already existing files should be skipped. Valid methods are 'audio' and 'subtitle'"
)]
#[arg(long_help = "Only works in combination with `--skip-existing`. \
By default, already existing files are determined by their name and the download of the corresponding episode is skipped. \
With this flag you can modify this behavior. \
Valid options are 'audio' and 'subtitle' (if the file already exists but contains fewer audio/subtitle tracks than what should be downloaded, the episode gets downloaded and the file overwritten).")]
#[arg(long, default_values_t = SkipExistingMethod::default())]
#[arg(value_parser = SkipExistingMethod::parse)]
pub(crate) skip_existing_method: Vec<SkipExistingMethod>,
#[arg(help = "Skip special episodes")]
#[arg(long, default_value_t = false)]
pub(crate) skip_specials: bool,
#[arg(help = "Skip any interactive input")]
#[arg(short, long, default_value_t = false)]
pub(crate) yes: bool,
#[arg(help = "The number of threads used to download")]
#[arg(short, long, default_value_t = num_cpus::get())]
pub(crate) threads: usize,
#[arg(help = "Crunchyroll series url(s)")]
#[arg(required = true)]
pub(crate) urls: Vec<String>,
}
impl Execute for Archive {
fn pre_check(&mut self) -> Result<()> {
if !has_ffmpeg() {
bail!("FFmpeg is needed to run this command")
} else if PathBuf::from(&self.output)
.extension()
.unwrap_or_default()
.to_string_lossy()
!= "mkv"
&& !is_special_file(&self.output)
&& self.output != "-"
{
bail!("File extension is not '.mkv'. Currently only matroska / '.mkv' files are supported")
} else if let Some(special_output) = &self.output_specials {
if PathBuf::from(special_output)
.extension()
.unwrap_or_default()
.to_string_lossy()
!= "mkv"
&& !is_special_file(special_output)
&& special_output != "-"
{
bail!("File extension for special episodes is not '.mkv'. Currently only matroska / '.mkv' files are supported")
}
}
if self.include_chapters
&& !matches!(self.merge, MergeBehavior::Sync)
&& !matches!(self.merge, MergeBehavior::Audio)
{
bail!("`--include-chapters` can only be used if `--merge` is set to 'audio' or 'sync'")
}
if !self.skip_existing_method.is_empty() && !self.skip_existing {
warn!("`--skip-existing-method` has no effect if `--skip-existing` is not set")
}
self.audio = all_locale_in_locales(self.audio.clone());
self.subtitle = all_locale_in_locales(self.subtitle.clone());
if let Some(language_tagging) = &self.language_tagging {
self.audio = resolve_locales(&self.audio);
self.subtitle = resolve_locales(&self.subtitle);
self.output_audio_locales = language_tagging.convert_locales(&self.audio);
self.output_subtitle_locales = language_tagging.convert_locales(&self.subtitle);
} else {
self.output_audio_locales = self
.audio
.clone()
.into_iter()
.map(|l| l.to_string())
.collect();
self.output_subtitle_locales = self
.subtitle
.clone()
.into_iter()
.map(|l| l.to_string())
.collect();
}
Ok(())
}
async fn execute(self, ctx: Context) -> Result<()> {
if !ctx.crunchy.premium().await {
warn!("You may not be able to download all requested videos when logging in anonymously or using a non-premium account")
}
let mut parsed_urls = vec![];
for (i, url) in self.urls.clone().into_iter().enumerate() {
let progress_handler = progress!("Parsing url {}", i + 1);
match parse_url(&ctx.crunchy, url.clone(), true).await {
Ok((media_collection, url_filter)) => {
progress_handler.stop(format!("Parsed url {}", i + 1));
parsed_urls.push((media_collection, url_filter))
}
Err(e) => bail!("url {} could not be parsed: {}", url, e),
};
}
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
let progress_handler = progress!("Fetching series details");
let single_format_collection = Filter::new(
url_filter,
self.audio.clone(),
self.subtitle.clone(),
|scope, locales| {
let audios = locales.into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ");
match scope {
FilterMediaScope::Series(series) => warn!("Series {} is not available with {} audio", series.title, audios),
FilterMediaScope::Season(season) => warn!("Season {} is not available with {} audio", season.season_number, audios),
FilterMediaScope::Episode(episodes) => {
if episodes.len() == 1 {
warn!("Episode {} is not available with {} audio", episodes[0].sequence_number, audios)
} else if episodes.len() == 2 {
warn!("Season {} is only available with {} audio from episode {} to {}", episodes[0].season_number, audios, episodes[0].sequence_number, episodes[1].sequence_number)
} else {
unimplemented!()
}
}
}
Ok(true)
},
|scope, locales| {
let subtitles = locales.into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ");
match scope {
FilterMediaScope::Series(series) => warn!("Series {} is not available with {} subtitles", series.title, subtitles),
FilterMediaScope::Season(season) => warn!("Season {} is not available with {} subtitles", season.season_number, subtitles),
FilterMediaScope::Episode(episodes) => {
if episodes.len() == 1 {
warn!("Episode {} of season {} is not available with {} subtitles", episodes[0].sequence_number, episodes[0].season_title, subtitles)
} else if episodes.len() == 2 {
warn!("Season {} ({}) is only available with {} subtitles from episode {} to {}", episodes[0].season_number, episodes[0].season_title, subtitles, episodes[0].sequence_number, episodes[1].sequence_number)
} else {
unimplemented!()
}
}
}
Ok(true)
},
|season| {
warn!("Skipping premium episodes in season {season}");
Ok(())
},
Format::has_relative_fmt(&self.output),
!self.yes,
self.skip_specials,
ctx.crunchy.premium().await,
)
.visit(media_collection)
.await?;
if single_format_collection.is_empty() {
progress_handler.stop(format!("Skipping url {} (no matching videos found)", i + 1));
continue;
}
progress_handler.stop(format!("Loaded series information for url {}", i + 1));
single_format_collection.full_visual_output();
let download_builder =
DownloadBuilder::new(ctx.client.clone(), ctx.rate_limiter.clone())
.default_subtitle(self.default_subtitle.clone())
.download_fonts(self.include_fonts)
.ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default())
.ffmpeg_threads(self.ffmpeg_threads)
.output_format(Some("matroska".to_string()))
.audio_sort(Some(self.audio.clone()))
.subtitle_sort(Some(self.subtitle.clone()))
.no_closed_caption(self.no_closed_caption)
.merge_sync_tolerance(match self.merge {
MergeBehavior::Sync => Some(self.merge_sync_tolerance),
_ => None,
})
.merge_sync_precision(match self.merge {
MergeBehavior::Sync => Some(self.merge_sync_precision),
_ => None,
})
.threads(self.threads)
.audio_locale_output_map(
zip(self.audio.clone(), self.output_audio_locales.clone()).collect(),
)
.subtitle_locale_output_map(
zip(self.subtitle.clone(), self.output_subtitle_locales.clone()).collect(),
);
for single_formats in single_format_collection.into_iter() {
let (download_formats, mut format) = get_format(&self, &single_formats).await?;
let mut downloader = download_builder.clone().build();
for download_format in download_formats {
downloader.add_format(download_format)
}
let formatted_path = if format.is_special() {
format.format_path(
self.output_specials
.as_ref()
.map_or((&self.output).into(), |so| so.into()),
self.universal_output,
self.language_tagging.as_ref(),
)
} else {
format.format_path(
(&self.output).into(),
self.universal_output,
self.language_tagging.as_ref(),
)
};
let (mut path, changed) = free_file(formatted_path.clone());
if changed && self.skip_existing {
let mut skip = true;
if !self.skip_existing_method.is_empty() {
if let Some((audio_locales, subtitle_locales)) =
get_video_streams(&formatted_path)?
{
let method_audio = self
.skip_existing_method
.contains(&SkipExistingMethod::Audio);
let method_subtitle = self
.skip_existing_method
.contains(&SkipExistingMethod::Subtitle);
let audio_differ = if method_audio {
format
.locales
.iter()
.any(|(a, _)| !audio_locales.contains(a))
} else {
false
};
let subtitle_differ = if method_subtitle {
format
.locales
.clone()
.into_iter()
.flat_map(|(a, mut s)| {
// remove the closed caption if the flag is given to omit
// closed captions
if self.no_closed_caption && a != Locale::ja_JP {
s.retain(|l| l != &a)
}
s
})
.any(|l| !subtitle_locales.contains(&l))
} else {
false
};
if (method_audio && audio_differ)
|| (method_subtitle && subtitle_differ)
{
skip = false;
path.clone_from(&formatted_path)
}
}
}
if skip {
debug!(
"Skipping already existing file '{}'",
formatted_path.to_string_lossy()
);
continue;
}
}
format.locales.sort_by(|(a, _), (b, _)| {
self.audio
.iter()
.position(|l| l == a)
.cmp(&self.audio.iter().position(|l| l == b))
});
for (_, subtitles) in format.locales.iter_mut() {
subtitles.sort_by(|a, b| {
self.subtitle
.iter()
.position(|l| l == a)
.cmp(&self.subtitle.iter().position(|l| l == b))
})
}
format.visual_output(&path);
downloader.download(&path).await?
}
}
Ok(())
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) enum SkipExistingMethod {
Audio,
Subtitle,
}
impl Display for SkipExistingMethod {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let value = match self {
SkipExistingMethod::Audio => "audio",
SkipExistingMethod::Subtitle => "subtitle",
};
write!(f, "{}", value)
}
}
impl SkipExistingMethod {
fn parse(s: &str) -> Result<Self, String> {
match s.to_lowercase().as_str() {
"audio" => Ok(Self::Audio),
"subtitle" => Ok(Self::Subtitle),
_ => Err(format!("invalid skip existing method '{}'", s)),
}
}
fn default<'a>() -> &'a [Self] {
&[]
}
}
async fn get_format(
archive: &Archive,
single_formats: &Vec<SingleFormat>,
) -> Result<(Vec<DownloadFormat>, Format)> {
let mut format_pairs = vec![];
let mut single_format_to_format_pairs = vec![];
for single_format in single_formats {
let stream = single_format.stream().await?;
let Some((video, audio, _)) =
stream_data_from_stream(&stream, &archive.resolution, None).await?
else {
if single_format.is_episode() {
bail!(
"Resolution ({}) is not available for episode {} ({}) of {} season {}",
archive.resolution,
single_format.episode_number,
single_format.title,
single_format.series_name,
single_format.season_number,
)
} else {
bail!(
"Resolution ({}) is not available for {} ({})",
archive.resolution,
single_format.source_type(),
single_format.title
)
}
};
let subtitles: Vec<(Subtitle, bool)> = archive
.subtitle
.iter()
.flat_map(|s| {
let mut subtitles = vec![];
if let Some(caption) = stream.captions.get(s) {
subtitles.push((caption.clone(), true))
}
if let Some(subtitle) = stream.subtitles.get(s) {
// the subtitle is probably cc if the audio is not japanese and only one subtitle
// exists for this stream
let cc = single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1;
// only include the subtitles if no cc subtitle is already present or if it's
// not cc
if subtitles.is_empty() || !cc {
subtitles.push((subtitle.clone(), cc))
}
}
subtitles
})
.collect();
format_pairs.push((single_format, video.clone(), audio, subtitles.clone()));
single_format_to_format_pairs.push((single_format.clone(), video, subtitles));
stream.invalidate().await?
}
let mut download_formats = vec![];
match archive.merge {
MergeBehavior::Video => {
for (single_format, video, audio, subtitles) in format_pairs {
download_formats.push(DownloadFormat {
video: (video, single_format.audio.clone()),
audios: vec![(audio, single_format.audio.clone())],
subtitles,
metadata: DownloadFormatMetadata { skip_events: None },
})
}
}
MergeBehavior::Audio => download_formats.push(DownloadFormat {
video: (
format_pairs.first().unwrap().1.clone(),
format_pairs.first().unwrap().0.audio.clone(),
),
audios: format_pairs
.iter()
.map(|(single_format, _, audio, _)| (audio.clone(), single_format.audio.clone()))
.collect(),
// mix all subtitles together and then reduce them via a map so that only one subtitle
// per language exists
subtitles: format_pairs
.iter()
.flat_map(|(_, _, _, subtitles)| subtitles.clone())
.collect(),
metadata: DownloadFormatMetadata {
skip_events: if archive.include_chapters {
format_pairs.first().unwrap().0.skip_events().await?
} else {
None
},
},
}),
MergeBehavior::Auto | MergeBehavior::Sync => {
let mut d_formats: Vec<(Duration, DownloadFormat)> = vec![];
for (single_format, video, audio, subtitles) in format_pairs {
let closest_format = d_formats.iter_mut().min_by(|(x, _), (y, _)| {
x.sub(single_format.duration)
.abs()
.cmp(&y.sub(single_format.duration).abs())
});
match closest_format {
Some(closest_format)
if closest_format
.0
.sub(single_format.duration)
.abs()
.num_milliseconds()
< archive.merge_time_tolerance.into() =>
{
// If less than `--merge-time-tolerance` milliseconds apart, reuse the closest format and only add the audio.
closest_format
.1
.audios
.push((audio, single_format.audio.clone()));
closest_format.1.subtitles.extend(subtitles);
}
_ => {
d_formats.push((
single_format.duration,
DownloadFormat {
video: (video, single_format.audio.clone()),
audios: vec![(audio, single_format.audio.clone())],
subtitles,
metadata: DownloadFormatMetadata {
skip_events: if archive.include_chapters {
single_format.skip_events().await?
} else {
None
},
},
},
));
}
};
}
for (_, d_format) in d_formats.into_iter() {
download_formats.push(d_format);
}
}
}
Ok((
download_formats,
Format::from_single_formats(single_format_to_format_pairs),
))
}
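
The MergeBehavior::Auto / MergeBehavior::Sync branch above attaches an audio track to the closest already-collected download format when the episode durations differ by less than --merge-time-tolerance (200 ms by default) and starts a new format otherwise. A simplified standalone sketch of that grouping step, using plain millisecond values instead of DownloadFormat (hypothetical helper, not code from this diff):

// Values join the group whose first ("representative") duration is closest,
// but only if the gap is below the tolerance; otherwise they start a new group.
fn group_by_duration(durations: &[i64], tolerance_ms: i64) -> Vec<Vec<i64>> {
    let mut reps: Vec<i64> = Vec::new(); // representative duration of each group
    let mut groups: Vec<Vec<i64>> = Vec::new();
    for &d in durations {
        // Index of the group whose representative duration is closest to `d`.
        let closest = (0..reps.len()).min_by_key(|&i| (reps[i] - d).abs());
        match closest {
            Some(i) if (reps[i] - d).abs() < tolerance_ms => groups[i].push(d),
            _ => {
                reps.push(d);
                groups.push(vec![d]);
            }
        }
    }
    groups
}

fn main() {
    // 1_440_000 ms and 1_440_150 ms are merged (150 ms < 200 ms tolerance); the
    // third duration is too far off and would become its own DownloadFormat.
    let grouped = group_by_duration(&[1_440_000, 1_440_150, 1_505_000], 200);
    assert_eq!(grouped, vec![vec![1_440_000, 1_440_150], vec![1_505_000]]);
}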
fn get_video_streams(path: &Path) -> Result<Option<(Vec<Locale>, Vec<Locale>)>> {
let video_streams =
Regex::new(r"(?m)Stream\s#\d+:\d+\((?P<language>.+)\):\s(?P<type>(Audio|Subtitle))")
.unwrap();
let ffmpeg = Command::new("ffmpeg")
.stdout(Stdio::null())
.stderr(Stdio::piped())
.arg("-hide_banner")
.args(["-i", &path.to_string_lossy()])
.output()?;
let ffmpeg_output = String::from_utf8(ffmpeg.stderr)?;
let mut audio = vec![];
let mut subtitle = vec![];
for cap in video_streams.captures_iter(&ffmpeg_output) {
let locale = cap.name("language").unwrap().as_str();
let type_ = cap.name("type").unwrap().as_str();
match type_ {
"Audio" => audio.push(Locale::from(locale.to_string())),
"Subtitle" => subtitle.push(Locale::from(locale.to_string())),
_ => unreachable!(),
}
}
if audio.is_empty() && subtitle.is_empty() {
Ok(None)
} else {
Ok(Some((audio, subtitle)))
}
}
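
get_video_streams relies on the 'Stream #x:y(lang): Type' lines that ffmpeg prints to stderr for an input file. A standalone sketch of the same regex run against hand-written sample lines (the sample only resembles real ffmpeg output, and the snippet assumes the regex crate from the Cargo.toml above):

use regex::Regex;

fn main() {
    // Same pattern as in get_video_streams above.
    let re = Regex::new(
        r"(?m)Stream\s#\d+:\d+\((?P<language>.+)\):\s(?P<type>(Audio|Subtitle))",
    )
    .unwrap();

    // Hand-written sample resembling ffmpeg's `-i` stderr output.
    let sample = "Stream #0:0(jpn): Video: h264 (High)\n\
                  Stream #0:1(jpn): Audio: aac (LC)\n\
                  Stream #0:2(eng): Subtitle: ass";

    for cap in re.captures_iter(sample) {
        // The video line has no Audio/Subtitle type and is therefore skipped.
        println!("{} -> {}", &cap["type"], &cap["language"]);
    }
    // Prints:
    //   Audio -> jpn
    //   Subtitle -> eng
}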

@@ -0,0 +1,3 @@
mod command;
pub use command::Archive;

@@ -0,0 +1,483 @@
use crate::utils::context::Context;
use crate::utils::download::{DownloadBuilder, DownloadFormat, DownloadFormatMetadata};
use crate::utils::ffmpeg::{FFmpegPreset, SOFTSUB_CONTAINERS};
use crate::utils::filter::{Filter, FilterMediaScope};
use crate::utils::format::{Format, SingleFormat};
use crate::utils::locale::{resolve_locales, LanguageTagging};
use crate::utils::log::progress;
use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
use crate::utils::parse::parse_url;
use crate::utils::video::stream_data_from_stream;
use crate::Execute;
use anyhow::bail;
use anyhow::Result;
use crunchyroll_rs::media::Resolution;
use crunchyroll_rs::Locale;
use log::{debug, error, warn};
use std::collections::HashMap;
use std::path::Path;
#[derive(Clone, Debug, clap::Parser)]
#[clap(about = "Download a video")]
#[command(arg_required_else_help(true))]
pub struct Download {
#[arg(help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
Available languages are:\n {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| format!("{:<6} {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
#[arg(short, long, default_value_t = crate::utils::locale::system_locale())]
pub(crate) audio: Locale,
#[arg(skip)]
output_audio_locale: String,
#[arg(help = format!("Subtitle language. Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Subtitle language. If set, the subtitle will be burned into the video and cannot be disabled. \
Available languages are: {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(short, long)]
pub(crate) subtitle: Option<Locale>,
#[arg(skip)]
output_subtitle_locale: String,
#[arg(help = "Name of the output file")]
#[arg(long_help = "Name of the output file. \
If you use one of the following patterns they will get replaced:\n \
{title} Title of the video\n \
{series_name} Name of the series\n \
{season_name} Name of the season\n \
{audio} Audio language of the video\n \
{width} Width of the video\n \
{height} Height of the video\n \
{season_number} Number of the season\n \
{episode_number} Number of the episode\n \
{relative_episode_number} Number of the episode relative to its season\n \
{sequence_number} Like '{episode_number}' but without possible non-number characters\n \
{relative_sequence_number} Like '{relative_episode_number}' but with support for episode 0's and .5's\n \
{release_year} Release year of the video\n \
{release_month} Release month of the video\n \
{release_day} Release day of the video\n \
{series_id} ID of the series\n \
{season_id} ID of the season\n \
{episode_id} ID of the episode")]
#[arg(short, long, default_value = "{title}.mp4")]
pub(crate) output: String,
#[arg(help = "Name of the output file if the episode is a special")]
#[arg(long_help = "Name of the output file if the episode is a special. \
If not set, the '-o'/'--output' flag will be used as name template")]
#[arg(long)]
pub(crate) output_specials: Option<String>,
#[arg(help = "Sanitize the output file for use with all operating systems. \
This option only affects template options and not static characters.")]
#[arg(long, default_value_t = false)]
pub(crate) universal_output: bool,
#[arg(help = "Video resolution")]
#[arg(long_help = "The video resolution. \
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
Specifying the exact pixels is not recommended, use one of the other options instead. \
Crunchyroll lets you choose the quality via pixel abbreviations in its clients, so you might already be familiar with the available options. \
The available common-use words are 'best' (choose the best resolution available) and 'worst' (worst resolution available)")]
#[arg(short, long, default_value = "best")]
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
pub(crate) resolution: Resolution,
#[arg(
long,
help = "Specifies which language tagging the audio and subtitle tracks and language-specific format options should use. \
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard)"
)]
#[arg(
long_help = "Specifies which language tagging the audio and subtitle tracks and language-specific format options should use. \
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard; you might run into issues as there are multiple locales which resolve to the same IETF language code, e.g. 'es-LA' and 'es-ES' both resolve to 'es')"
)]
#[arg(value_parser = LanguageTagging::parse)]
pub(crate) language_tagging: Option<LanguageTagging>,
#[arg(help = format!("Presets for converting the video to a specific coding format. \
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
#[arg(long_help = format!("Presets for converting the video to a specific coding format. \
If you need more specific ffmpeg customizations you can pass ffmpeg output arguments instead of a preset as value. \
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
#[arg(long)]
#[arg(value_parser = FFmpegPreset::parse)]
pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
#[arg(
help = "The number of threads used by ffmpeg to generate the output file. Does not work with every codec/preset"
)]
#[arg(
long_help = "The number of threads used by ffmpeg to generate the output file. \
Does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`. \
By default, ffmpeg chooses the thread count which works best for the output codec"
)]
#[arg(long)]
pub(crate) ffmpeg_threads: Option<usize>,
#[arg(help = "Skip files which are already existing by their name")]
#[arg(long, default_value_t = false)]
pub(crate) skip_existing: bool,
#[arg(help = "Skip special episodes")]
#[arg(long, default_value_t = false)]
pub(crate) skip_specials: bool,
#[arg(help = "Includes chapters (e.g. intro, credits, ...)")]
#[arg(long_help = "Includes chapters (e.g. intro, credits, ...). \
Because chapters are essentially only special timeframes in episodes like the intro, most of the video timeline isn't covered by a chapter.
These \"gaps\" are filled with an 'Episode' chapter because many video players ignore those gaps and just assume that a chapter ends when the next chapter's start is reached, even if a specific end time is set.
Also, chapters aren't always available; in that case just a big 'Episode' chapter from start to end will be created")]
#[arg(long, default_value_t = false)]
pub(crate) include_chapters: bool,
#[arg(help = "Skip any interactive input")]
#[arg(short, long, default_value_t = false)]
pub(crate) yes: bool,
#[arg(help = "Force subtitles to be always burnt-in")]
#[arg(long, default_value_t = false)]
pub(crate) force_hardsub: bool,
#[arg(help = "The number of threads used to download")]
#[arg(short, long, default_value_t = num_cpus::get())]
pub(crate) threads: usize,
#[arg(help = "Url(s) to Crunchyroll episodes or series")]
#[arg(required = true)]
pub(crate) urls: Vec<String>,
}
impl Execute for Download {
fn pre_check(&mut self) -> Result<()> {
if !has_ffmpeg() {
bail!("FFmpeg is needed to run this command")
} else if Path::new(&self.output)
.extension()
.unwrap_or_default()
.is_empty()
&& !is_special_file(&self.output)
&& self.output != "-"
{
bail!("No file extension found. Please specify a file extension (via `-o`) for the output file")
}
if self.subtitle.is_some() {
if let Some(ext) = Path::new(&self.output).extension() {
if self.force_hardsub {
warn!("Hardsubs are forced. Adding subtitles may take a while")
} else if !["mkv", "mov", "mp4"].contains(&ext.to_string_lossy().as_ref()) {
warn!("Detected a container which does not support softsubs. Adding subtitles may take a while")
}
}
}
if let Some(special_output) = &self.output_specials {
if Path::new(special_output)
.extension()
.unwrap_or_default()
.is_empty()
&& !is_special_file(special_output)
&& special_output != "-"
{
bail!("No file extension found. Please specify a file extension (via `--output-specials`) for the output file")
}
if let Some(ext) = Path::new(special_output).extension() {
if self.force_hardsub {
warn!("Hardsubs are forced for special episodes. Adding subtitles may take a while")
} else if !["mkv", "mov", "mp4"].contains(&ext.to_string_lossy().as_ref()) {
warn!("Detected a container which does not support softsubs. Adding subtitles for special episodes may take a while")
}
}
}
if let Some(language_tagging) = &self.language_tagging {
self.audio = resolve_locales(&[self.audio.clone()]).remove(0);
self.subtitle = self
.subtitle
.as_ref()
.map(|s| resolve_locales(&[s.clone()]).remove(0));
self.output_audio_locale = language_tagging.for_locale(&self.audio);
self.output_subtitle_locale = self
.subtitle
.as_ref()
.map(|s| language_tagging.for_locale(s))
.unwrap_or_default()
} else {
self.output_audio_locale = self.audio.to_string();
self.output_subtitle_locale = self
.subtitle
.as_ref()
.map(|s| s.to_string())
.unwrap_or_default();
}
Ok(())
}
async fn execute(self, ctx: Context) -> Result<()> {
if !ctx.crunchy.premium().await {
warn!("You may not be able to download all requested videos when logging in anonymously or using a non-premium account")
}
let mut parsed_urls = vec![];
let output_supports_softsubs = SOFTSUB_CONTAINERS.contains(
&Path::new(&self.output)
.extension()
.unwrap_or_default()
.to_string_lossy()
.as_ref(),
);
let special_output_supports_softsubs = if let Some(so) = &self.output_specials {
SOFTSUB_CONTAINERS.contains(
&Path::new(so)
.extension()
.unwrap_or_default()
.to_string_lossy()
.as_ref(),
)
} else {
output_supports_softsubs
};
for (i, url) in self.urls.clone().into_iter().enumerate() {
let progress_handler = progress!("Parsing url {}", i + 1);
match parse_url(&ctx.crunchy, url.clone(), true).await {
Ok((media_collection, url_filter)) => {
progress_handler.stop(format!("Parsed url {}", i + 1));
parsed_urls.push((media_collection, url_filter))
}
Err(e) => bail!("url {} could not be parsed: {}", url, e),
};
}
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
let progress_handler = progress!("Fetching series details");
let single_format_collection = Filter::new(
url_filter,
vec![self.audio.clone()],
self.subtitle.as_ref().map_or(vec![], |s| vec![s.clone()]),
|scope, locales| {
match scope {
FilterMediaScope::Series(series) => bail!("Series {} is not available with {} audio", series.title, locales[0]),
FilterMediaScope::Season(season) => {
error!("Season {} is not available with {} audio", season.season_number, locales[0]);
Ok(false)
}
FilterMediaScope::Episode(episodes) => {
if episodes.len() == 1 {
warn!("Episode {} of season {} is not available with {} audio", episodes[0].sequence_number, episodes[0].season_title, locales[0])
} else if episodes.len() == 2 {
warn!("Season {} is only available with {} audio from episode {} to {}", episodes[0].season_number, locales[0], episodes[0].sequence_number, episodes[1].sequence_number)
} else {
unimplemented!()
}
Ok(false)
}
}
},
|scope, locales| {
match scope {
FilterMediaScope::Series(series) => bail!("Series {} is not available with {} subtitles", series.title, locales[0]),
FilterMediaScope::Season(season) => {
warn!("Season {} is not available with {} subtitles", season.season_number, locales[0]);
Ok(false)
},
FilterMediaScope::Episode(episodes) => {
if episodes.len() == 1 {
warn!("Episode {} of season {} is not available with {} subtitles", episodes[0].sequence_number, episodes[0].season_title, locales[0])
} else if episodes.len() == 2 {
warn!("Season {} is only available with {} subtitles from episode {} to {}", episodes[0].season_number, locales[0], episodes[0].sequence_number, episodes[1].sequence_number)
} else {
unimplemented!()
}
Ok(false)
}
}
},
|season| {
warn!("Skipping premium episodes in season {season}");
Ok(())
},
Format::has_relative_fmt(&self.output),
!self.yes,
self.skip_specials,
ctx.crunchy.premium().await,
)
.visit(media_collection)
.await?;
if single_format_collection.is_empty() {
progress_handler.stop(format!("Skipping url {} (no matching videos found)", i + 1));
continue;
}
progress_handler.stop(format!("Loaded series information for url {}", i + 1));
single_format_collection.full_visual_output();
let download_builder =
DownloadBuilder::new(ctx.client.clone(), ctx.rate_limiter.clone())
.default_subtitle(self.subtitle.clone())
.force_hardsub(self.force_hardsub)
.output_format(if is_special_file(&self.output) || self.output == "-" {
Some("mpegts".to_string())
} else {
None
})
.ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default())
.ffmpeg_threads(self.ffmpeg_threads)
.threads(self.threads)
.audio_locale_output_map(HashMap::from([(
self.audio.clone(),
self.output_audio_locale.clone(),
)]))
.subtitle_locale_output_map(
self.subtitle.as_ref().map_or(HashMap::new(), |s| {
HashMap::from([(s.clone(), self.output_subtitle_locale.clone())])
}),
);
for mut single_formats in single_format_collection.into_iter() {
// the vec always contains only one item
let single_format = single_formats.remove(0);
let (download_format, format) = get_format(
&self,
&single_format,
if self.force_hardsub {
true
} else if single_format.is_special() {
!special_output_supports_softsubs
} else {
!output_supports_softsubs
},
)
.await?;
let mut downloader = download_builder.clone().build();
downloader.add_format(download_format);
let formatted_path = if format.is_special() {
format.format_path(
self.output_specials
.as_ref()
.map_or((&self.output).into(), |so| so.into()),
self.universal_output,
self.language_tagging.as_ref(),
)
} else {
format.format_path(
(&self.output).into(),
self.universal_output,
self.language_tagging.as_ref(),
)
};
let (path, changed) = free_file(formatted_path.clone());
if changed && self.skip_existing {
debug!(
"Skipping already existing file '{}'",
formatted_path.to_string_lossy()
);
continue;
}
format.visual_output(&path);
downloader.download(&path).await?
}
}
Ok(())
}
}
async fn get_format(
download: &Download,
single_format: &SingleFormat,
try_peer_hardsubs: bool,
) -> Result<(DownloadFormat, Format)> {
let stream = single_format.stream().await?;
let Some((video, audio, contains_hardsub)) = stream_data_from_stream(
&stream,
&download.resolution,
if try_peer_hardsubs {
download.subtitle.clone()
} else {
None
},
)
.await?
else {
if single_format.is_episode() {
bail!(
"Resolution ({}) is not available for episode {} ({}) of {} season {}",
download.resolution,
single_format.episode_number,
single_format.title,
single_format.series_name,
single_format.season_number,
)
} else {
bail!(
"Resolution ({}) is not available for {} ({})",
download.resolution,
single_format.source_type(),
single_format.title
)
}
};
let subtitle = if contains_hardsub {
None
} else if let Some(subtitle_locale) = &download.subtitle {
if download.audio == Locale::ja_JP {
stream
.subtitles
.get(subtitle_locale)
// use closed captions as fallback if no actual subtitles are found
.or_else(|| stream.captions.get(subtitle_locale))
.cloned()
} else {
stream
.captions
.get(subtitle_locale)
.or_else(|| stream.subtitles.get(subtitle_locale))
.cloned()
}
} else {
None
};
let download_format = DownloadFormat {
video: (video.clone(), single_format.audio.clone()),
audios: vec![(audio, single_format.audio.clone())],
subtitles: subtitle.clone().map_or(vec![], |s| {
vec![(
s,
single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1,
)]
}),
metadata: DownloadFormatMetadata {
skip_events: if download.include_chapters {
single_format.skip_events().await?
} else {
None
},
},
};
let mut format = Format::from_single_formats(vec![(
single_format.clone(),
video,
subtitle.map_or(vec![], |s| {
vec![(
s,
single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1,
)]
}),
)]);
if contains_hardsub {
let (_, subs) = format.locales.get_mut(0).unwrap();
subs.push(download.subtitle.clone().unwrap())
}
stream.invalidate().await?;
Ok((download_format, format))
}

@@ -0,0 +1,3 @@
mod command;
pub use command::Download;

crunchy-cli-core/src/lib.rs Normal file

@@ -0,0 +1,405 @@
use crate::utils::context::Context;
use crate::utils::locale::system_locale;
use crate::utils::log::{progress, CliLogger};
use anyhow::bail;
use anyhow::Result;
use clap::{Parser, Subcommand};
use crunchyroll_rs::crunchyroll::CrunchyrollBuilder;
use crunchyroll_rs::error::Error;
use crunchyroll_rs::{Crunchyroll, Locale};
use log::{debug, error, warn, LevelFilter};
use reqwest::{Client, Proxy};
use std::{env, fs};
mod archive;
mod download;
mod login;
mod search;
mod utils;
use crate::utils::rate_limit::RateLimiterService;
pub use archive::Archive;
use dialoguer::console::Term;
pub use download::Download;
pub use login::Login;
pub use search::Search;
trait Execute {
fn pre_check(&mut self) -> Result<()> {
Ok(())
}
async fn execute(self, ctx: Context) -> Result<()>;
}
#[derive(Debug, Parser)]
#[clap(author, version = version(), about)]
#[clap(name = "crunchy-cli")]
pub struct Cli {
#[clap(flatten)]
verbosity: Verbosity,
#[arg(
help = "Override the language in which results are returned. Default is your system language"
)]
#[arg(global = true, long)]
lang: Option<Locale>,
#[arg(
help = "Enable experimental fixes which may resolve some unexpected errors. Generally not recommended as this flag may crash the program completely"
)]
#[arg(
long_help = "Enable experimental fixes which may resolve some unexpected errors. \
It is not recommended to use this flag regularly, as it might cause unexpected errors which may crash the program completely. \
If everything works as intended this option isn't needed, but sometimes Crunchyroll mislabels \
the audio of a series/season or episode or returns a wrong season number; in these cases this option might help to resolve the issue"
)]
#[arg(global = true, long, default_value_t = false)]
experimental_fixes: bool,
#[clap(flatten)]
login_method: login::LoginMethod,
#[arg(help = "Use a proxy to route all traffic through")]
#[arg(long_help = "Use a proxy to route all traffic through. \
Make sure that the proxy can either forward TLS requests, which is needed to bypass the (cloudflare) bot protection, or that it is configured so that the proxy can bypass the protection itself. \
Besides specifying a simple url, you can also partially control where a proxy should be used: '<url>:' only proxies api requests, ':<url>' only proxies download traffic, '<url>:<url>' proxies api requests through the first url and download traffic through the second url")]
#[arg(global = true, long, value_parser = crate::utils::clap::clap_parse_proxies)]
proxy: Option<(Option<Proxy>, Option<Proxy>)>,
#[arg(help = "Use custom user agent")]
#[arg(global = true, long)]
user_agent: Option<String>,
#[arg(
help = "Maximal speed to download/request (may be a bit off here and there). Must be in format of <number>[B|KB|MB]"
)]
#[arg(
long_help = "Maximal speed to download/request (may be a bit off here and there). Must be in format of <number>[B|KB|MB] (e.g. 500KB or 10MB)"
)]
#[arg(global = true, long, value_parser = crate::utils::clap::clap_parse_speed_limit)]
speed_limit: Option<u32>,
#[clap(subcommand)]
command: Command,
}
fn version() -> String {
let package_version = env!("CARGO_PKG_VERSION");
let git_commit_hash = env!("GIT_HASH");
let build_date = env!("BUILD_DATE");
if git_commit_hash.is_empty() {
package_version.to_string()
} else {
format!("{} ({} {})", package_version, git_commit_hash, build_date)
}
}
#[derive(Debug, Subcommand)]
enum Command {
Archive(Archive),
Download(Download),
Login(Login),
Search(Search),
}
#[derive(Debug, Parser)]
struct Verbosity {
#[arg(help = "Verbose output")]
#[arg(global = true, short, long)]
verbose: bool,
#[arg(help = "Quiet output. Does not print anything unless it's a error")]
#[arg(
long_help = "Quiet output. Does not print anything unless it's a error. Can be helpful if you pipe the output to stdout"
)]
#[arg(global = true, short, long)]
quiet: bool,
}
pub async fn main(args: &[String]) {
let mut cli: Cli = Cli::parse_from(args);
if cli.verbosity.verbose || cli.verbosity.quiet {
if cli.verbosity.verbose && cli.verbosity.quiet {
eprintln!("Output cannot be verbose ('-v') and quiet ('-q') at the same time");
std::process::exit(1)
} else if cli.verbosity.verbose {
CliLogger::init(LevelFilter::Debug).unwrap()
} else if cli.verbosity.quiet {
CliLogger::init(LevelFilter::Error).unwrap()
}
} else {
CliLogger::init(LevelFilter::Info).unwrap()
}
debug!("cli input: {:?}", cli);
match &mut cli.command {
Command::Archive(archive) => {
// prevent the interactive select from being shown when the output should be quiet
if cli.verbosity.quiet {
archive.yes = true;
}
pre_check_executor(archive).await
}
Command::Download(download) => {
// prevent the interactive select from being shown when the output should be quiet
if cli.verbosity.quiet {
download.yes = true;
}
pre_check_executor(download).await
}
Command::Login(login) => {
if login.remove {
if let Some(session_file) = login::session_file_path() {
let _ = fs::remove_file(session_file);
}
return;
} else {
pre_check_executor(login).await
}
}
Command::Search(search) => pre_check_executor(search).await,
};
let ctx = match create_ctx(&mut cli).await {
Ok(ctx) => ctx,
Err(e) => {
error!("{}", e);
std::process::exit(1)
}
};
debug!("Created context");
ctrlc::set_handler(move || {
debug!("Ctrl-c detected");
if let Ok(dir) = fs::read_dir(env::temp_dir()) {
for file in dir.flatten() {
if file
.path()
.file_name()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.starts_with(".crunchy-cli_")
{
if file.file_type().map_or(true, |ft| ft.is_file()) {
let result = fs::remove_file(file.path());
debug!(
"Ctrl-c removed temporary file {} {}",
file.path().to_string_lossy(),
if result.is_ok() {
"successfully"
} else {
"not successfully"
}
)
} else {
let result = fs::remove_dir_all(file.path());
debug!(
"Ctrl-c removed temporary directory {} {}",
file.path().to_string_lossy(),
if result.is_ok() {
"successfully"
} else {
"not successfully"
}
)
}
}
}
}
// when pressing ctrl-c while interactively choosing seasons the cursor stays hidden, this
// line shows it again
let _ = Term::stdout().show_cursor();
std::process::exit(1)
})
.unwrap();
debug!("Created ctrl-c handler");
match cli.command {
Command::Archive(archive) => execute_executor(archive, ctx).await,
Command::Download(download) => execute_executor(download, ctx).await,
Command::Login(login) => execute_executor(login, ctx).await,
Command::Search(search) => execute_executor(search, ctx).await,
};
}
async fn pre_check_executor(executor: &mut impl Execute) {
if let Err(err) = executor.pre_check() {
error!("Misconfigurations detected: {}", err);
std::process::exit(1)
}
}
async fn execute_executor(executor: impl Execute, ctx: Context) {
if let Err(mut err) = executor.execute(ctx).await {
if let Some(crunchy_error) = err.downcast_mut::<Error>() {
if let Error::Block { message, .. } = crunchy_error {
*message = "Triggered Cloudflare bot protection. Try again later or use a VPN or proxy to spoof your location".to_string()
}
error!("An error occurred: {}", crunchy_error)
} else {
error!("An error occurred: {}", err)
}
std::process::exit(1)
}
}
async fn create_ctx(cli: &mut Cli) -> Result<Context> {
let crunchy_client = reqwest_client(
cli.proxy.as_ref().and_then(|p| p.0.clone()),
cli.user_agent.clone(),
);
let internal_client = reqwest_client(
cli.proxy.as_ref().and_then(|p| p.1.clone()),
cli.user_agent.clone(),
);
let crunchy = crunchyroll_session(
cli,
crunchy_client.clone(),
cli.speed_limit
.map(|l| RateLimiterService::new(l, crunchy_client)),
)
.await?;
Ok(Context {
crunchy,
client: internal_client.clone(),
rate_limiter: cli
.speed_limit
.map(|l| RateLimiterService::new(l, internal_client)),
})
}
async fn crunchyroll_session(
cli: &mut Cli,
client: Client,
rate_limiter: Option<RateLimiterService>,
) -> Result<Crunchyroll> {
let supported_langs = vec![
Locale::ar_ME,
Locale::de_DE,
Locale::en_US,
Locale::es_ES,
Locale::es_419,
Locale::fr_FR,
Locale::it_IT,
Locale::pt_BR,
Locale::pt_PT,
Locale::ru_RU,
];
let locale = if let Some(lang) = &cli.lang {
if !supported_langs.contains(lang) {
bail!(
"Via `--lang` specified language is not supported. Supported languages: {}",
supported_langs
.iter()
.map(|l| format!("`{}` ({})", l, l.to_human_readable()))
.collect::<Vec<String>>()
.join(", ")
)
}
lang.clone()
} else {
let mut lang = system_locale();
if !supported_langs.contains(&lang) {
warn!("Recognized system locale is not supported. Using en-US as default. Use `--lang` to overwrite the used language");
lang = Locale::en_US
}
lang
};
let mut builder = Crunchyroll::builder()
.locale(locale)
.client(client.clone())
.stabilization_locales(cli.experimental_fixes)
.stabilization_season_number(cli.experimental_fixes);
if let Command::Download(download) = &cli.command {
builder = builder.preferred_audio_locale(download.audio.clone())
}
if let Some(rate_limiter) = rate_limiter {
builder = builder.middleware(rate_limiter)
}
let root_login_methods_count =
cli.login_method.credentials.is_some() as u8 + cli.login_method.anonymous as u8;
let progress_handler = progress!("Logging in");
if root_login_methods_count == 0 {
if let Some(login_file_path) = login::session_file_path() {
if login_file_path.exists() {
let session = fs::read_to_string(login_file_path)?;
if let Some((token_type, token)) = session.split_once(':') {
match token_type {
"refresh_token" => {
return match builder.login_with_refresh_token(token).await {
Ok(crunchy) => Ok(crunchy),
Err(e) => {
if let Error::Request { message, .. } = &e {
if message.starts_with("invalid_grant") {
bail!("The stored login is expired, please login again")
}
}
Err(e.into())
}
}
}
"etp_rt" => bail!("The stored login method (etp-rt) isn't supported anymore. Please login again using your credentials"),
_ => (),
}
}
bail!("Could not read stored session ('{}')", session)
}
}
bail!("Please use a login method ('--credentials' or '--anonymous')")
} else if root_login_methods_count > 1 {
bail!("Please use only one login method ('--credentials' or '--anonymous')")
}
let crunchy = if let Some(credentials) = &cli.login_method.credentials {
if let Some((email, password)) = credentials.split_once(':') {
builder.login_with_credentials(email, password).await?
} else {
bail!("Invalid credentials format. Please provide your credentials as email:password")
}
} else if cli.login_method.anonymous {
builder.login_anonymously().await?
} else {
bail!("should never happen")
};
progress_handler.stop("Logged in");
Ok(crunchy)
}
fn reqwest_client(proxy: Option<Proxy>, user_agent: Option<String>) -> Client {
let mut builder = CrunchyrollBuilder::predefined_client_builder();
if let Some(p) = proxy {
builder = builder.proxy(p)
}
if let Some(ua) = user_agent {
builder = builder.user_agent(ua)
}
#[cfg(any(feature = "openssl-tls", feature = "openssl-tls-static"))]
let client = {
let mut builder = builder.use_native_tls().tls_built_in_root_certs(false);
for certificate in rustls_native_certs::load_native_certs().unwrap() {
builder =
builder.add_root_certificate(reqwest::Certificate::from_der(&certificate).unwrap())
}
builder.build().unwrap()
};
#[cfg(not(any(feature = "openssl-tls", feature = "openssl-tls-static")))]
let client = builder.build().unwrap();
client
}


@ -0,0 +1,55 @@
use crate::utils::context::Context;
use crate::Execute;
use anyhow::bail;
use anyhow::Result;
use clap::Parser;
use crunchyroll_rs::crunchyroll::SessionToken;
use log::info;
use std::fs;
use std::path::PathBuf;
#[derive(Debug, clap::Parser)]
#[clap(about = "Save your login credentials persistent on disk")]
pub struct Login {
#[arg(help = "Remove your stored credentials (instead of saving them)")]
#[arg(long)]
pub remove: bool,
}
impl Execute for Login {
async fn execute(self, ctx: Context) -> Result<()> {
if let Some(login_file_path) = session_file_path() {
fs::create_dir_all(login_file_path.parent().unwrap())?;
match ctx.crunchy.session_token().await {
SessionToken::RefreshToken(refresh_token) => {
fs::write(login_file_path, format!("refresh_token:{}", refresh_token))?
}
SessionToken::EtpRt(_) => bail!("Login with etp_rt isn't supported anymore. Please use your credentials to log in"),
SessionToken::Anonymous => bail!("Anonymous login cannot be saved"),
}
info!("Saved login");
Ok(())
} else {
bail!("Cannot find config path")
}
}
}
#[derive(Clone, Debug, Parser)]
pub struct LoginMethod {
#[arg(
help = "Login with credentials (email and password). Must be provided as email:password"
)]
#[arg(global = true, long)]
pub credentials: Option<String>,
#[arg(help = "Login anonymously / without an account")]
#[arg(global = true, long, default_value_t = false)]
pub anonymous: bool,
}
pub fn session_file_path() -> Option<PathBuf> {
dirs::config_dir().map(|config_dir| config_dir.join("crunchy-cli").join("session"))
}
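// A minimal sketch of what the functions above produce: the session is persisted
// as a single `<token type>:<token>` line (e.g. `refresh_token:<token>`, as written
// in `execute`) inside `<config dir>/crunchy-cli/session`.
#[cfg(test)]
mod tests {
    use super::session_file_path;

    // `dirs::config_dir()` may legitimately return `None` on unusual setups,
    // so the assertion only runs when a config directory is available.
    #[test]
    fn session_file_lives_in_config_dir() {
        if let Some(path) = session_file_path() {
            assert!(path.ends_with("crunchy-cli/session"));
        }
    }
}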


@ -0,0 +1,3 @@
mod command;
pub use command::{session_file_path, Login, LoginMethod};


@ -0,0 +1,222 @@
use crate::search::filter::FilterOptions;
use crate::search::format::Format;
use crate::utils::context::Context;
use crate::utils::parse::{parse_url, UrlFilter};
use crate::Execute;
use anyhow::{bail, Result};
use crunchyroll_rs::common::StreamExt;
use crunchyroll_rs::search::QueryResults;
use crunchyroll_rs::{Episode, Locale, MediaCollection, MovieListing, MusicVideo, Series};
use log::warn;
use std::sync::Arc;
#[derive(Debug, clap::Parser)]
#[clap(about = "Search in videos")]
#[command(arg_required_else_help(true))]
pub struct Search {
#[arg(help = format!("Audio languages to include. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Audio languages to include. \
Available languages are:\n {}", Locale::all().into_iter().map(|l| format!("{:<6} {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
#[arg(long, default_values_t = vec![crate::utils::locale::system_locale()])]
audio: Vec<Locale>,
#[arg(help = "Limit of search top search results")]
#[arg(long, default_value_t = 5)]
search_top_results_limit: u32,
#[arg(help = "Limit of search series results")]
#[arg(long, default_value_t = 0)]
search_series_limit: u32,
#[arg(help = "Limit of search movie listing results")]
#[arg(long, default_value_t = 0)]
search_movie_listing_limit: u32,
#[arg(help = "Limit of search episode results")]
#[arg(long, default_value_t = 0)]
search_episode_limit: u32,
#[arg(help = "Limit of search music results")]
#[arg(long, default_value_t = 0)]
search_music_limit: u32,
/// Format of the output text.
///
/// You can specify keywords in a specific pattern and they will get replaced in the output text.
/// The required pattern for this begins with `{{`, then the keyword, and closes with `}}` (e.g. `{{episode.title}}`).
/// For example, if you want to get the title of an episode, you can use `Title {{episode.title}}` and `{{episode.title}}` will be replaced with the episode title
///
/// See the following list for all keywords and their meaning:
/// series.id → Series id
/// series.title → Series title
/// series.description → Series description
/// series.release_year → Series release year
///
/// season.id → Season id
/// season.title → Season title
/// season.description → Season description
/// season.number → Season number
/// season.episodes → Number of episodes the season has
///
/// episode.id → Episode id
/// episode.title → Episode title
/// episode.description → Episode description
/// episode.locale → Episode locale/language
/// episode.number → Episode number
/// episode.sequence_number → Episode number. This number is unique unlike `episode.number` which sometimes can be duplicated
/// episode.duration → Episode duration in milliseconds
/// episode.air_date → Episode air date as unix timestamp
/// episode.premium_only → If the episode is only available with Crunchyroll premium
///
/// movie_listing.id → Movie listing id
/// movie_listing.title → Movie listing title
/// movie_listing.description → Movie listing description
///
/// movie.id → Movie id
/// movie.title → Movie title
/// movie.description → Movie description
/// movie.duration → Movie duration in milliseconds
/// movie.premium_only → If the movie is only available with Crunchyroll premium
///
/// music_video.id → Music video id
/// music_video.title → Music video title
/// music_video.description → Music video description
/// music_video.duration → Music video duration in milliseconds
/// music_video.premium_only → If the music video is only available with Crunchyroll premium
///
/// concert.id → Concert id
/// concert.title → Concert title
/// concert.description → Concert description
/// concert.duration → Concert duration in milliseconds
/// concert.premium_only → If the concert is only available with Crunchyroll premium
///
/// stream.locale → Stream locale/language
/// stream.dash_url → Stream url in DASH format. You need to set the `Authorization` header to `Bearer <account.token>` when requesting this url
/// stream.is_drm → If `stream.dash_url` is DRM encrypted
///
/// subtitle.locale → Subtitle locale/language
/// subtitle.url → Url to the subtitle
///
/// account.token → Access token to make request to restricted endpoints. This token is only valid for a max. of 5 minutes
/// account.id → Internal ID of the user account
/// account.profile_name → Profile name of the account
/// account.email → Email address of the account
#[arg(short, long, verbatim_doc_comment)]
#[arg(default_value = "S{{season.number}}E{{episode.number}} - {{episode.title}}")]
output: String,
input: String,
}
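// Illustration with made-up values: `--output "{{series.title}}: S{{season.number}}E{{episode.number}} - {{episode.title}}"`
// would print a line like `Some Series: S2E11 - Some Episode Title` for every matching episode.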
impl Execute for Search {
async fn execute(self, ctx: Context) -> Result<()> {
if !ctx.crunchy.premium().await {
warn!("Using `search` anonymously or with a non-premium account may return incomplete results")
}
if self.output.contains("{{stream.is_drm}}") {
warn!("The `{{{{stream.is_drm}}}}` option is deprecated as it isn't reliable anymore and will be removed soon")
}
let input = if crunchyroll_rs::parse::parse_url(&self.input).is_some() {
match parse_url(&ctx.crunchy, self.input.clone(), true).await {
Ok(ok) => vec![ok],
Err(e) => bail!("url {} could not be parsed: {}", self.input, e),
}
} else {
let mut output = vec![];
let query = resolve_query(&self, ctx.crunchy.query(&self.input)).await?;
output.extend(query.0.into_iter().map(|m| (m, UrlFilter::default())));
output.extend(
query
.1
.into_iter()
.map(|s| (s.into(), UrlFilter::default())),
);
output.extend(
query
.2
.into_iter()
.map(|m| (m.into(), UrlFilter::default())),
);
output.extend(
query
.3
.into_iter()
.map(|e| (e.into(), UrlFilter::default())),
);
output.extend(
query
.4
.into_iter()
.map(|m| (m.into(), UrlFilter::default())),
);
output
};
let crunchy_arc = Arc::new(ctx.crunchy);
for (media_collection, url_filter) in input {
let filter_options = FilterOptions {
audio: self.audio.clone(),
url_filter,
};
let format = Format::new(self.output.clone(), filter_options, crunchy_arc.clone())?;
println!("{}", format.parse(media_collection).await?);
}
Ok(())
}
}
macro_rules! resolve_query {
($limit:expr, $vec:expr, $item:expr) => {
if $limit > 0 {
let mut item_results = $item;
while let Some(item) = item_results.next().await {
$vec.push(item?);
if $vec.len() >= $limit as usize {
break;
}
}
}
};
}
async fn resolve_query(
search: &Search,
query_results: QueryResults,
) -> Result<(
Vec<MediaCollection>,
Vec<Series>,
Vec<MovieListing>,
Vec<Episode>,
Vec<MusicVideo>,
)> {
let mut media_collection = vec![];
let mut series = vec![];
let mut movie_listing = vec![];
let mut episode = vec![];
let mut music_video = vec![];
resolve_query!(
search.search_top_results_limit,
media_collection,
query_results.top_results
);
resolve_query!(search.search_series_limit, series, query_results.series);
resolve_query!(
search.search_movie_listing_limit,
movie_listing,
query_results.movie_listing
);
resolve_query!(search.search_episode_limit, episode, query_results.episode);
resolve_query!(search.search_music_limit, music_video, query_results.music);
Ok((
media_collection,
series,
movie_listing,
episode,
music_video,
))
}


@ -0,0 +1,47 @@
use crate::utils::parse::UrlFilter;
use crunchyroll_rs::{Episode, Locale, MovieListing, Season, Series};
pub struct FilterOptions {
pub audio: Vec<Locale>,
pub url_filter: UrlFilter,
}
impl FilterOptions {
pub fn check_series(&self, series: &Series) -> bool {
self.check_audio_language(&series.audio_locales)
}
pub fn filter_seasons(&self, mut seasons: Vec<Season>) -> Vec<Season> {
seasons.retain(|s| {
self.check_audio_language(&s.audio_locales)
&& self.url_filter.is_season_valid(s.season_number)
});
seasons
}
pub fn filter_episodes(&self, mut episodes: Vec<Episode>) -> Vec<Episode> {
episodes.retain(|e| {
self.check_audio_language(&[e.audio_locale.clone()])
&& self
.url_filter
.is_episode_valid(e.sequence_number, e.season_number)
});
episodes
}
pub fn check_movie_listing(&self, movie_listing: &MovieListing) -> bool {
self.check_audio_language(
&movie_listing
.audio_locale
.clone()
.map_or(vec![], |a| vec![a.clone()]),
)
}
fn check_audio_language(&self, audio: &[Locale]) -> bool {
if !self.audio.is_empty() {
return self.audio.iter().any(|a| audio.contains(a));
}
true
}
}


@ -0,0 +1,687 @@
use crate::search::filter::FilterOptions;
use anyhow::{bail, Result};
use crunchyroll_rs::media::{Stream, Subtitle};
use crunchyroll_rs::{
Concert, Crunchyroll, Episode, Locale, MediaCollection, Movie, MovieListing, MusicVideo,
Season, Series,
};
use regex::Regex;
use serde::Serialize;
use serde_json::{Map, Value};
use std::collections::HashMap;
use std::ops::Range;
use std::sync::Arc;
#[derive(Default, Serialize)]
struct FormatSeries {
pub id: String,
pub title: String,
pub description: String,
pub release_year: u32,
}
impl From<&Series> for FormatSeries {
fn from(value: &Series) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
description: value.description.clone(),
release_year: value.series_launch_year.unwrap_or_default(),
}
}
}
#[derive(Default, Serialize)]
struct FormatSeason {
pub id: String,
pub title: String,
pub description: String,
pub number: u32,
pub episodes: u32,
}
impl From<&Season> for FormatSeason {
fn from(value: &Season) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
description: value.description.clone(),
number: value.season_number,
episodes: value.number_of_episodes,
}
}
}
#[derive(Default, Serialize)]
struct FormatEpisode {
pub id: String,
pub title: String,
pub description: String,
pub locale: Locale,
pub number: u32,
pub sequence_number: f32,
pub duration: i64,
pub air_date: i64,
pub premium_only: bool,
}
impl From<&Episode> for FormatEpisode {
fn from(value: &Episode) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
description: value.description.clone(),
locale: value.audio_locale.clone(),
number: value.episode_number.unwrap_or_default(),
sequence_number: value.sequence_number,
duration: value.duration.num_milliseconds(),
air_date: value.episode_air_date.timestamp(),
premium_only: value.is_premium_only,
}
}
}
#[derive(Default, Serialize)]
struct FormatMovieListing {
pub id: String,
pub title: String,
pub description: String,
}
impl From<&MovieListing> for FormatMovieListing {
fn from(value: &MovieListing) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
description: value.description.clone(),
}
}
}
#[derive(Default, Serialize)]
struct FormatMovie {
pub id: String,
pub title: String,
pub description: String,
pub duration: i64,
pub premium_only: bool,
}
impl From<&Movie> for FormatMovie {
fn from(value: &Movie) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
description: value.description.clone(),
duration: value.duration.num_milliseconds(),
premium_only: value.is_premium_only,
}
}
}
#[derive(Default, Serialize)]
struct FormatMusicVideo {
pub id: String,
pub title: String,
pub description: String,
pub duration: i64,
pub premium_only: bool,
}
impl From<&MusicVideo> for FormatMusicVideo {
fn from(value: &MusicVideo) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
description: value.description.clone(),
duration: value.duration.num_milliseconds(),
premium_only: value.is_premium_only,
}
}
}
#[derive(Default, Serialize)]
struct FormatConcert {
pub id: String,
pub title: String,
pub description: String,
pub duration: i64,
pub premium_only: bool,
}
impl From<&Concert> for FormatConcert {
fn from(value: &Concert) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
description: value.description.clone(),
duration: value.duration.num_milliseconds(),
premium_only: value.is_premium_only,
}
}
}
#[derive(Default, Serialize)]
struct FormatStream {
pub locale: Locale,
pub dash_url: String,
pub is_drm: bool,
}
impl From<&Stream> for FormatStream {
fn from(value: &Stream) -> Self {
Self {
locale: value.audio_locale.clone(),
dash_url: value.url.clone(),
is_drm: false,
}
}
}
#[derive(Default, Serialize)]
struct FormatSubtitle {
pub locale: Locale,
pub url: String,
}
impl From<&Subtitle> for FormatSubtitle {
fn from(value: &Subtitle) -> Self {
Self {
locale: value.locale.clone(),
url: value.url.clone(),
}
}
}
#[derive(Default, Serialize)]
struct FormatAccount {
pub token: String,
pub id: String,
pub profile_name: String,
pub email: String,
}
impl FormatAccount {
pub async fn async_from(value: &Crunchyroll) -> Result<Self> {
let account = value.account().await?;
Ok(Self {
token: value.access_token().await,
id: account.account_id,
profile_name: account.profile_name,
email: account.email,
})
}
}
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
enum Scope {
Series,
Season,
Episode,
MovieListing,
Movie,
MusicVideo,
Concert,
Stream,
Subtitle,
Account,
}
macro_rules! must_match_if_true {
($condition:expr => $media_collection:ident | $field:pat => $expr:expr) => {
if $condition {
match &$media_collection {
$field => Some($expr),
_ => panic!(),
}
} else {
None
}
};
}
pub struct Format {
pattern: Vec<(Range<usize>, Scope, String)>,
pattern_count: HashMap<Scope, u32>,
input: String,
filter_options: FilterOptions,
crunchyroll: Arc<Crunchyroll>,
}
impl Format {
pub fn new(
input: String,
filter_options: FilterOptions,
crunchyroll: Arc<Crunchyroll>,
) -> Result<Self> {
let scope_regex = Regex::new(r"(?m)\{\{\s*(?P<scope>\w+)\.(?P<field>\w+)\s*}}").unwrap();
let mut pattern = vec![];
let mut pattern_count = HashMap::new();
macro_rules! generate_field_check {
($($scope:expr => $struct_:ident)+) => {
HashMap::from([
$(
(
$scope,
serde_json::from_value::<Map<String, Value>>(serde_json::to_value($struct_::default()).unwrap()).unwrap()
)
),+
])
};
}
let field_check = generate_field_check!(
Scope::Series => FormatSeries
Scope::Season => FormatSeason
Scope::Episode => FormatEpisode
Scope::MovieListing => FormatMovieListing
Scope::Movie => FormatMovie
Scope::MusicVideo => FormatMusicVideo
Scope::Concert => FormatConcert
Scope::Stream => FormatStream
Scope::Subtitle => FormatSubtitle
Scope::Account => FormatAccount
);
for capture in scope_regex.captures_iter(&input) {
let full = capture.get(0).unwrap();
let scope = capture.name("scope").unwrap().as_str();
let field = capture.name("field").unwrap().as_str();
let format_pattern_scope = match scope {
"series" => Scope::Series,
"season" => Scope::Season,
"episode" => Scope::Episode,
"movie_listing" => Scope::MovieListing,
"movie" => Scope::Movie,
"music_video" => Scope::MusicVideo,
"concert" => Scope::Concert,
"stream" => Scope::Stream,
"subtitle" => Scope::Subtitle,
"account" => Scope::Account,
_ => bail!("'{}.{}' is not a valid keyword", scope, field),
};
if field_check
.get(&format_pattern_scope)
.unwrap()
.get(field)
.is_none()
{
bail!("'{}.{}' is not a valid keyword", scope, field)
}
pattern.push((
full.start()..full.end(),
format_pattern_scope.clone(),
field.to_string(),
));
*pattern_count.entry(format_pattern_scope).or_default() += 1
}
Ok(Self {
pattern,
pattern_count,
input,
filter_options,
crunchyroll,
})
}
pub async fn parse(&self, media_collection: MediaCollection) -> Result<String> {
match &media_collection {
MediaCollection::Series(_)
| MediaCollection::Season(_)
| MediaCollection::Episode(_) => {
self.check_scopes(vec![
Scope::Series,
Scope::Season,
Scope::Episode,
Scope::Stream,
Scope::Subtitle,
Scope::Account,
])?;
self.parse_series(media_collection).await
}
MediaCollection::MovieListing(_) | MediaCollection::Movie(_) => {
self.check_scopes(vec![
Scope::MovieListing,
Scope::Movie,
Scope::Stream,
Scope::Subtitle,
Scope::Account,
])?;
self.parse_movie_listing(media_collection).await
}
MediaCollection::MusicVideo(_) => {
self.check_scopes(vec![
Scope::MusicVideo,
Scope::Stream,
Scope::Subtitle,
Scope::Account,
])?;
self.parse_music_video(media_collection).await
}
MediaCollection::Concert(_) => {
self.check_scopes(vec![
Scope::Concert,
Scope::Stream,
Scope::Subtitle,
Scope::Account,
])?;
self.parse_concert(media_collection).await
}
}
}
async fn parse_series(&self, media_collection: MediaCollection) -> Result<String> {
let series_empty = self.check_pattern_count_empty(Scope::Series);
let season_empty = self.check_pattern_count_empty(Scope::Season);
let episode_empty = self.check_pattern_count_empty(Scope::Episode);
let stream_empty = self.check_pattern_count_empty(Scope::Stream)
&& self.check_pattern_count_empty(Scope::Subtitle);
let account_empty = self.check_pattern_count_empty(Scope::Account);
#[allow(clippy::type_complexity)]
let mut tree: Vec<(Season, Vec<(Episode, Vec<Stream>)>)> = vec![];
let series = if !series_empty {
let series = match &media_collection {
MediaCollection::Series(series) => series.clone(),
MediaCollection::Season(season) => season.series().await?,
MediaCollection::Episode(episode) => episode.series().await?,
_ => panic!(),
};
if !self.filter_options.check_series(&series) {
return Ok("".to_string());
}
series
} else {
Series::default()
};
if !season_empty || !episode_empty || !stream_empty {
let tmp_seasons = match &media_collection {
MediaCollection::Series(series) => series.seasons().await?,
MediaCollection::Season(season) => vec![season.clone()],
MediaCollection::Episode(_) => vec![],
_ => panic!(),
};
let mut seasons = vec![];
for season in tmp_seasons {
seasons.push(season.clone());
for version in season.versions {
if season.id == version.id {
continue;
}
if self.filter_options.audio.contains(&version.audio_locale) {
seasons.push(version.season().await?)
}
}
}
tree.extend(
self.filter_options
.filter_seasons(seasons)
.into_iter()
.map(|s| (s, vec![])),
)
} else {
tree.push((Season::default(), vec![]))
}
if !episode_empty || !stream_empty {
match &media_collection {
MediaCollection::Episode(episode) => {
let mut episodes = vec![episode.clone()];
for version in &episode.versions {
if episode.id == version.id {
continue;
}
if self.filter_options.audio.contains(&version.audio_locale) {
episodes.push(version.episode().await?)
}
}
tree.push((
Season::default(),
episodes
.into_iter()
.filter(|e| self.filter_options.audio.contains(&e.audio_locale))
.map(|e| (e, vec![]))
.collect(),
))
}
_ => {
for (season, episodes) in tree.iter_mut() {
episodes.extend(
self.filter_options
.filter_episodes(season.episodes().await?)
.into_iter()
.map(|e| (e, vec![])),
)
}
}
};
} else {
for (_, episodes) in tree.iter_mut() {
episodes.push((Episode::default(), vec![]))
}
}
if !stream_empty {
for (_, episodes) in tree.iter_mut() {
for (episode, streams) in episodes {
let stream = episode.stream_maybe_without_drm().await?;
stream.clone().invalidate().await?;
streams.push(stream)
}
}
} else {
for (_, episodes) in tree.iter_mut() {
for (_, streams) in episodes {
streams.push(Stream::default())
}
}
}
let mut output = vec![];
let account_map = if !account_empty {
self.serializable_to_json_map(FormatAccount::async_from(&self.crunchyroll).await?)
} else {
Map::default()
};
let series_map = self.serializable_to_json_map(FormatSeries::from(&series));
for (season, episodes) in tree {
let season_map = self.serializable_to_json_map(FormatSeason::from(&season));
for (episode, streams) in episodes {
let episode_map = self.serializable_to_json_map(FormatEpisode::from(&episode));
for stream in streams {
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));
output.push(
self.replace_all(
HashMap::from([
(Scope::Account, &account_map),
(Scope::Series, &series_map),
(Scope::Season, &season_map),
(Scope::Episode, &episode_map),
(Scope::Stream, &stream_map),
]),
stream,
)
.unwrap_or_default(),
)
}
}
}
Ok(output.join("\n"))
}
async fn parse_movie_listing(&self, media_collection: MediaCollection) -> Result<String> {
let movie_listing_empty = self.check_pattern_count_empty(Scope::MovieListing);
let movie_empty = self.check_pattern_count_empty(Scope::Movie);
let stream_empty = self.check_pattern_count_empty(Scope::Stream);
let mut tree: Vec<(Movie, Vec<Stream>)> = vec![];
let movie_listing = if !movie_listing_empty {
let movie_listing = match &media_collection {
MediaCollection::MovieListing(movie_listing) => movie_listing.clone(),
MediaCollection::Movie(movie) => movie.movie_listing().await?,
_ => panic!(),
};
if !self.filter_options.check_movie_listing(&movie_listing) {
return Ok("".to_string());
}
movie_listing
} else {
MovieListing::default()
};
if !movie_empty || !stream_empty {
let movies = match &media_collection {
MediaCollection::MovieListing(movie_listing) => movie_listing.movies().await?,
MediaCollection::Movie(movie) => vec![movie.clone()],
_ => panic!(),
};
tree.extend(movies.into_iter().map(|m| (m, vec![])))
}
if !stream_empty {
for (movie, streams) in tree.iter_mut() {
streams.push(movie.stream_maybe_without_drm().await?)
}
} else {
for (_, streams) in tree.iter_mut() {
streams.push(Stream::default())
}
}
let mut output = vec![];
let movie_listing_map =
self.serializable_to_json_map(FormatMovieListing::from(&movie_listing));
for (movie, streams) in tree {
let movie_map = self.serializable_to_json_map(FormatMovie::from(&movie));
for stream in streams {
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));
output.push(
self.replace_all(
HashMap::from([
(Scope::MovieListing, &movie_listing_map),
(Scope::Movie, &movie_map),
(Scope::Stream, &stream_map),
]),
stream,
)
.unwrap_or_default(),
)
}
}
Ok(output.join("\n"))
}
async fn parse_music_video(&self, media_collection: MediaCollection) -> Result<String> {
let music_video_empty = self.check_pattern_count_empty(Scope::MusicVideo);
let stream_empty = self.check_pattern_count_empty(Scope::Stream);
let music_video = must_match_if_true!(!music_video_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.clone()).unwrap_or_default();
let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.stream_maybe_without_drm().await?).unwrap_or_default();
let music_video_map = self.serializable_to_json_map(FormatMusicVideo::from(&music_video));
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));
let output = self
.replace_all(
HashMap::from([
(Scope::MusicVideo, &music_video_map),
(Scope::Stream, &stream_map),
]),
stream,
)
.unwrap_or_default();
Ok(output)
}
async fn parse_concert(&self, media_collection: MediaCollection) -> Result<String> {
let concert_empty = self.check_pattern_count_empty(Scope::Concert);
let stream_empty = self.check_pattern_count_empty(Scope::Stream);
let concert = must_match_if_true!(!concert_empty => media_collection|MediaCollection::Concert(concert) => concert.clone()).unwrap_or_default();
let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::Concert(concert) => concert.stream_maybe_without_drm().await?).unwrap_or_default();
let concert_map = self.serializable_to_json_map(FormatConcert::from(&concert));
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));
let output = self
.replace_all(
HashMap::from([(Scope::Concert, &concert_map), (Scope::Stream, &stream_map)]),
stream,
)
.unwrap_or_default();
Ok(output)
}
fn serializable_to_json_map<S: Serialize>(&self, s: S) -> Map<String, Value> {
serde_json::from_value(serde_json::to_value(s).unwrap()).unwrap()
}
fn check_pattern_count_empty(&self, scope: Scope) -> bool {
self.pattern_count.get(&scope).cloned().unwrap_or_default() == 0
}
fn check_scopes(&self, available_scopes: Vec<Scope>) -> Result<()> {
for (_, scope, field) in self.pattern.iter() {
if !available_scopes.contains(scope) {
bail!(
"'{}.{}' is not a valid keyword",
format!("{:?}", scope).to_lowercase(),
field
)
}
}
Ok(())
}
fn replace_all(
&self,
values: HashMap<Scope, &Map<String, Value>>,
mut stream: Stream,
) -> Option<String> {
if stream.subtitles.is_empty() {
if !self.check_pattern_count_empty(Scope::Subtitle) {
return None;
}
stream
.subtitles
.insert(Locale::Custom("".to_string()), Subtitle::default());
}
let mut output = vec![];
for (_, subtitle) in stream.subtitles {
let subtitle_map = self.serializable_to_json_map(FormatSubtitle::from(&subtitle));
let mut tmp_values = values.clone();
tmp_values.insert(Scope::Subtitle, &subtitle_map);
output.push(self.replace(tmp_values))
}
Some(output.join("\n"))
}
fn replace(&self, values: HashMap<Scope, &Map<String, Value>>) -> String {
let mut output = self.input.clone();
let mut offset = 0;
for (range, scope, field) in &self.pattern {
let item =
serde_plain::to_string(values.get(scope).unwrap().get(field.as_str()).unwrap())
.unwrap();
let start = (range.start as i32 + offset) as usize;
let end = (range.end as i32 + offset) as usize;
output.replace_range(start..end, &item);
offset += item.len() as i32 - range.len() as i32;
}
output
}
}
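// A minimal standalone sketch of the technique used by `Format::replace`: collect every
// `{{scope.field}}` occurrence with its byte range up front, then substitute the values
// while tracking how much each replacement shifted the ranges that follow. The regex is a
// simplified variant of `scope_regex` above, and the keywords/values are made up for
// illustration only.
#[cfg(test)]
mod tests {
    use regex::Regex;
    use std::collections::HashMap;
    use std::ops::Range;

    #[test]
    fn offset_tracked_replacement() {
        let input = "S{{season.number}}E{{episode.number}} - {{episode.title}}".to_string();
        let values: HashMap<&str, &str> = HashMap::from([
            ("season.number", "2"),
            ("episode.number", "11"),
            ("episode.title", "Example Title"),
        ]);

        let regex = Regex::new(r"\{\{\s*(?P<keyword>\w+\.\w+)\s*}}").unwrap();
        // byte range of every placeholder plus its keyword, in order of appearance
        let pattern: Vec<(Range<usize>, String)> = regex
            .captures_iter(&input)
            .map(|c| {
                let full = c.get(0).unwrap();
                (full.start()..full.end(), c["keyword"].to_string())
            })
            .collect();

        let mut output = input.clone();
        let mut offset = 0i64;
        for (range, keyword) in pattern {
            let item = values[keyword.as_str()];
            // shift the original range by the length difference of all previous replacements
            let start = (range.start as i64 + offset) as usize;
            let end = (range.end as i64 + offset) as usize;
            output.replace_range(start..end, item);
            offset += item.len() as i64 - range.len() as i64;
        }

        assert_eq!(output, "S2E11 - Example Title");
    }
}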


@ -0,0 +1,5 @@
mod command;
mod filter;
mod format;
pub use command::Search;


@ -0,0 +1,61 @@
use crate::utils::parse::parse_resolution;
use crunchyroll_rs::media::Resolution;
use regex::Regex;
use reqwest::Proxy;
pub fn clap_parse_resolution(s: &str) -> Result<Resolution, String> {
parse_resolution(s.to_string()).map_err(|e| e.to_string())
}
pub fn clap_parse_proxies(s: &str) -> Result<(Option<Proxy>, Option<Proxy>), String> {
let double_proxy_regex =
Regex::new(r"^(?P<first>(https?|socks5h?)://.+):(?P<second>(https?|socks5h?)://.+)$")
.unwrap();
if let Some(capture) = double_proxy_regex.captures(s) {
// checks if the input is formatted like 'https://example.com:socks5://example.com' and
// splits the string into 2 separate proxies at the middle colon
let first = capture.name("first").unwrap().as_str();
let second = capture.name("second").unwrap().as_str();
Ok((
Some(Proxy::all(first).map_err(|e| format!("first proxy: {e}"))?),
Some(Proxy::all(second).map_err(|e| format!("second proxy: {e}"))?),
))
} else if s.starts_with(':') {
// checks if the input is formatted like ':https://example.com' and returns a proxy on the
// second tuple position
Ok((
None,
Some(Proxy::all(s.trim_start_matches(':')).map_err(|e| e.to_string())?),
))
} else if s.ends_with(':') {
// checks if the input is formatted like 'https://example.com:' and returns a proxy on the
// first tuple position
Ok((
Some(Proxy::all(s.trim_end_matches(':')).map_err(|e| e.to_string())?),
None,
))
} else {
// returns the same proxy for both tuple positions
let proxy = Proxy::all(s).map_err(|e| e.to_string())?;
Ok((Some(proxy.clone()), Some(proxy)))
}
}
pub fn clap_parse_speed_limit(s: &str) -> Result<u32, String> {
let quota = s.to_lowercase();
let bytes = if let Ok(b) = quota.parse() {
b
} else if let Ok(b) = quota.trim_end_matches('b').parse::<u32>() {
b
} else if let Ok(kb) = quota.trim_end_matches("kb").parse::<u32>() {
kb * 1024
} else if let Ok(mb) = quota.trim_end_matches("mb").parse::<u32>() {
mb * 1024 * 1024
} else {
return Err("Invalid speed limit".to_string());
};
Ok(bytes)
}
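// Minimal sketches of the input formats accepted by the parsers above; the urls
// are placeholders and the expected byte counts follow the arithmetic in
// `clap_parse_speed_limit`.
#[cfg(test)]
mod tests {
    use super::{clap_parse_proxies, clap_parse_speed_limit};

    #[test]
    fn proxy_positions() {
        // '<url>:' only proxies api requests
        let (api, download) = clap_parse_proxies("https://example.com:").unwrap();
        assert!(api.is_some() && download.is_none());

        // ':<url>' only proxies download traffic
        let (api, download) = clap_parse_proxies(":https://example.org").unwrap();
        assert!(api.is_none() && download.is_some());

        // '<url>:<url>' proxies api requests through the first url, downloads through the second
        let (api, download) =
            clap_parse_proxies("https://example.com:https://example.org").unwrap();
        assert!(api.is_some() && download.is_some());
    }

    #[test]
    fn speed_limit_units() {
        assert_eq!(clap_parse_speed_limit("500KB"), Ok(500 * 1024));
        assert_eq!(clap_parse_speed_limit("10MB"), Ok(10 * 1024 * 1024));
        assert!(clap_parse_speed_limit("fast").is_err());
    }
}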


@ -0,0 +1,9 @@
use crate::utils::rate_limit::RateLimiterService;
use crunchyroll_rs::Crunchyroll;
use reqwest::Client;
pub struct Context {
pub crunchy: Crunchyroll,
pub client: Client,
pub rate_limiter: Option<RateLimiterService>,
}

File diff suppressed because it is too large


@ -0,0 +1,381 @@
use lazy_static::lazy_static;
use regex::Regex;
use std::fmt;
use std::fmt::Formatter;
use std::str::FromStr;
pub const SOFTSUB_CONTAINERS: [&str; 3] = ["mkv", "mov", "mp4"];
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum FFmpegPreset {
Predefined(FFmpegCodec, Option<FFmpegHwAccel>, FFmpegQuality),
Custom(Option<String>),
}
lazy_static! {
static ref PREDEFINED_PRESET: Regex = Regex::new(r"^\w+(-\w+)*?$").unwrap();
}
macro_rules! ffmpeg_enum {
(enum $name:ident { $($field:ident),* }) => {
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub enum $name {
$(
$field
),*,
}
impl $name {
fn all() -> Vec<$name> {
vec![
$(
$name::$field
),*,
]
}
}
impl fmt::Display for $name {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
$(
&$name::$field => write!(f, "{}", stringify!($field).to_string().to_lowercase())
),*
}
}
}
impl FromStr for $name {
type Err = anyhow::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
match s {
$(
stringify!($field) => Ok($name::$field)
),*,
_ => anyhow::bail!("{} is not a valid {}", s, stringify!($name).to_lowercase())
}
}
}
}
}
ffmpeg_enum! {
enum FFmpegCodec {
H264,
H265,
Av1
}
}
ffmpeg_enum! {
enum FFmpegHwAccel {
Nvidia,
Amd,
Apple
}
}
ffmpeg_enum! {
enum FFmpegQuality {
Lossless,
Normal,
Low
}
}
impl Default for FFmpegPreset {
fn default() -> Self {
Self::Custom(Some("-c:v copy -c:a copy".to_string()))
}
}
impl FFmpegPreset {
pub(crate) fn available_matches(
) -> Vec<(FFmpegCodec, Option<FFmpegHwAccel>, Option<FFmpegQuality>)> {
let codecs = vec![
(
FFmpegCodec::H264,
FFmpegHwAccel::all(),
FFmpegQuality::all(),
),
(
FFmpegCodec::H265,
FFmpegHwAccel::all(),
FFmpegQuality::all(),
),
(
FFmpegCodec::Av1,
vec![FFmpegHwAccel::Amd],
FFmpegQuality::all(),
),
];
let mut return_values = vec![];
for (codec, hwaccels, qualities) in codecs {
return_values.push((codec.clone(), None, None));
for hwaccel in hwaccels.clone() {
return_values.push((codec.clone(), Some(hwaccel), None));
}
for quality in qualities.clone() {
return_values.push((codec.clone(), None, Some(quality)))
}
for hwaccel in hwaccels {
for quality in qualities.clone() {
return_values.push((codec.clone(), Some(hwaccel.clone()), Some(quality)))
}
}
}
return_values
}
pub(crate) fn available_matches_human_readable() -> Vec<String> {
let mut return_values = vec![];
for (codec, hwaccel, quality) in FFmpegPreset::available_matches() {
let mut description_details = vec![];
if let Some(h) = &hwaccel {
description_details.push(format!("{h} hardware acceleration"))
}
if let Some(q) = &quality {
description_details.push(format!("{q} video quality/compression"))
}
let description = if description_details.is_empty() {
format!("{codec} encoded with default video quality/compression",)
} else if description_details.len() == 1 {
format!("{} encoded with {}", codec, description_details[0])
} else {
let first = description_details.remove(0);
let last = description_details.remove(description_details.len() - 1);
let mid = if !description_details.is_empty() {
format!(", {} ", description_details.join(", "))
} else {
"".to_string()
};
format!("{codec} encoded with {first}{mid} and {last}",)
};
return_values.push(format!(
"{} ({})",
vec![
Some(codec.to_string()),
hwaccel.map(|h| h.to_string()),
quality.map(|q| q.to_string())
]
.into_iter()
.flatten()
.collect::<Vec<String>>()
.join("-"),
description
))
}
return_values
}
pub(crate) fn parse(s: &str) -> Result<FFmpegPreset, String> {
if !PREDEFINED_PRESET.is_match(s) {
return Ok(FFmpegPreset::Custom(Some(s.to_string())));
}
let mut codec: Option<FFmpegCodec> = None;
let mut hwaccel: Option<FFmpegHwAccel> = None;
let mut quality: Option<FFmpegQuality> = None;
for token in s.split('-') {
if let Some(c) = FFmpegCodec::all()
.into_iter()
.find(|p| p.to_string() == token.to_lowercase())
{
if let Some(cc) = codec {
return Err(format!("cannot use multiple codecs (found {cc} and {c})",));
}
codec = Some(c)
} else if let Some(h) = FFmpegHwAccel::all()
.into_iter()
.find(|p| p.to_string() == token.to_lowercase())
{
if let Some(hh) = hwaccel {
return Err(format!(
"cannot use multiple hardware accelerations (found {hh} and {h})",
));
}
hwaccel = Some(h)
} else if let Some(q) = FFmpegQuality::all()
.into_iter()
.find(|p| p.to_string() == token.to_lowercase())
{
if let Some(qq) = quality {
return Err(format!(
"cannot use multiple ffmpeg preset qualities (found {qq} and {q})",
));
}
quality = Some(q)
} else {
return Err(format!(
"'{}' is not a valid ffmpeg preset (unknown token '{}')",
s, token
));
}
}
if let Some(c) = codec {
if !FFmpegPreset::available_matches().contains(&(
c.clone(),
hwaccel.clone(),
quality.clone(),
)) {
return Err("ffmpeg preset is not supported".to_string());
}
Ok(FFmpegPreset::Predefined(
c,
hwaccel,
quality.unwrap_or(FFmpegQuality::Normal),
))
} else {
Err("cannot use ffmpeg preset with without a codec".to_string())
}
}
pub(crate) fn into_input_output_args(self) -> (Vec<String>, Vec<String>) {
match self {
FFmpegPreset::Custom(output) => (
vec![],
output.map_or(vec![], |o| shlex::split(&o).unwrap_or_default()),
),
FFmpegPreset::Predefined(codec, hwaccel_opt, quality) => {
let mut input = vec![];
let mut output = vec![];
match codec {
FFmpegCodec::H264 => {
let mut crf_quality = || match quality {
FFmpegQuality::Lossless => output.extend(["-crf", "18"]),
FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-crf", "35"]),
};
if let Some(hwaccel) = hwaccel_opt {
match hwaccel {
FFmpegHwAccel::Nvidia => {
input.extend([
"-hwaccel",
"cuda",
"-hwaccel_output_format",
"cuda",
"-c:v",
"h264_cuvid",
]);
crf_quality();
output.extend(["-c:v", "h264_nvenc", "-c:a", "copy"])
}
FFmpegHwAccel::Amd => {
crf_quality();
output.extend(["-c:v", "h264_amf", "-c:a", "copy"])
}
FFmpegHwAccel::Apple => {
// Apple's Video Toolbox encoders ignore `-crf`; use `-q:v`
// instead, which works on a scale of 1-100 where 100 is lossless.
// The values below were derived via (-a/51+1)*99+1, where `a` is the
// corresponding crf value, so they very likely need some more tweaking.
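// Plugging the crf values used in these presets into that formula gives
// roughly: crf 18 → 65, crf 35 → 32 (and crf 20 → 61 for the h265 presets below).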
match quality {
FFmpegQuality::Lossless => output.extend(["-q:v", "65"]),
FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-q:v", "32"]),
}
output.extend(["-c:v", "h264_videotoolbox", "-c:a", "copy"])
}
}
} else {
crf_quality();
output.extend(["-c:v", "libx264", "-c:a", "copy"])
}
}
FFmpegCodec::H265 => {
let mut crf_quality = || match quality {
FFmpegQuality::Lossless => output.extend(["-crf", "20"]),
FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-crf", "35"]),
};
if let Some(hwaccel) = hwaccel_opt {
match hwaccel {
FFmpegHwAccel::Nvidia => {
input.extend([
"-hwaccel",
"cuda",
"-hwaccel_output_format",
"cuda",
"-c:v",
"h264_cuvid",
]);
crf_quality();
output.extend([
"-c:v",
"hevc_nvenc",
"-c:a",
"copy",
"-tag:v",
"hvc1",
])
}
FFmpegHwAccel::Amd => {
crf_quality();
output.extend(["-c:v", "hevc_amf", "-c:a", "copy"])
}
FFmpegHwAccel::Apple => {
// See the comment for apple h264 hwaccel
match quality {
FFmpegQuality::Lossless => output.extend(["-q:v", "61"]),
FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-q:v", "32"]),
}
output.extend([
"-c:v",
"hevc_videotoolbox",
"-c:a",
"copy",
"-tag:v",
"hvc1",
])
}
}
} else {
crf_quality();
output.extend(["-c:v", "libx265", "-c:a", "copy", "-tag:v", "hvc1"])
}
}
FFmpegCodec::Av1 => {
let mut crf_quality = || match quality {
FFmpegQuality::Lossless => output.extend(["-crf", "22"]),
FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-crf", "35"]),
};
crf_quality();
if let Some(FFmpegHwAccel::Amd) = hwaccel_opt {
output.extend(["-c:v", "av1_amf", "-c:a", "copy"]);
} else {
output.extend(["-c:v", "libsvtav1", "-c:a", "copy"]);
}
}
}
(
input
.into_iter()
.map(|s| s.to_string())
.collect::<Vec<String>>(),
output
.into_iter()
.map(|s| s.to_string())
.collect::<Vec<String>>(),
)
}
}
}
}
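// A minimal sketch of how a predefined preset string is parsed and mapped to ffmpeg
// arguments; the assertions mirror the match arms above and the preset string is just
// one of the combinations listed by `available_matches`.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_predefined_preset() {
        let preset = FFmpegPreset::parse("h265-nvidia-lossless").unwrap();
        assert_eq!(
            preset,
            FFmpegPreset::Predefined(
                FFmpegCodec::H265,
                Some(FFmpegHwAccel::Nvidia),
                FFmpegQuality::Lossless
            )
        );

        // nvidia hwaccel adds cuda input flags, h265 encodes with hevc_nvenc
        let (input, output) = preset.into_input_output_args();
        assert!(input.contains(&"cuda".to_string()));
        assert!(output.contains(&"hevc_nvenc".to_string()));
    }

    #[test]
    fn everything_else_is_treated_as_custom_ffmpeg_args() {
        assert_eq!(
            FFmpegPreset::parse("-c:v libx264 -crf 30"),
            Ok(FFmpegPreset::Custom(Some("-c:v libx264 -crf 30".to_string())))
        );
    }
}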


@ -0,0 +1,467 @@
use crate::utils::format::{SingleFormat, SingleFormatCollection};
use crate::utils::interactive_select::{check_for_duplicated_seasons, get_duplicated_seasons};
use crate::utils::parse::{fract, UrlFilter};
use anyhow::Result;
use crunchyroll_rs::{
Concert, Episode, Locale, MediaCollection, Movie, MovieListing, MusicVideo, Season, Series,
};
use log::{info, warn};
use std::collections::{BTreeMap, HashMap};
use std::ops::Not;
pub(crate) enum FilterMediaScope<'a> {
Series(&'a Series),
Season(&'a Season),
/// Always contains 1 or 2 episodes.
/// - 1: The episode's audio is completely missing
/// - 2: The requested audio is only available from first entry to last entry
Episode(Vec<&'a Episode>),
}
pub(crate) struct Filter {
url_filter: UrlFilter,
skip_specials: bool,
interactive_input: bool,
relative_episode_number: bool,
audio_locales: Vec<Locale>,
subtitle_locales: Vec<Locale>,
audios_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
subtitles_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
no_premium: fn(u32) -> Result<()>,
is_premium: bool,
series_visited: bool,
season_episodes: HashMap<String, Vec<Episode>>,
season_with_premium: Option<Vec<u32>>,
season_sorting: Vec<String>,
}
impl Filter {
#[allow(clippy::too_many_arguments)]
pub(crate) fn new(
url_filter: UrlFilter,
audio_locales: Vec<Locale>,
subtitle_locales: Vec<Locale>,
audios_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
subtitles_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
no_premium: fn(u32) -> Result<()>,
relative_episode_number: bool,
interactive_input: bool,
skip_specials: bool,
is_premium: bool,
) -> Self {
Self {
url_filter,
audio_locales,
subtitle_locales,
relative_episode_number,
interactive_input,
audios_missing,
subtitles_missing,
no_premium,
is_premium,
series_visited: false,
season_episodes: HashMap::new(),
skip_specials,
season_with_premium: is_premium.not().then_some(vec![]),
season_sorting: vec![],
}
}
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>> {
// the audio locales field isn't always populated
if !series.audio_locales.is_empty() {
let missing_audios = missing_locales(&series.audio_locales, &self.audio_locales);
if !missing_audios.is_empty()
&& !(self.audios_missing)(FilterMediaScope::Series(&series), missing_audios)?
{
return Ok(vec![]);
}
let missing_subtitles =
missing_locales(&series.subtitle_locales, &self.subtitle_locales);
if !missing_subtitles.is_empty()
&& !(self.subtitles_missing)(FilterMediaScope::Series(&series), missing_subtitles)?
{
return Ok(vec![]);
}
}
let mut seasons = vec![];
for season in series.seasons().await? {
if !self.url_filter.is_season_valid(season.season_number) {
continue;
}
let missing_audios = missing_locales(
&season
.versions
.iter()
.map(|l| l.audio_locale.clone())
.collect::<Vec<Locale>>(),
&self.audio_locales,
);
if !missing_audios.is_empty()
&& !(self.audios_missing)(FilterMediaScope::Season(&season), missing_audios)?
{
return Ok(vec![]);
}
seasons.push(season)
}
let duplicated_seasons = get_duplicated_seasons(&seasons);
if !duplicated_seasons.is_empty() {
if self.interactive_input {
check_for_duplicated_seasons(&mut seasons)
} else {
info!(
"Found duplicated seasons: {}",
duplicated_seasons
.iter()
.map(|d| d.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
}
self.series_visited = true;
Ok(seasons)
}
async fn visit_season(&mut self, season: Season) -> Result<Vec<Episode>> {
if !self.url_filter.is_season_valid(season.season_number) {
return Ok(vec![]);
}
let mut seasons = vec![];
if self
.audio_locales
.iter()
.any(|l| season.audio_locales.contains(l))
{
seasons.push(season.clone())
}
for version in season.versions {
if season.id == version.id {
continue;
}
if self.audio_locales.contains(&version.audio_locale) {
seasons.push(version.season().await?)
}
}
let mut episodes = vec![];
for season in seasons {
self.season_sorting.push(season.id.clone());
let mut eps = season.episodes().await?;
// removes any episode that does not have the audio locale of the season. yes, this is
// the case sometimes
if season.audio_locales.len() < 2 {
let season_locale = season
.audio_locales
.first()
.cloned()
.unwrap_or(Locale::ja_JP);
eps.retain(|e| e.audio_locale == season_locale)
}
#[allow(clippy::if_same_then_else)]
if eps.len() < season.number_of_episodes as usize {
if eps.is_empty()
&& !(self.audios_missing)(
FilterMediaScope::Season(&season),
season.audio_locales.iter().collect(),
)?
{
return Ok(vec![]);
} else if !eps.is_empty()
&& !(self.audios_missing)(
FilterMediaScope::Episode(vec![eps.first().unwrap(), eps.last().unwrap()]),
vec![&eps.first().unwrap().audio_locale],
)?
{
return Ok(vec![]);
}
}
episodes.extend(eps)
}
if self.relative_episode_number {
for episode in &episodes {
self.season_episodes
.entry(episode.season_id.clone())
.or_default()
.push(episode.clone())
}
}
Ok(episodes)
}
async fn visit_episode(&mut self, episode: Episode) -> Result<Vec<SingleFormat>> {
if !self
.url_filter
.is_episode_valid(episode.sequence_number, episode.season_number)
{
return Ok(vec![]);
}
// skip the episode if it's a special
if self.skip_specials
&& (episode.sequence_number == 0.0 || episode.sequence_number.fract() != 0.0)
{
return Ok(vec![]);
}
let mut episodes = vec![];
if !self.series_visited {
if self.audio_locales.contains(&episode.audio_locale) {
episodes.push(episode.clone())
}
for version in &episode.versions {
// `episode` is also a version of itself. the if block above already adds the
// episode if it matches the requested audio, so it doesn't need to be requested
// here again
if version.id == episode.id {
continue;
}
if self.audio_locales.contains(&version.audio_locale) {
episodes.push(version.episode().await?)
}
}
let audio_locales: Vec<Locale> =
episodes.iter().map(|e| e.audio_locale.clone()).collect();
let missing_audios = missing_locales(&audio_locales, &self.audio_locales);
if !missing_audios.is_empty()
&& !(self.audios_missing)(
FilterMediaScope::Episode(vec![&episode]),
missing_audios,
)?
{
return Ok(vec![]);
}
let mut subtitle_locales: Vec<Locale> = episodes
.iter()
.flat_map(|e| e.subtitle_locales.clone())
.collect();
subtitle_locales.sort();
subtitle_locales.dedup();
let missing_subtitles = missing_locales(&subtitle_locales, &self.subtitle_locales);
if !missing_subtitles.is_empty()
&& !(self.subtitles_missing)(
FilterMediaScope::Episode(vec![&episode]),
missing_subtitles,
)?
{
return Ok(vec![]);
}
} else {
episodes.push(episode.clone())
}
if let Some(seasons_with_premium) = &mut self.season_with_premium {
let episodes_len_before = episodes.len();
episodes.retain(|e| !e.is_premium_only && !self.is_premium);
if episodes_len_before > episodes.len()
&& !seasons_with_premium.contains(&episode.season_number)
{
(self.no_premium)(episode.season_number)?;
seasons_with_premium.push(episode.season_number)
}
if episodes.is_empty() {
return Ok(vec![]);
}
}
let mut relative_episode_number = None;
let mut relative_sequence_number = None;
if self.relative_episode_number {
let season_eps = match self.season_episodes.get(&episode.season_id) {
Some(eps) => eps,
None => {
self.season_episodes.insert(
episode.season_id.clone(),
episode.season().await?.episodes().await?,
);
self.season_episodes.get(&episode.season_id).unwrap()
}
};
let mut non_integer_sequence_number_count = 0;
for (i, ep) in season_eps.iter().enumerate() {
if ep.sequence_number == 0.0 || ep.sequence_number.fract() != 0.0 {
non_integer_sequence_number_count += 1
}
if ep.id == episode.id {
relative_episode_number = Some(i + 1);
relative_sequence_number = Some(
(i + 1 - non_integer_sequence_number_count) as f32
+ fract(ep.sequence_number),
);
break;
}
}
if relative_episode_number.is_none() || relative_sequence_number.is_none() {
warn!(
"Failed to get relative episode number for episode {} ({}) of {} season {}",
episode.sequence_number,
episode.title,
episode.series_title,
episode.season_number,
)
}
}
Ok(episodes
.into_iter()
.map(|e| {
SingleFormat::new_from_episode(
e.clone(),
e.subtitle_locales,
relative_episode_number.map(|n| n as u32),
relative_sequence_number,
)
})
.collect())
}
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>> {
Ok(movie_listing.movies().await?)
}
async fn visit_movie(&mut self, movie: Movie) -> Result<Vec<SingleFormat>> {
Ok(vec![SingleFormat::new_from_movie(movie, vec![])])
}
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Vec<SingleFormat>> {
Ok(vec![SingleFormat::new_from_music_video(music_video)])
}
async fn visit_concert(&mut self, concert: Concert) -> Result<Vec<SingleFormat>> {
Ok(vec![SingleFormat::new_from_concert(concert)])
}
async fn finish(self, input: Vec<Vec<SingleFormat>>) -> Result<SingleFormatCollection> {
let flatten_input: Vec<SingleFormat> = input.into_iter().flatten().collect();
let mut single_format_collection = SingleFormatCollection::new();
let mut pre_sorted: BTreeMap<String, Vec<SingleFormat>> = BTreeMap::new();
for data in flatten_input {
pre_sorted
.entry(data.identifier.clone())
.or_default()
.push(data)
}
let mut sorted: Vec<(String, Vec<SingleFormat>)> = pre_sorted.into_iter().collect();
sorted.sort_by(|(_, a), (_, b)| {
self.season_sorting
.iter()
.position(|p| p == &a.first().unwrap().season_id)
.unwrap()
.cmp(
&self
.season_sorting
.iter()
.position(|p| p == &b.first().unwrap().season_id)
.unwrap(),
)
});
for (_, mut data) in sorted {
data.sort_by(|a, b| {
self.audio_locales
.iter()
.position(|p| p == &a.audio)
.unwrap_or(usize::MAX)
.cmp(
&self
.audio_locales
.iter()
.position(|p| p == &b.audio)
.unwrap_or(usize::MAX),
)
});
single_format_collection.add_single_formats(data)
}
Ok(single_format_collection)
}
pub(crate) async fn visit(
mut self,
media_collection: MediaCollection,
) -> Result<SingleFormatCollection> {
let mut items = vec![media_collection];
let mut result = vec![];
while !items.is_empty() {
let mut new_items: Vec<MediaCollection> = vec![];
for i in items {
match i {
MediaCollection::Series(series) => new_items.extend(
self.visit_series(series)
.await?
.into_iter()
.map(|s| s.into())
.collect::<Vec<MediaCollection>>(),
),
MediaCollection::Season(season) => new_items.extend(
self.visit_season(season)
.await?
.into_iter()
.map(|s| s.into())
.collect::<Vec<MediaCollection>>(),
),
MediaCollection::Episode(episode) => {
result.push(self.visit_episode(episode).await?)
}
MediaCollection::MovieListing(movie_listing) => new_items.extend(
self.visit_movie_listing(movie_listing)
.await?
.into_iter()
.map(|m| m.into())
.collect::<Vec<MediaCollection>>(),
),
MediaCollection::Movie(movie) => result.push(self.visit_movie(movie).await?),
MediaCollection::MusicVideo(music_video) => {
result.push(self.visit_music_video(music_video).await?)
}
MediaCollection::Concert(concert) => {
result.push(self.visit_concert(concert).await?)
}
}
}
items = new_items
}
self.finish(result).await
}
}
fn missing_locales<'a>(available: &[Locale], searched: &'a [Locale]) -> Vec<&'a Locale> {
searched.iter().filter(|p| !available.contains(p)).collect()
}
/// Remove all duplicates from a [`Vec`].
pub fn real_dedup_vec<T: Clone + Eq>(input: &mut Vec<T>) {
let mut dedup = vec![];
for item in input.clone() {
if !dedup.contains(&item) {
dedup.push(item);
}
}
*input = dedup
}

View file

@ -0,0 +1,19 @@
use chrono::TimeDelta;
pub fn format_time_delta(time_delta: &TimeDelta) -> String {
let negative = *time_delta < TimeDelta::zero();
let time_delta = time_delta.abs();
let hours = time_delta.num_hours();
let minutes = time_delta.num_minutes() - time_delta.num_hours() * 60;
let seconds = time_delta.num_seconds() - time_delta.num_minutes() * 60;
let milliseconds = time_delta.num_milliseconds() - time_delta.num_seconds() * 1000;
format!(
"{}{}:{:0>2}:{:0>2}.{:0>3}",
if negative { "-" } else { "" },
hours,
minutes,
seconds,
milliseconds
)
}
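The produced string is `H:MM:SS.mmm` with an optional leading minus, the shape the ffmpeg `-ss`/`-to`/`-itsoffset` arguments expect later in the sync code. A small illustrative test (not from the original source):

#[cfg(test)]
mod format_time_delta_sketch {
    use super::format_time_delta;
    use chrono::TimeDelta;

    #[test]
    fn negative_offsets_keep_their_sign() {
        // -1 h 1 min 2.250 s -> sign, hours, then zero-padded minutes/seconds/millis
        let delta = TimeDelta::milliseconds(-3_662_250);
        assert_eq!(format_time_delta(&delta), "-1:01:02.250");
    }
}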

View file

@ -0,0 +1,603 @@
use crate::utils::filter::real_dedup_vec;
use crate::utils::locale::LanguageTagging;
use crate::utils::log::tab_info;
use crate::utils::os::{is_special_file, sanitize};
use anyhow::{bail, Result};
use chrono::{Datelike, Duration};
use crunchyroll_rs::media::{SkipEvents, Stream, StreamData, Subtitle};
use crunchyroll_rs::{Concert, Episode, Locale, MediaCollection, Movie, MusicVideo};
use log::{debug, info};
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::env;
use std::path::{Path, PathBuf};
#[allow(dead_code)]
#[derive(Clone)]
pub struct SingleFormat {
pub identifier: String,
pub title: String,
pub description: String,
pub release_year: u64,
pub release_month: u64,
pub release_day: u64,
pub audio: Locale,
pub subtitles: Vec<Locale>,
pub series_id: String,
pub series_name: String,
pub season_id: String,
pub season_title: String,
pub season_number: u32,
pub episode_id: String,
pub episode_number: String,
pub relative_episode_number: Option<u32>,
pub sequence_number: f32,
pub relative_sequence_number: Option<f32>,
pub duration: Duration,
source: MediaCollection,
}
impl SingleFormat {
pub fn new_from_episode(
episode: Episode,
subtitles: Vec<Locale>,
relative_episode_number: Option<u32>,
relative_sequence_number: Option<f32>,
) -> Self {
Self {
identifier: if episode.identifier.is_empty() {
// crunchyroll sometimes leaves the identifier field empty, so we have to build it
// ourselves. it's not 100% certain that the identifier built here is the same as
// the one crunchyroll would deliver (because the variables used here may also be
// delivered incorrectly by crunchy), but it's the best i can do at the moment
format!(
"{}|S{}|E{}",
episode.series_id, episode.season_number, episode.sequence_number
)
} else {
episode.identifier.clone()
},
title: episode.title.clone(),
description: episode.description.clone(),
release_year: episode.episode_air_date.year() as u64,
release_month: episode.episode_air_date.month() as u64,
release_day: episode.episode_air_date.day() as u64,
audio: episode.audio_locale.clone(),
subtitles,
series_id: episode.series_id.clone(),
series_name: episode.series_title.clone(),
season_id: episode.season_id.clone(),
season_title: episode.season_title.to_string(),
season_number: episode.season_number,
episode_id: episode.id.clone(),
episode_number: if episode.episode.is_empty() {
episode.sequence_number.to_string()
} else {
episode.episode.clone()
},
sequence_number: episode.sequence_number,
relative_episode_number,
relative_sequence_number,
duration: episode.duration,
source: episode.into(),
}
}
pub fn new_from_movie(movie: Movie, subtitles: Vec<Locale>) -> Self {
Self {
identifier: movie.id.clone(),
title: movie.title.clone(),
description: movie.description.clone(),
release_year: movie.free_available_date.year() as u64,
release_month: movie.free_available_date.month() as u64,
release_day: movie.free_available_date.day() as u64,
audio: Locale::ja_JP,
subtitles,
series_id: movie.movie_listing_id.clone(),
series_name: movie.movie_listing_title.clone(),
season_id: movie.movie_listing_id.clone(),
season_title: movie.movie_listing_title.to_string(),
season_number: 1,
episode_id: movie.id.clone(),
episode_number: "1".to_string(),
relative_episode_number: Some(1),
sequence_number: 1.0,
relative_sequence_number: Some(1.0),
duration: movie.duration,
source: movie.into(),
}
}
pub fn new_from_music_video(music_video: MusicVideo) -> Self {
Self {
identifier: music_video.id.clone(),
title: music_video.title.clone(),
description: music_video.description.clone(),
release_year: music_video.original_release.year() as u64,
release_month: music_video.original_release.month() as u64,
release_day: music_video.original_release.day() as u64,
audio: Locale::ja_JP,
subtitles: vec![],
series_id: music_video.id.clone(),
series_name: music_video.title.clone(),
season_id: music_video.id.clone(),
season_title: music_video.title.clone(),
season_number: 1,
episode_id: music_video.id.clone(),
episode_number: "1".to_string(),
relative_episode_number: Some(1),
sequence_number: 1.0,
relative_sequence_number: Some(1.0),
duration: music_video.duration,
source: music_video.into(),
}
}
pub fn new_from_concert(concert: Concert) -> Self {
Self {
identifier: concert.id.clone(),
title: concert.title.clone(),
description: concert.description.clone(),
release_year: concert.original_release.year() as u64,
release_month: concert.original_release.month() as u64,
release_day: concert.original_release.day() as u64,
audio: Locale::ja_JP,
subtitles: vec![],
series_id: concert.id.clone(),
series_name: concert.title.clone(),
season_id: concert.id.clone(),
season_title: concert.title.clone(),
season_number: 1,
episode_id: concert.id.clone(),
episode_number: "1".to_string(),
relative_episode_number: Some(1),
sequence_number: 1.0,
relative_sequence_number: Some(1.0),
duration: concert.duration,
source: concert.into(),
}
}
pub async fn stream(&self) -> Result<Stream> {
let stream = match &self.source {
MediaCollection::Episode(e) => e.stream_maybe_without_drm().await,
MediaCollection::Movie(m) => m.stream_maybe_without_drm().await,
MediaCollection::MusicVideo(mv) => mv.stream_maybe_without_drm().await,
MediaCollection::Concert(c) => c.stream_maybe_without_drm().await,
_ => unreachable!(),
};
if let Err(crunchyroll_rs::error::Error::Request { message, .. }) = &stream {
if message.starts_with("TOO_MANY_ACTIVE_STREAMS") {
bail!("Too many active/parallel streams. Please close at least one stream you're watching and try again")
}
};
Ok(stream?)
}
pub async fn skip_events(&self) -> Result<Option<SkipEvents>> {
match &self.source {
MediaCollection::Episode(e) => Ok(Some(e.skip_events().await?)),
MediaCollection::Movie(m) => Ok(Some(m.skip_events().await?)),
_ => Ok(None),
}
}
pub fn source_type(&self) -> String {
match &self.source {
MediaCollection::Episode(_) => "episode",
MediaCollection::Movie(_) => "movie",
MediaCollection::MusicVideo(_) => "music video",
MediaCollection::Concert(_) => "concert",
_ => unreachable!(),
}
.to_string()
}
pub fn is_episode(&self) -> bool {
matches!(self.source, MediaCollection::Episode(_))
}
pub fn is_special(&self) -> bool {
self.sequence_number == 0.0 || self.sequence_number.fract() != 0.0
}
}
struct SingleFormatCollectionEpisodeKey(f32);
impl PartialOrd for SingleFormatCollectionEpisodeKey {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for SingleFormatCollectionEpisodeKey {
fn cmp(&self, other: &Self) -> Ordering {
self.0.total_cmp(&other.0)
}
}
impl PartialEq for SingleFormatCollectionEpisodeKey {
fn eq(&self, other: &Self) -> bool {
self.0.eq(&other.0)
}
}
impl Eq for SingleFormatCollectionEpisodeKey {}
struct SingleFormatCollectionSeasonKey((u32, String));
#[allow(clippy::non_canonical_partial_ord_impl)]
impl PartialOrd for SingleFormatCollectionSeasonKey {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
let mut cmp = self.0 .0.partial_cmp(&other.0 .0);
if let Some(ordering) = cmp {
if matches!(ordering, Ordering::Equal) && self.0 .1 != other.0 .1 {
// first come first serve
cmp = Some(Ordering::Greater)
}
}
cmp
}
}
impl Ord for SingleFormatCollectionSeasonKey {
fn cmp(&self, other: &Self) -> Ordering {
let mut cmp = self.0 .0.cmp(&other.0 .0);
if matches!(cmp, Ordering::Equal) && self.0 .1 != other.0 .1 {
// first come first serve
cmp = Ordering::Greater
}
cmp
}
}
impl PartialEq for SingleFormatCollectionSeasonKey {
fn eq(&self, other: &Self) -> bool {
self.0.eq(&other.0)
}
}
impl Eq for SingleFormatCollectionSeasonKey {}
pub struct SingleFormatCollection(
BTreeMap<
SingleFormatCollectionSeasonKey,
BTreeMap<SingleFormatCollectionEpisodeKey, Vec<SingleFormat>>,
>,
);
impl SingleFormatCollection {
pub fn new() -> Self {
Self(BTreeMap::new())
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn add_single_formats(&mut self, single_formats: Vec<SingleFormat>) {
let format = single_formats.first().unwrap();
self.0
.entry(SingleFormatCollectionSeasonKey((
format.season_number,
format.season_id.clone(),
)))
.or_default()
.insert(
SingleFormatCollectionEpisodeKey(format.sequence_number),
single_formats,
);
}
pub fn full_visual_output(&self) {
debug!("Series has {} seasons", self.0.len());
for (season_key, episodes) in &self.0 {
let first_episode = episodes.first_key_value().unwrap().1.first().unwrap();
info!(
"{} Season {} ({})",
first_episode.series_name.clone(),
season_key.0 .0,
first_episode.season_title.clone(),
);
for (i, (_, formats)) in episodes.iter().enumerate() {
let format = formats.first().unwrap();
if log::max_level() == log::Level::Debug {
info!(
"{} S{:02}E{:0>2}",
format.title, format.season_number, format.episode_number
)
} else {
tab_info!(
"{}. {} » S{:02}E{:0>2}",
i + 1,
format.title,
format.season_number,
format.episode_number
)
}
}
}
}
}
impl IntoIterator for SingleFormatCollection {
type Item = Vec<SingleFormat>;
type IntoIter = SingleFormatCollectionIterator;
fn into_iter(self) -> Self::IntoIter {
SingleFormatCollectionIterator(self)
}
}
pub struct SingleFormatCollectionIterator(SingleFormatCollection);
impl Iterator for SingleFormatCollectionIterator {
type Item = Vec<SingleFormat>;
fn next(&mut self) -> Option<Self::Item> {
let (_, episodes) = self.0 .0.iter_mut().next()?;
let value = episodes.pop_first().unwrap().1;
if episodes.is_empty() {
self.0 .0.pop_first();
}
Some(value)
}
}
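The BTreeMap keys above are what give the collection its natural playback order; a tiny illustrative test (not from the source) for the episode key:

#[cfg(test)]
mod episode_key_sketch {
    use super::SingleFormatCollectionEpisodeKey;

    #[test]
    fn keys_order_by_sequence_number() {
        // the key wraps the f32 sequence number and orders via `total_cmp`, so a
        // fractional special like 0.5 sorts ahead of episode 1 instead of being lost
        let mut keys = vec![
            SingleFormatCollectionEpisodeKey(2.0),
            SingleFormatCollectionEpisodeKey(0.5),
            SingleFormatCollectionEpisodeKey(1.0),
        ];
        keys.sort();
        let ordered: Vec<f32> = keys.into_iter().map(|k| k.0).collect();
        assert_eq!(ordered, vec![0.5, 1.0, 2.0]);
    }
}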
#[allow(dead_code)]
#[derive(Clone)]
pub struct Format {
pub title: String,
pub description: String,
pub locales: Vec<(Locale, Vec<Locale>)>,
pub width: u64,
pub height: u64,
pub fps: f64,
pub release_year: u64,
pub release_month: u64,
pub release_day: u64,
pub series_id: String,
pub series_name: String,
pub season_id: String,
pub season_title: String,
pub season_number: u32,
pub episode_id: String,
pub episode_number: String,
pub relative_episode_number: Option<u32>,
pub sequence_number: f32,
pub relative_sequence_number: Option<f32>,
}
impl Format {
#[allow(clippy::type_complexity)]
pub fn from_single_formats(
mut single_formats: Vec<(SingleFormat, StreamData, Vec<(Subtitle, bool)>)>,
) -> Self {
let locales: Vec<(Locale, Vec<Locale>)> = single_formats
.iter()
.map(|(single_format, _, subtitles)| {
(
single_format.audio.clone(),
subtitles
.iter()
.map(|(s, _)| s.locale.clone())
.collect::<Vec<Locale>>(),
)
})
.collect();
let (first_format, first_stream, _) = single_formats.remove(0);
Self {
title: first_format.title,
description: first_format.description,
locales,
width: first_stream.resolution().unwrap().width,
height: first_stream.resolution().unwrap().height,
fps: first_stream.fps().unwrap(),
release_year: first_format.release_year,
release_month: first_format.release_month,
release_day: first_format.release_day,
series_id: first_format.series_id,
series_name: first_format.series_name,
season_id: first_format.season_id,
season_title: first_format.season_title,
season_number: first_format.season_number,
episode_id: first_format.episode_id,
episode_number: first_format.episode_number,
relative_episode_number: first_format.relative_episode_number,
sequence_number: first_format.sequence_number,
relative_sequence_number: first_format.relative_sequence_number,
}
}
/// Formats the given string if it has specific pattern in it. It also sanitizes the filename.
pub fn format_path(
&self,
path: PathBuf,
universal: bool,
language_tagging: Option<&LanguageTagging>,
) -> PathBuf {
let path = path
.to_string_lossy()
.to_string()
.replace("{title}", &sanitize(&self.title, true, universal))
.replace(
"{audio}",
&sanitize(
self.locales
.iter()
.map(|(a, _)| language_tagging.map_or(a.to_string(), |t| t.for_locale(a)))
.collect::<Vec<String>>()
.join(
&env::var("CRUNCHY_CLI_FORMAT_DELIMITER")
.map_or("_".to_string(), |e| e),
),
true,
universal,
),
)
.replace(
"{width}",
&sanitize(self.width.to_string(), true, universal),
)
.replace(
"{height}",
&sanitize(self.height.to_string(), true, universal),
)
.replace("{series_id}", &sanitize(&self.series_id, true, universal))
.replace(
"{series_name}",
&sanitize(&self.series_name, true, universal),
)
.replace("{season_id}", &sanitize(&self.season_id, true, universal))
.replace(
"{season_name}",
&sanitize(&self.season_title, true, universal),
)
.replace(
"{season_number}",
&format!(
"{:0>2}",
sanitize(self.season_number.to_string(), true, universal)
),
)
.replace("{episode_id}", &sanitize(&self.episode_id, true, universal))
.replace(
"{episode_number}",
&format!("{:0>2}", sanitize(&self.episode_number, true, universal)),
)
.replace(
"{relative_episode_number}",
&format!(
"{:0>2}",
sanitize(
self.relative_episode_number.unwrap_or_default().to_string(),
true,
universal,
)
),
)
.replace(
"{sequence_number}",
&format!(
"{:0>2}",
sanitize(self.sequence_number.to_string(), true, universal)
),
)
.replace(
"{relative_sequence_number}",
&format!(
"{:0>2}",
sanitize(
self.relative_sequence_number
.unwrap_or_default()
.to_string(),
true,
universal,
)
),
)
.replace(
"{release_year}",
&sanitize(self.release_year.to_string(), true, universal),
)
.replace(
"{release_month}",
&format!(
"{:0>2}",
sanitize(self.release_month.to_string(), true, universal)
),
)
.replace(
"{release_day}",
&format!(
"{:0>2}",
sanitize(self.release_day.to_string(), true, universal)
),
);
let mut path = PathBuf::from(path);
// make sure that every path section has a maximum of 255 characters
if path.file_name().unwrap_or_default().to_string_lossy().len() > 255 {
let name = path
.file_stem()
.unwrap_or_default()
.to_string_lossy()
.to_string();
let ext = path
.extension()
.unwrap_or_default()
.to_string_lossy()
.to_string();
if ext != name {
path.set_file_name(format!("{}.{}", &name[..(255 - ext.len() - 1)], ext))
}
}
path.iter()
.map(|s| {
if s.len() > 255 {
s.to_string_lossy()[..255].to_string()
} else {
s.to_string_lossy().to_string()
}
})
.collect()
}
pub fn visual_output(&self, dst: &Path) {
info!(
"Downloading {} to {}",
self.title,
if is_special_file(dst) || dst.to_str().unwrap() == "-" {
dst.to_string_lossy().to_string()
} else {
format!("'{}'", dst.to_str().unwrap())
}
);
tab_info!(
"Episode: S{:02}E{:0>2}",
self.season_number,
self.episode_number
);
tab_info!(
"Audio: {}",
self.locales
.iter()
.map(|(a, _)| a.to_string())
.collect::<Vec<String>>()
.join(", ")
);
let mut subtitles: Vec<Locale> = self.locales.iter().flat_map(|(_, s)| s.clone()).collect();
real_dedup_vec(&mut subtitles);
tab_info!(
"Subtitles: {}",
subtitles
.into_iter()
.map(|l| l.to_string())
.collect::<Vec<String>>()
.join(", ")
);
tab_info!("Resolution: {}x{}", self.height, self.width);
tab_info!("FPS: {:.2}", self.fps)
}
pub fn is_special(&self) -> bool {
self.sequence_number == 0.0 || self.sequence_number.fract() != 0.0
}
pub fn has_relative_fmt<S: AsRef<str>>(s: S) -> bool {
return s.as_ref().contains("{relative_episode_number}")
|| s.as_ref().contains("{relative_sequence_number}");
}
}
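The `{relative_*}` placeholders are the expensive ones, since resolving them means fetching the whole season first; `has_relative_fmt` is the check for that. A small illustrative test with hypothetical templates (not from the source):

#[cfg(test)]
mod relative_fmt_sketch {
    use super::Format;

    #[test]
    fn relative_placeholders_are_detected() {
        // hypothetical output templates; only the first one needs the season-wide
        // episode lookup that resolves relative numbers
        assert!(Format::has_relative_fmt(
            "{series_name}/Season {season_number}/{title} E{relative_episode_number}.mkv"
        ));
        assert!(!Format::has_relative_fmt(
            "{series_name}/S{season_number}E{episode_number}.mkv"
        ));
    }
}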

View file

@ -0,0 +1,73 @@
use crate::utils::log::progress_pause;
use crunchyroll_rs::Season;
use dialoguer::console::Term;
use dialoguer::MultiSelect;
use std::collections::BTreeMap;
pub fn get_duplicated_seasons(seasons: &Vec<Season>) -> Vec<u32> {
let mut season_number_counter = BTreeMap::<u32, u32>::new();
for season in seasons {
season_number_counter
.entry(season.season_number)
.and_modify(|c| *c += 1)
.or_default();
}
season_number_counter
.into_iter()
.filter_map(|(k, v)| if v > 0 { Some(k) } else { None })
.collect()
}
pub fn check_for_duplicated_seasons(seasons: &mut Vec<Season>) {
let mut as_map = BTreeMap::new();
for season in seasons.iter() {
as_map
.entry(season.season_number)
.or_insert(vec![])
.push(season)
}
let duplicates: Vec<&Season> = as_map
.into_values()
.filter(|s| s.len() > 1)
.flatten()
.collect();
progress_pause!();
let _ = Term::stdout().clear_line();
let keep = select(
"Duplicated seasons were found. Select the one you want to download (space to select/deselect; enter to continue)",
duplicates
.iter()
.map(|s| format!("Season {} ({})", s.season_number, s.title))
.collect(),
);
progress_pause!();
let mut remove_ids = vec![];
for (i, duplicate) in duplicates.into_iter().enumerate() {
if !keep.contains(&i) {
remove_ids.push(duplicate.id.clone())
}
}
seasons.retain(|s| !remove_ids.contains(&s.id));
}
pub fn select(prompt: &str, input: Vec<String>) -> Vec<usize> {
if input.is_empty() {
return vec![];
}
let def: Vec<bool> = (0..input.len()).map(|_| true).collect();
let selection = MultiSelect::new()
.with_prompt(prompt)
.items(&input[..])
.defaults(&def[..])
.clear(false)
.report(false)
.interact_on(&Term::stdout())
.unwrap_or_default();
selection
}

View file

@ -0,0 +1,148 @@
use crunchyroll_rs::Locale;
use log::warn;
#[derive(Clone, Debug)]
#[allow(clippy::upper_case_acronyms)]
pub enum LanguageTagging {
Default,
IETF,
}
impl LanguageTagging {
pub fn parse(s: &str) -> Result<Self, String> {
Ok(match s.to_lowercase().as_str() {
"default" => Self::Default,
"ietf" => Self::IETF,
_ => return Err(format!("'{}' is not a valid language tagging", s)),
})
}
pub fn convert_locales(&self, locales: &[Locale]) -> Vec<String> {
let ietf_language_codes = ietf_language_codes();
let mut converted = vec![];
match &self {
LanguageTagging::Default => {
for locale in locales {
let Some((_, available)) =
ietf_language_codes.iter().find(|(_, l)| l.contains(locale))
else {
// if no matching IETF language code was found, just pass it as it is
converted.push(locale.to_string());
continue;
};
converted.push(available.first().unwrap().to_string())
}
}
LanguageTagging::IETF => {
for locale in locales {
let Some((tag, _)) =
ietf_language_codes.iter().find(|(_, l)| l.contains(locale))
else {
// if no matching IETF language code was found, just pass it as it is
converted.push(locale.to_string());
continue;
};
converted.push(tag.to_string())
}
}
}
converted
}
pub fn for_locale(&self, locale: &Locale) -> String {
match &self {
LanguageTagging::Default => ietf_language_codes()
.iter()
.find(|(_, l)| l.contains(locale))
.map_or(locale.to_string(), |(_, l)| l[0].to_string()),
LanguageTagging::IETF => ietf_language_codes()
.iter()
.find(|(_, l)| l.contains(locale))
.map_or(locale.to_string(), |(tag, _)| tag.to_string()),
}
}
}
pub fn resolve_locales(locales: &[Locale]) -> Vec<Locale> {
let ietf_language_codes = ietf_language_codes();
let all_locales = Locale::all();
let mut resolved = vec![];
for locale in locales {
if all_locales.contains(locale) {
resolved.push(locale.clone())
} else if let Some((_, resolved_locales)) = ietf_language_codes
.iter()
.find(|(tag, _)| tag == &locale.to_string().as_str())
{
let (first, alternatives) = resolved_locales.split_first().unwrap();
resolved.push(first.clone());
// ignoring `Locale::en_IN` because I think the majority of users who want english
// audio / subs want the "actual" english version and not the hindi accent dub
if !alternatives.is_empty() && resolved_locales.first().unwrap() != &Locale::en_IN {
warn!("Resolving locale '{}' to '{}', but there are some alternatives: {}. If you want an alternative instead, please write it out completely instead of '{}'", locale, first, alternatives.iter().map(|l| format!("'{l}'")).collect::<Vec<String>>().join(", "), locale)
}
} else {
resolved.push(locale.clone());
warn!("Unknown locale '{}'", locale)
}
}
resolved
}
fn ietf_language_codes<'a>() -> Vec<(&'a str, Vec<Locale>)> {
vec![
("ar", vec![Locale::ar_ME, Locale::ar_SA]),
("ca", vec![Locale::ca_ES]),
("de", vec![Locale::de_DE]),
("en", vec![Locale::en_US, Locale::hi_IN]),
("es", vec![Locale::es_ES, Locale::es_419, Locale::es_LA]),
("fr", vec![Locale::fr_FR]),
("hi", vec![Locale::hi_IN]),
("id", vec![Locale::id_ID]),
("it", vec![Locale::it_IT]),
("ja", vec![Locale::ja_JP]),
("ko", vec![Locale::ko_KR]),
("ms", vec![Locale::ms_MY]),
("pl", vec![Locale::pl_PL]),
("pt", vec![Locale::pt_PT, Locale::pt_BR]),
("ru", vec![Locale::ru_RU]),
("ta", vec![Locale::ta_IN]),
("te", vec![Locale::te_IN]),
("th", vec![Locale::th_TH]),
("tr", vec![Locale::tr_TR]),
("vi", vec![Locale::vi_VN]),
("zh", vec![Locale::zh_CN, Locale::zh_HK, Locale::zh_TW]),
]
}
/// Return the locale of the system.
pub fn system_locale() -> Locale {
if let Some(system_locale) = sys_locale::get_locale() {
let locale = Locale::from(system_locale);
if let Locale::Custom(_) = locale {
Locale::en_US
} else {
locale
}
} else {
Locale::en_US
}
}
/// Check if [`Locale::Custom("all")`] is in the provided locale list and return [`Locale::all`] if
/// so. If not, just return the provided locale list.
pub fn all_locale_in_locales(locales: Vec<Locale>) -> Vec<Locale> {
if locales
.iter()
.any(|l| l.to_string().to_lowercase().trim() == "all")
{
Locale::all()
} else {
locales
}
}
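A sketch (illustrative, not from the source) of how the two directions fit together: full locales map to bare IETF tags, and short codes given by the user resolve back to a full locale. It assumes `Locale::Custom` wraps the raw string and displays it as-is, as the code above implies.

#[cfg(test)]
mod locale_sketch {
    use super::{resolve_locales, LanguageTagging};
    use crunchyroll_rs::Locale;

    #[test]
    fn short_codes_and_ietf_tags() {
        // IETF tagging maps a full locale to its bare language tag ...
        let tagging = LanguageTagging::parse("ietf").unwrap();
        assert_eq!(tagging.for_locale(&Locale::de_DE), "de");

        // ... while a short code resolves to the first full locale behind that tag
        let resolved = resolve_locales(&[Locale::Custom("de".to_string())]);
        assert!(resolved == vec![Locale::de_DE]);
    }
}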

View file

@ -0,0 +1,186 @@
use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};
use log::{
info, set_boxed_logger, set_max_level, Level, LevelFilter, Log, Metadata, Record,
SetLoggerError,
};
use std::io::{stdout, Write};
use std::sync::Mutex;
use std::thread;
use std::time::Duration;
pub struct ProgressHandler {
pub(crate) stopped: bool,
}
impl Drop for ProgressHandler {
fn drop(&mut self) {
if !self.stopped {
info!(target: "progress_end", "")
}
}
}
impl ProgressHandler {
pub(crate) fn stop<S: AsRef<str>>(mut self, msg: S) {
self.stopped = true;
info!(target: "progress_end", "{}", msg.as_ref())
}
}
macro_rules! progress {
($($arg:tt)+) => {
{
log::info!(target: "progress", $($arg)+);
$crate::utils::log::ProgressHandler{stopped: false}
}
}
}
pub(crate) use progress;
macro_rules! progress_pause {
() => {
{
log::info!(target: "progress_pause", "")
}
}
}
pub(crate) use progress_pause;
macro_rules! tab_info {
($($arg:tt)+) => {
if log::max_level() == log::LevelFilter::Debug {
info!($($arg)+)
} else {
info!("\t{}", format!($($arg)+))
}
}
}
pub(crate) use tab_info;
pub struct CliLogger {
level: LevelFilter,
progress: Mutex<Option<ProgressBar>>,
}
impl Log for CliLogger {
fn enabled(&self, metadata: &Metadata) -> bool {
metadata.level() <= self.level
}
fn log(&self, record: &Record) {
if !self.enabled(record.metadata())
|| (record.target() != "progress"
&& record.target() != "progress_pause"
&& record.target() != "progress_end"
&& !record.target().starts_with("crunchy_cli"))
{
return;
}
if self.level >= LevelFilter::Debug {
self.extended(record);
return;
}
match record.target() {
"progress" => self.progress(record, false),
"progress_pause" => {
let progress = self.progress.lock().unwrap();
if let Some(p) = &*progress {
p.set_draw_target(if p.is_hidden() {
ProgressDrawTarget::stdout()
} else {
ProgressDrawTarget::hidden()
})
}
}
"progress_end" => self.progress(record, true),
_ => {
if self.progress.lock().unwrap().is_some() {
self.progress(record, false)
} else if record.level() > Level::Warn {
self.normal(record)
} else {
self.error(record)
}
}
}
}
fn flush(&self) {
let _ = stdout().flush();
}
}
impl CliLogger {
pub fn new(level: LevelFilter) -> Self {
Self {
level,
progress: Mutex::new(None),
}
}
pub fn init(level: LevelFilter) -> Result<(), SetLoggerError> {
set_max_level(level);
set_boxed_logger(Box::new(CliLogger::new(level)))
}
fn extended(&self, record: &Record) {
println!(
"[{}] {} {} ({}) {}",
chrono::Utc::now().format("%Y-%m-%d %H:%M:%S"),
record.level(),
// replace the 'progress' prefix if this function is invoked via 'progress!'
record
.target()
.replacen("crunchy_cli_core", "crunchy_cli", 1)
.replacen("progress_end", "crunchy_cli", 1)
.replacen("progress", "crunchy_cli", 1),
format!("{:?}", thread::current().id())
.replace("ThreadId(", "")
.replace(')', ""),
record.args()
)
}
fn normal(&self, record: &Record) {
println!(":: {}", record.args())
}
fn error(&self, record: &Record) {
eprintln!(":: {}", record.args())
}
fn progress(&self, record: &Record, stop: bool) {
let mut progress = self.progress.lock().unwrap();
let msg = format!("{}", record.args());
if stop && progress.is_some() {
if msg.is_empty() {
progress.take().unwrap().finish()
} else {
progress.take().unwrap().finish_with_message(msg)
}
} else if let Some(p) = &*progress {
p.println(format!(":: → {}", msg))
} else {
#[cfg(not(windows))]
let finish_str = "✓";
#[cfg(windows)]
// windows does not support all unicode characters by default in its consoles, so
// we're using this (square root) symbol instead. microsoft.
let finish_str = "√";
let pb = ProgressBar::new_spinner();
pb.set_style(
ProgressStyle::with_template(":: {spinner} {msg}")
.unwrap()
.tick_strings(&["", "\\", "|", "/", finish_str]),
);
pb.set_draw_target(ProgressDrawTarget::stdout());
pb.enable_steady_tick(Duration::from_millis(200));
pb.set_message(msg);
*progress = Some(pb)
}
}
}
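A hypothetical call-site sketch (not part of this file) showing the intended flow of `progress!`, regular logging, and `ProgressHandler::stop`:

// hypothetical call site, only to illustrate how the pieces above fit together
#[allow(dead_code)]
fn progress_usage_sketch() {
    // `progress!` logs with the "progress" target, which makes `CliLogger` spin up
    // the indicatif spinner, and hands back a `ProgressHandler`
    let handle = progress!("Downloading episode");
    // ordinary log output emitted while the spinner runs is printed above it
    log::info!("resolved 2 audio locales");
    // `stop` logs to "progress_end", which finishes the spinner with this message
    handle.stop("Downloaded episode");
}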

View file

@ -0,0 +1,15 @@
pub mod clap;
pub mod context;
pub mod download;
pub mod ffmpeg;
pub mod filter;
pub mod fmt;
pub mod format;
pub mod interactive_select;
pub mod locale;
pub mod log;
pub mod os;
pub mod parse;
pub mod rate_limit;
pub mod sync;
pub mod video;

View file

@ -0,0 +1,225 @@
use log::debug;
use regex::{Regex, RegexBuilder};
use std::borrow::Cow;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::process::{Command, Stdio};
use std::task::{Context, Poll};
use std::{env, fs, io};
use tempfile::{Builder, NamedTempFile, TempPath};
use tokio::io::{AsyncRead, ReadBuf};
pub fn has_ffmpeg() -> bool {
if let Err(e) = Command::new("ffmpeg").stderr(Stdio::null()).spawn() {
if ErrorKind::NotFound != e.kind() {
debug!(
"unknown error occurred while checking if ffmpeg exists: {}",
e.kind()
)
}
false
} else {
true
}
}
/// Get the temp directory either by the specified `CRUNCHY_CLI_TEMP_DIR` env variable or the dir
/// provided by the os.
pub fn temp_directory() -> PathBuf {
env::var("CRUNCHY_CLI_TEMP_DIR").map_or(env::temp_dir(), PathBuf::from)
}
/// Any tempfile should be created with this function. The prefix and directory of every file
/// created with this function stay the same, which is helpful to query all existing tempfiles
/// and e.g. remove them in case of ctrl-c. Having one function is also good to prevent mistakes
/// like setting the wrong prefix if done manually.
pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
let tempfile = Builder::default()
.prefix(".crunchy-cli_")
.suffix(suffix.as_ref())
.tempfile_in(temp_directory())?;
debug!(
"Created temporary file: {}",
tempfile.path().to_string_lossy()
);
Ok(tempfile)
}
pub fn cache_dir<S: AsRef<str>>(name: S) -> io::Result<PathBuf> {
let cache_dir = temp_directory().join(format!(".crunchy-cli_{}_cache", name.as_ref()));
fs::create_dir_all(&cache_dir)?;
Ok(cache_dir)
}
pub struct TempNamedPipe {
path: TempPath,
#[cfg(not(target_os = "windows"))]
reader: tokio::net::unix::pipe::Receiver,
#[cfg(target_os = "windows")]
file: tokio::fs::File,
}
impl TempNamedPipe {
pub fn path(&self) -> &Path {
&self.path
}
}
impl AsyncRead for TempNamedPipe {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<io::Result<()>> {
#[cfg(not(target_os = "windows"))]
return Pin::new(&mut self.reader).poll_read(cx, buf);
// very very dirty implementation of a 'tail' like behavior
#[cfg(target_os = "windows")]
{
let mut tmp_bytes = vec![0; buf.remaining()];
let mut tmp_buf = ReadBuf::new(tmp_bytes.as_mut_slice());
loop {
return match Pin::new(&mut self.file).poll_read(cx, &mut tmp_buf) {
Poll::Ready(r) => {
if r.is_ok() {
if !tmp_buf.filled().is_empty() {
buf.put_slice(tmp_buf.filled())
} else {
// sleep to not loop insanely fast and consume unnecessary system resources
std::thread::sleep(std::time::Duration::from_millis(50));
continue;
}
}
Poll::Ready(r)
}
Poll::Pending => Poll::Pending,
};
}
}
}
}
impl Drop for TempNamedPipe {
fn drop(&mut self) {
#[cfg(not(target_os = "windows"))]
let _ = nix::unistd::unlink(self.path.to_string_lossy().to_string().as_str());
}
}
pub fn temp_named_pipe() -> io::Result<TempNamedPipe> {
let tmp = tempfile("")?;
#[cfg(not(target_os = "windows"))]
{
let path = tmp.into_temp_path();
let _ = fs::remove_file(&path);
nix::unistd::mkfifo(
path.to_string_lossy().to_string().as_str(),
nix::sys::stat::Mode::S_IRWXU,
)?;
Ok(TempNamedPipe {
reader: tokio::net::unix::pipe::OpenOptions::new().open_receiver(&path)?,
path,
})
}
#[cfg(target_os = "windows")]
{
let (file, path) = tmp.into_parts();
Ok(TempNamedPipe {
file: tokio::fs::File::from_std(file),
path,
})
}
}
/// Check if the given path exists and rename it until the new (renamed) file does not exist.
pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
// do not rename it if it exists but is a special file
if is_special_file(&path) {
return (path, false);
}
let mut i = 0;
while path.exists() {
i += 1;
let mut ext = path.extension().unwrap_or_default().to_str().unwrap();
let mut filename = path.file_stem().unwrap_or_default().to_str().unwrap();
// if the extension is empty, the filename without extension is probably empty
// (e.g. `.mp4`). in this case Rust assumes that `.mp4` is the file stem rather than the
// extension. if this is the case, set the extension to the file stem and make the file stem
// empty
if ext.is_empty() {
ext = filename;
filename = "";
}
if filename.ends_with(&format!(" ({})", i - 1)) {
filename = filename.strip_suffix(&format!(" ({})", i - 1)).unwrap();
}
path.set_file_name(format!("{} ({}).{}", filename, i, ext))
}
(path, i != 0)
}
/// Check if the given path is a special file. On Linux this is probably a pipe and on Windows
/// ¯\_(ツ)_/¯
pub fn is_special_file<P: AsRef<Path>>(path: P) -> bool {
path.as_ref().exists() && !path.as_ref().is_file() && !path.as_ref().is_dir()
}
lazy_static::lazy_static! {
static ref WINDOWS_NON_PRINTABLE_RE: Regex = Regex::new(r"[\x00-\x1f\x80-\x9f]").unwrap();
static ref WINDOWS_ILLEGAL_RE: Regex = Regex::new(r#"[<>:"|?*]"#).unwrap();
static ref WINDOWS_RESERVED_RE: Regex = RegexBuilder::new(r"(?i)^(con|prn|aux|nul|com[0-9]|lpt[0-9])(\..*)?$")
.case_insensitive(true)
.build()
.unwrap();
static ref WINDOWS_TRAILING_RE: Regex = Regex::new(r"[\. ]+$").unwrap();
static ref LINUX_NON_PRINTABLE: Regex = Regex::new(r"[\x00]").unwrap();
static ref RESERVED_RE: Regex = Regex::new(r"^\.+$").unwrap();
}
/// Sanitizes a filename with the option to include/exclude the path separator from sanitizing.
pub fn sanitize<S: AsRef<str>>(path: S, include_path_separator: bool, universal: bool) -> String {
let path = Cow::from(path.as_ref().trim());
let path = RESERVED_RE.replace(&path, "");
let collect = |name: String| {
if name.len() > 255 {
name[..255].to_string()
} else {
name
}
};
if universal || cfg!(windows) {
let path = WINDOWS_NON_PRINTABLE_RE.replace_all(&path, "");
let path = WINDOWS_ILLEGAL_RE.replace_all(&path, "");
let path = WINDOWS_RESERVED_RE.replace_all(&path, "");
let path = WINDOWS_TRAILING_RE.replace(&path, "");
let mut path = path.to_string();
if include_path_separator {
path = path.replace(['\\', '/'], "");
}
collect(path)
} else {
let path = LINUX_NON_PRINTABLE.replace_all(&path, "");
let mut path = path.to_string();
if include_path_separator {
path = path.replace('/', "");
}
collect(path)
}
}
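Two illustrative tests (not from the source) for the path helpers, showing the `universal` sanitizing mode and the no-op case of `free_file`:

#[cfg(test)]
mod os_sketch {
    use super::{free_file, sanitize};
    use std::path::PathBuf;

    #[test]
    fn sanitize_applies_windows_rules_universally() {
        // `universal = true` enforces the Windows character rules on every platform
        assert_eq!(sanitize("What? A Title: Part 2", true, true), "What A Title Part 2");
    }

    #[test]
    fn free_file_leaves_missing_paths_untouched() {
        // assuming no file with this name exists in the working directory
        let path = PathBuf::from("crunchy-cli-free-file-sketch.mkv");
        assert_eq!(free_file(path.clone()), (path, false));
    }
}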

View file

@ -0,0 +1,207 @@
use anyhow::{anyhow, bail, Result};
use crunchyroll_rs::media::Resolution;
use crunchyroll_rs::{Crunchyroll, MediaCollection, UrlType};
use log::debug;
use regex::Regex;
/// Define a filter, based on season and episode number, to select episodes / movies.
/// If a struct instance equals [`Default::default()`], no filter is applied.
/// If `from_*` is [`None`], it is treated as the minimum possible value.
/// If `to_*` is [`None`], it is treated as the maximum possible value.
#[derive(Debug, Default)]
pub struct InnerUrlFilter {
from_episode: Option<f32>,
to_episode: Option<f32>,
from_season: Option<u32>,
to_season: Option<u32>,
}
#[derive(Debug)]
pub struct UrlFilter {
inner: Vec<InnerUrlFilter>,
}
impl Default for UrlFilter {
fn default() -> Self {
Self {
inner: vec![InnerUrlFilter::default()],
}
}
}
impl UrlFilter {
pub fn is_season_valid(&self, season: u32) -> bool {
self.inner.iter().any(|f| {
let from_season = f.from_season.unwrap_or(u32::MIN);
let to_season = f.to_season.unwrap_or(u32::MAX);
season >= from_season && season <= to_season
})
}
pub fn is_episode_valid(&self, episode: f32, season: u32) -> bool {
self.inner.iter().any(|f| {
let from_episode = f.from_episode.unwrap_or(f32::MIN);
let to_episode = f.to_episode.unwrap_or(f32::MAX);
let from_season = f.from_season.unwrap_or(u32::MIN);
let to_season = f.to_season.unwrap_or(u32::MAX);
if season < from_season || season > to_season {
false
} else if season == from_season || (f.from_season.is_none() && f.to_season.is_none()) {
episode >= from_episode && episode <= to_episode
} else {
true
}
})
}
}
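To make the filter semantics concrete, an illustrative test (not from the source) that builds by hand what `parse_url` would produce for the `[S1E4-S3]` pattern:

#[cfg(test)]
mod url_filter_sketch {
    use super::{InnerUrlFilter, UrlFilter};

    #[test]
    fn season_episode_range() {
        // hand-built equivalent of appending "[S1E4-S3]" to a series url
        let filter = UrlFilter {
            inner: vec![InnerUrlFilter {
                from_episode: Some(4.0),
                to_episode: None,
                from_season: Some(1),
                to_season: Some(3),
            }],
        };
        assert!(!filter.is_episode_valid(3.0, 1)); // S1E3 is before the range
        assert!(filter.is_episode_valid(4.0, 1)); // S1E4 starts the range
        assert!(filter.is_episode_valid(1.0, 2)); // everything in S2 is included
        assert!(!filter.is_season_valid(4)); // S4 is past the range
    }
}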
/// Parse a url and return all [`crunchyroll_rs::Media<crunchyroll_rs::Episode>`] &
/// [`crunchyroll_rs::Media<crunchyroll_rs::Movie>`] which could be related to it.
///
/// The `with_filter` argument says whether filtering should be enabled for the url. Filtering is a
/// specific pattern at the end of the url which declares which parts of the url content should be
/// returned / filtered (out). _This only works if the url points to a series_.
///
/// Examples how filtering works:
/// - `...[E5]` - Download the fifth episode.
/// - `...[S1]` - Download the full first season.
/// - `...[-S2]` - Download all seasons up to and including season 2.
/// - `...[S3E4-]` - Download all episodes from and including season 3, episode 4.
/// - `...[S1E4-S3]` - Download all episodes from and including season 1, episode 4, until and including season 3.
/// - `...[S3,S5]` - Download seasons 3 and 5.
/// - `...[S1-S3,S4E2-S4E6]` - Download season 1 to 3 and episode 2 to episode 6 of season 4.
/// In practice, it would look like this: `https://crunchyroll.com/series/12345678/example[S1E5-S3E2]`.
pub async fn parse_url(
crunchy: &Crunchyroll,
mut url: String,
with_filter: bool,
) -> Result<(MediaCollection, UrlFilter)> {
let url_filter = if with_filter {
debug!("Url may contain filters");
let open_index = url.rfind('[').unwrap_or(0);
let close_index = url.rfind(']').unwrap_or(0);
let filter = if open_index < close_index {
let filter = url.as_str()[open_index + 1..close_index].to_string();
url = url.as_str()[0..open_index].to_string();
filter
} else {
"".to_string()
};
let filter_regex = Regex::new(r"((S(?P<from_season>\d+))?(E(?P<from_episode>\d+))?)(((?P<dash>-)((S(?P<to_season>\d+))?(E(?P<to_episode>\d+))?))?)(,|$)").unwrap();
let mut filters = vec![];
for capture in filter_regex.captures_iter(&filter) {
let dash = capture.name("dash").is_some();
let from_episode = capture
.name("from_episode")
.map_or(anyhow::Ok(None), |fe| Ok(Some(fe.as_str().parse()?)))?;
let to_episode = capture
.name("to_episode")
.map_or(anyhow::Ok(if dash { None } else { from_episode }), |te| {
Ok(Some(te.as_str().parse()?))
})?;
let from_season = capture
.name("from_season")
.map_or(anyhow::Ok(None), |fs| Ok(Some(fs.as_str().parse()?)))?;
let to_season = capture
.name("to_season")
.map_or(anyhow::Ok(if dash { None } else { from_season }), |ts| {
Ok(Some(ts.as_str().parse()?))
})?;
filters.push(InnerUrlFilter {
from_episode,
to_episode,
from_season,
to_season,
})
}
let url_filter = UrlFilter { inner: filters };
debug!("Url find: {:?}", url_filter);
url_filter
} else {
UrlFilter::default()
};
// check if the url is the old series/episode scheme which still occurs in some places (like the
// rss)
let old_url_regex = Regex::new(r"https?://(www\.)?crunchyroll\.com/.+").unwrap();
if old_url_regex.is_match(&url) {
debug!("Detected maybe old url");
// replace the 'http' prefix with 'https' as http is not supported by the reqwest client
if url.starts_with("http://") {
url.replace_range(0..4, "https")
}
// the old url redirects to the new url. request the old url, follow the redirects and
// extract the final url
url = crunchy.client().get(&url).send().await?.url().to_string()
}
let parsed_url = crunchyroll_rs::parse_url(url).ok_or(anyhow!("Invalid url"))?;
debug!("Url type: {:?}", parsed_url);
let media_collection = match parsed_url {
UrlType::Series(id)
| UrlType::MovieListing(id)
| UrlType::EpisodeOrMovie(id)
| UrlType::MusicVideo(id)
| UrlType::Concert(id) => crunchy.media_collection_from_id(id).await?,
};
Ok((media_collection, url_filter))
}
/// Parse a resolution given as a [`String`] to a [`crunchyroll_rs::media::Resolution`].
pub fn parse_resolution(mut resolution: String) -> Result<Resolution> {
resolution = resolution.to_lowercase();
if resolution == "best" {
Ok(Resolution {
width: u64::MAX,
height: u64::MAX,
})
} else if resolution == "worst" {
Ok(Resolution {
width: u64::MIN,
height: u64::MIN,
})
} else if resolution.ends_with('p') {
let without_p = resolution.as_str()[0..resolution.len() - 1]
.parse()
.map_err(|_| anyhow!("Could not find resolution"))?;
Ok(Resolution {
width: without_p * 16 / 9,
height: without_p,
})
} else if let Some((w, h)) = resolution.split_once('x') {
Ok(Resolution {
width: w
.parse()
.map_err(|_| anyhow!("Could not find resolution"))?,
height: h
.parse()
.map_err(|_| anyhow!("Could not find resolution"))?,
})
} else {
bail!("Could not find resolution")
}
}
/// Dirty implementation of [`f32::fract`] with more accuracy.
pub fn fract(input: f32) -> f32 {
if input.fract() == 0.0 {
return 0.0;
}
format!("0.{}", input.to_string().split('.').last().unwrap())
.parse::<f32>()
.unwrap()
}
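Illustrative tests (not from the source) for the two pure helpers in this file, the resolution shorthands and the string-based `fract`:

#[cfg(test)]
mod parse_helpers_sketch {
    use super::{fract, parse_resolution};

    #[test]
    fn resolution_shorthands() {
        // "1080p" expands to a 16:9 resolution, "best" to the u64::MAX sentinel
        let res = parse_resolution("1080p".to_string()).unwrap();
        assert_eq!((res.width, res.height), (1920, 1080));
        assert_eq!(parse_resolution("best".to_string()).unwrap().height, u64::MAX);
    }

    #[test]
    fn string_based_fract_avoids_float_noise() {
        // `7.2f32.fract()` is ~0.1999998 due to representation error; the string
        // round-trip keeps the value at the f32 closest to 0.2
        assert_eq!(fract(7.2), 0.2);
        assert_eq!(fract(11.0), 0.0);
    }
}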

View file

@ -0,0 +1,73 @@
use async_speed_limit::Limiter;
use crunchyroll_rs::error::Error;
use futures_util::TryStreamExt;
use reqwest::{Client, Request, Response, ResponseBuilderExt};
use std::future::Future;
use std::io;
use std::pin::Pin;
use std::sync::Arc;
use std::task::{Context, Poll};
use tower_service::Service;
#[derive(Clone)]
pub struct RateLimiterService {
client: Arc<Client>,
rate_limiter: Limiter,
}
impl RateLimiterService {
pub fn new(bytes: u32, client: Client) -> Self {
Self {
client: Arc::new(client),
rate_limiter: Limiter::new(bytes as f64),
}
}
}
impl Service<Request> for RateLimiterService {
type Response = Response;
type Error = Error;
type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: Request) -> Self::Future {
let client = self.client.clone();
let rate_limiter = self.rate_limiter.clone();
Box::pin(async move {
let mut body = vec![];
let res = client.execute(req).await?;
let _url = res.url().clone().to_string();
let url = _url.as_str();
let mut http_res = http::Response::builder()
.url(res.url().clone())
.status(res.status())
.version(res.version());
*http_res.headers_mut().unwrap() = res.headers().clone();
http_res
.extensions_ref()
.unwrap()
.clone_from(&res.extensions());
let limiter = rate_limiter.limit(
res.bytes_stream()
.map_err(io::Error::other)
.into_async_read(),
);
futures_util::io::copy(limiter, &mut body)
.await
.map_err(|e| Error::Request {
url: url.to_string(),
status: None,
message: e.to_string(),
})?;
Ok(Response::from(http_res.body(body).unwrap()))
})
}
}

View file

@ -0,0 +1,432 @@
use std::io::Read;
use std::process::Stdio;
use std::{
cmp,
collections::{HashMap, HashSet},
mem,
ops::Not,
path::Path,
process::Command,
};
use chrono::TimeDelta;
use crunchyroll_rs::Locale;
use log::debug;
use tempfile::TempPath;
use anyhow::{bail, Result};
use rusty_chromaprint::{Configuration, Fingerprinter};
use super::fmt::format_time_delta;
pub struct SyncAudio {
pub format_id: usize,
pub path: TempPath,
pub locale: Locale,
pub sample_rate: u32,
pub video_idx: usize,
}
#[derive(Debug, Clone, Copy)]
struct TimeRange {
start: f64,
end: f64,
}
pub fn sync_audios(
available_audios: &Vec<SyncAudio>,
sync_tolerance: u32,
sync_precision: u32,
) -> Result<Option<HashMap<usize, TimeDelta>>> {
let mut result: HashMap<usize, TimeDelta> = HashMap::new();
let mut sync_audios = vec![];
let mut chromaprints = HashMap::new();
let mut formats = HashSet::new();
for audio in available_audios {
if formats.contains(&audio.format_id) {
continue;
}
formats.insert(audio.format_id);
sync_audios.push((audio.format_id, &audio.path, audio.sample_rate));
chromaprints.insert(
audio.format_id,
generate_chromaprint(
&audio.path,
audio.sample_rate,
&TimeDelta::zero(),
&TimeDelta::zero(),
&TimeDelta::zero(),
)?,
);
}
sync_audios.sort_by_key(|sync_audio| chromaprints.get(&sync_audio.0).unwrap().len());
let base_audio = sync_audios.remove(0);
let mut start = f64::MAX;
let mut end = f64::MIN;
let mut initial_offsets = HashMap::new();
for audio in &sync_audios {
debug!(
"Initial comparison of format {} to {}",
audio.0, &base_audio.0
);
let (lhs_ranges, rhs_ranges) = compare_chromaprints(
chromaprints.get(&base_audio.0).unwrap(),
chromaprints.get(&audio.0).unwrap(),
sync_tolerance,
);
if lhs_ranges.is_empty() || rhs_ranges.is_empty() {
bail!(
"Failed to sync videos, couldn't find matching audio parts between format {} and {}",
base_audio.0 + 1,
audio.0 + 1
);
}
let lhs_range = lhs_ranges[0];
let rhs_range = rhs_ranges[0];
start = start.min(lhs_range.start);
end = end.max(lhs_range.end);
start = start.min(rhs_range.start);
end = end.max(rhs_range.end);
let offset = TimeDelta::milliseconds(((rhs_range.start - lhs_range.start) * 1000.0) as i64);
initial_offsets.insert(audio.0, TimeDelta::zero().checked_sub(&offset).unwrap());
debug!(
"Found initial offset of {}ms ({} - {} {}s) ({} - {} {}s) for format {} to {}",
offset.num_milliseconds(),
lhs_range.start,
lhs_range.end,
lhs_range.end - lhs_range.start,
rhs_range.start,
rhs_range.end,
rhs_range.end - rhs_range.start,
audio.0,
base_audio.0
);
}
debug!(
"Found matching audio parts at {} - {}, narrowing search",
start, end
);
let start = TimeDelta::milliseconds((start * 1000.0) as i64 - 20000);
let end = TimeDelta::milliseconds((end * 1000.0) as i64 + 20000);
for sync_audio in &sync_audios {
let chromaprint = generate_chromaprint(
sync_audio.1,
sync_audio.2,
&start,
&end,
initial_offsets.get(&sync_audio.0).unwrap(),
)?;
chromaprints.insert(sync_audio.0, chromaprint);
}
let mut runs: HashMap<usize, i64> = HashMap::new();
// 2^precision sub-frame search steps (integer exponentiation, not XOR)
let iterator_range_limits: i64 = 2_i64.pow(sync_precision);
for i in -iterator_range_limits..=iterator_range_limits {
let base_offset = TimeDelta::milliseconds(
((0.128 / iterator_range_limits as f64 * i as f64) * 1000.0) as i64,
);
chromaprints.insert(
base_audio.0,
generate_chromaprint(base_audio.1, base_audio.2, &start, &end, &base_offset)?,
);
for audio in &sync_audios {
let initial_offset = initial_offsets.get(&audio.0).copied().unwrap();
let offset = find_offset(
(&base_audio.0, chromaprints.get(&base_audio.0).unwrap()),
&base_offset,
(&audio.0, chromaprints.get(&audio.0).unwrap()),
&initial_offset,
&start,
sync_tolerance,
);
if offset.is_none() {
continue;
}
let offset = offset.unwrap();
result.insert(
audio.0,
result
.get(&audio.0)
.copied()
.unwrap_or_default()
.checked_add(&offset)
.unwrap(),
);
runs.insert(audio.0, runs.get(&audio.0).copied().unwrap_or_default() + 1);
}
}
let mut result: HashMap<usize, TimeDelta> = result
.iter()
.map(|(format_id, offset)| {
(
*format_id,
TimeDelta::milliseconds(
offset.num_milliseconds() / runs.get(format_id).copied().unwrap(),
),
)
})
.collect();
result.insert(base_audio.0, TimeDelta::milliseconds(0));
Ok(Some(result))
}
fn find_offset(
lhs: (&usize, &Vec<u32>),
lhs_shift: &TimeDelta,
rhs: (&usize, &Vec<u32>),
rhs_shift: &TimeDelta,
start: &TimeDelta,
sync_tolerance: u32,
) -> Option<TimeDelta> {
let (lhs_ranges, rhs_ranges) = compare_chromaprints(lhs.1, rhs.1, sync_tolerance);
if lhs_ranges.is_empty() || rhs_ranges.is_empty() {
return None;
}
let lhs_range = lhs_ranges[0];
let rhs_range = rhs_ranges[0];
let offset = rhs_range.end - lhs_range.end;
let offset = TimeDelta::milliseconds((offset * 1000.0) as i64)
.checked_add(lhs_shift)?
.checked_sub(rhs_shift)?;
debug!(
"Found offset of {}ms ({} - {} {}s) ({} - {} {}s) for format {} to {}",
offset.num_milliseconds(),
lhs_range.start + start.num_milliseconds() as f64 / 1000.0,
lhs_range.end + start.num_milliseconds() as f64 / 1000.0,
lhs_range.end - lhs_range.start,
rhs_range.start + start.num_milliseconds() as f64 / 1000.0,
rhs_range.end + start.num_milliseconds() as f64 / 1000.0,
rhs_range.end - rhs_range.start,
rhs.0,
lhs.0
);
Some(offset)
}
fn generate_chromaprint(
input_file: &Path,
sample_rate: u32,
start: &TimeDelta,
end: &TimeDelta,
offset: &TimeDelta,
) -> Result<Vec<u32>> {
let mut ss_argument: &TimeDelta = &start.checked_sub(offset).unwrap();
let mut offset_argument = &TimeDelta::zero();
if *offset < TimeDelta::zero() {
ss_argument = start;
offset_argument = offset;
};
let mut printer = Fingerprinter::new(&Configuration::preset_test1());
printer.start(sample_rate, 2)?;
let mut command = Command::new("ffmpeg");
command
.arg("-hide_banner")
.arg("-y")
.args(["-ss", format_time_delta(ss_argument).as_str()]);
if end.is_zero().not() {
command.args(["-to", format_time_delta(end).as_str()]);
}
command
.args(["-itsoffset", format_time_delta(offset_argument).as_str()])
.args(["-i", input_file.to_string_lossy().to_string().as_str()])
.args(["-ac", "2"])
.args([
"-f",
if cfg!(target_endian = "big") {
"s16be"
} else {
"s16le"
},
])
.arg("-");
let mut handle = command
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?;
// the stdout is read in chunks because keeping all the raw audio data in memory would take up
// a significant amount of space
let mut stdout = handle.stdout.take().unwrap();
let mut buf: [u8; 128_000] = [0; 128_000];
while handle.try_wait()?.is_none() {
loop {
let read_bytes = stdout.read(&mut buf)?;
if read_bytes == 0 {
break;
}
let data: [i16; 64_000] = unsafe { mem::transmute(buf) };
printer.consume(&data[0..(read_bytes / 2)])
}
}
if !handle.wait()?.success() {
bail!("{}", std::io::read_to_string(handle.stderr.unwrap())?)
}
printer.finish();
return Ok(printer.fingerprint().into());
}
fn compare_chromaprints(
lhs_chromaprint: &Vec<u32>,
rhs_chromaprint: &Vec<u32>,
sync_tolerance: u32,
) -> (Vec<TimeRange>, Vec<TimeRange>) {
let lhs_inverse_index = create_inverse_index(lhs_chromaprint);
let rhs_inverse_index = create_inverse_index(rhs_chromaprint);
let mut possible_shifts = HashSet::new();
for lhs_pair in lhs_inverse_index {
let original_point = lhs_pair.0;
for i in -2..=2 {
let modified_point = (original_point as i32 + i) as u32;
if rhs_inverse_index.contains_key(&modified_point) {
let rhs_index = rhs_inverse_index.get(&modified_point).copied().unwrap();
possible_shifts.insert(rhs_index as i32 - lhs_pair.1 as i32);
}
}
}
let mut all_lhs_time_ranges = vec![];
let mut all_rhs_time_ranges = vec![];
for shift_amount in possible_shifts {
let time_range_pair = find_time_ranges(
lhs_chromaprint,
rhs_chromaprint,
shift_amount,
sync_tolerance,
);
if time_range_pair.is_none() {
continue;
}
let (mut lhs_time_ranges, mut rhs_time_ranges) = time_range_pair.unwrap();
let mut lhs_time_ranges: Vec<TimeRange> = lhs_time_ranges
.drain(..)
.filter(|time_range| {
(20.0 < (time_range.end - time_range.start))
&& ((time_range.end - time_range.start) < 180.0)
&& time_range.end > 0.0
})
.collect();
lhs_time_ranges.sort_by(|a, b| (b.end - b.start).total_cmp(&(a.end - a.start)));
let mut rhs_time_ranges: Vec<TimeRange> = rhs_time_ranges
.drain(..)
.filter(|time_range| {
(20.0 < (time_range.end - time_range.start))
&& ((time_range.end - time_range.start) < 180.0)
&& time_range.end > 0.0
})
.collect();
rhs_time_ranges.sort_by(|a, b| (b.end - b.start).total_cmp(&(a.end - a.start)));
if lhs_time_ranges.is_empty() || rhs_time_ranges.is_empty() {
continue;
}
all_lhs_time_ranges.push(lhs_time_ranges[0]);
all_rhs_time_ranges.push(rhs_time_ranges[0]);
}
all_lhs_time_ranges.sort_by(|a, b| (a.end - a.start).total_cmp(&(b.end - b.start)));
all_lhs_time_ranges.reverse();
all_rhs_time_ranges.sort_by(|a, b| (a.end - a.start).total_cmp(&(b.end - b.start)));
all_rhs_time_ranges.reverse();
(all_lhs_time_ranges, all_rhs_time_ranges)
}
fn create_inverse_index(chromaprint: &Vec<u32>) -> HashMap<u32, usize> {
let mut inverse_index = HashMap::with_capacity(chromaprint.capacity());
for (i, fingerprint) in chromaprint.iter().enumerate().take(chromaprint.capacity()) {
inverse_index.insert(*fingerprint, i);
}
inverse_index
}
fn find_time_ranges(
lhs_chromaprint: &[u32],
rhs_chromaprint: &[u32],
shift_amount: i32,
sync_tolerance: u32,
) -> Option<(Vec<TimeRange>, Vec<TimeRange>)> {
let mut lhs_shift: i32 = 0;
let mut rhs_shift: i32 = 0;
if shift_amount < 0 {
lhs_shift -= shift_amount;
} else {
rhs_shift += shift_amount;
}
let mut lhs_matching_timestamps = vec![];
let mut rhs_matching_timestamps = vec![];
let upper_limit =
cmp::min(lhs_chromaprint.len(), rhs_chromaprint.len()) as i32 - shift_amount.abs();
for i in 0..upper_limit {
let lhs_position = i + lhs_shift;
let rhs_position = i + rhs_shift;
let difference = (lhs_chromaprint[lhs_position as usize]
^ rhs_chromaprint[rhs_position as usize])
.count_ones();
if difference > sync_tolerance {
continue;
}
lhs_matching_timestamps.push(lhs_position as f64 * 0.128);
rhs_matching_timestamps.push(rhs_position as f64 * 0.128);
}
lhs_matching_timestamps.push(f64::MAX);
rhs_matching_timestamps.push(f64::MAX);
let lhs_time_ranges = timestamps_to_ranges(lhs_matching_timestamps);
lhs_time_ranges.as_ref()?;
let lhs_time_ranges = lhs_time_ranges.unwrap();
let rhs_time_ranges = timestamps_to_ranges(rhs_matching_timestamps).unwrap();
Some((lhs_time_ranges, rhs_time_ranges))
}
fn timestamps_to_ranges(mut timestamps: Vec<f64>) -> Option<Vec<TimeRange>> {
if timestamps.is_empty() {
return None;
}
timestamps.sort_by(|a, b| a.total_cmp(b));
let mut time_ranges = vec![];
let mut current_range = TimeRange {
start: timestamps[0],
end: timestamps[0],
};
for i in 0..timestamps.len() - 1 {
let current = timestamps[i];
let next = timestamps[i + 1];
if next - current <= 1.0 {
current_range.end = next;
continue;
}
time_ranges.push(current_range);
current_range.start = next;
current_range.end = next;
}
if !time_ranges.is_empty() {
Some(time_ranges)
} else {
None
}
}
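An illustrative test (not from the source) for the range-building step of the fingerprint comparison, which is where the "matching audio parts" in the debug output come from:

#[cfg(test)]
mod sync_sketch {
    use super::timestamps_to_ranges;

    #[test]
    fn gaps_over_one_second_split_ranges() {
        // matching fingerprint frames are 0.128 s apart; a gap of more than one
        // second starts a new range, and the f64::MAX sentinel (as pushed in
        // `find_time_ranges`) flushes the final real range
        let timestamps = vec![0.0, 0.128, 0.256, 30.0, 30.128, f64::MAX];
        let ranges = timestamps_to_ranges(timestamps).unwrap();
        assert_eq!(ranges.len(), 2);
        assert_eq!((ranges[0].start, ranges[0].end), (0.0, 0.256));
        assert_eq!((ranges[1].start, ranges[1].end), (30.0, 30.128));
    }
}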

View file

@ -0,0 +1,46 @@
use anyhow::{bail, Result};
use crunchyroll_rs::media::{Resolution, Stream, StreamData};
use crunchyroll_rs::Locale;
pub async fn stream_data_from_stream(
stream: &Stream,
resolution: &Resolution,
hardsub_subtitle: Option<Locale>,
) -> Result<Option<(StreamData, StreamData, bool)>> {
let (hardsub_locale, mut contains_hardsub) = if hardsub_subtitle.is_some() {
(hardsub_subtitle, true)
} else {
(None, false)
};
let (mut videos, mut audios) = match stream.stream_data(hardsub_locale).await {
Ok(data) => data,
Err(e) => {
// the error variant is only `crunchyroll_rs::error::Error::Input` when the requested
// hardsub is not available
if let crunchyroll_rs::error::Error::Input { .. } = e {
contains_hardsub = false;
stream.stream_data(None).await?
} else {
bail!(e)
}
}
}
.unwrap();
if videos.iter().any(|v| v.drm.is_some()) || audios.iter().any(|v| v.drm.is_some()) {
bail!("Stream is DRM protected")
}
videos.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
audios.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
let video_variant = match resolution.height {
u64::MAX => Some(videos.into_iter().next().unwrap()),
u64::MIN => Some(videos.into_iter().last().unwrap()),
_ => videos
.into_iter()
.find(|v| resolution.height == v.resolution().unwrap().height),
};
Ok(video_variant.map(|v| (v, audios.first().unwrap().clone(), contains_hardsub)))
}

View file

@ -1,203 +0,0 @@
.TH crunchyroll-go 1 "21 March 2022" "crunchyroll-go" "Crunchyroll Downloader"
.SH NAME
crunchyroll-go - A cli for downloading videos and entire series from crunchyroll.
.SH SYNOPSIS
crunchyroll-go [\fB-h\fR] [\fB-p\fR \fIPROXY\fR] [\fB-q\fR] [\fB-v\fR]
.br
crunchyroll-go help
.br
crunchyroll-go login [\fB--persistent\fR] [\fB--session-id\fR \fISESSION_ID\fR] [\fIusername\fR, \fIpassword\fR]
.br
crunchyroll-go download [\fB-a\fR \fIAUDIO\fR] [\fB-s\fR \fISUBTITLE\fR] [\fB-d\fR \fIDIRECTORY\fR] [\fB-o\fR \fIOUTPUT\fR] [\fB-r\fR \fIRESOLUTION\fR] [\fB-g\fR \fIGOROUTINES\fR] \fIURLs...\fR
.br
crunchyroll-go archive [\fB-l\fR \fILANGUAGE\fR] [\fB-d\fR \fIDIRECTORY\fR] [\fB-o\fR \fIOUTPUT\fR] [\fB-m\fR \fIMERGE BEHAVIOR\fR] [\fB-c\fR \fICOMPRESS\fR] [\fB-r\fR \fIRESOLUTION\fR] [\fB-g\fR \fIGOROUTINES\fR] \fIURLs...\fR
.SH DESCRIPTION
.TP
With \fBcrunchyroll-go\fR you can easily download videos and entire series from crunchyroll.
.TP
Note that you need a \fBcrunchyroll premium\fR account in order to use this tool!
.SH GENERAL OPTIONS
.TP
These options can be passed to every action.
.TP
\fB-h, --help\fR
Shows help.
.TP
\fB-p, --proxy PROXY\fR
Sets a proxy through which all traffic will be routed.
.TP
\fB-q, --quiet\fR
Disables all output.
.TP
\fB-v, --verbose\fR
Shows verbose output.
.SH LOGIN COMMAND
This command logs in to crunchyroll and stores the session id or credentials on the drive. This needs to be done before calling other commands since they need a valid login to operate.
.TP
\fB--persistent\fR
Stores the given credentials permanently on the drive. The *nix path for it is $HOME/.config/crunchy.
.br
NOTE: The credentials are stored in plain text. If you do not use \fB--session-id\fR, your credentials are used (if you do not use the \fB--persistent\fR flag, only a session id gets stored, regardless of whether you log in with username/password or a session id).
.TP
\fB--session-id SESSION_ID\fR
Login via a session id (which can be extracted from a crunchyroll browser cookie) instead of using username and password.
.SH DOWNLOAD COMMAND
A command to simply download videos. The output file is stored as a \fI.ts\fR file. \fIffmpeg\fR has to be installed if you want to change the format the videos are stored in.
.TP
\fB-a, --audio AUDIO\fR
Forces to download videos with the given audio locale. If no video with this audio locale is available, nothing will be downloaded. Available locales are: ja-JP, en-US, es-419, es-ES, fr-FR, pt-PT, pt-BR, it-IT, de-DE, ru-RU, ar-SA.
.TP
\fB-s, --subtitle SUBTITLE\fR
Forces to download the videos with subtitles in the given locale / language. If no video with this subtitle locale is available, nothing will be downloaded. Available locales are: ja-JP, en-US, es-419, es-ES, fr-FR, pt-PT, pt-BR, it-IT, de-DE, ru-RU, ar-SA.
.TP
\fB-d, --directory DIRECTORY\fR
The directory to download all files to.
.TP
\fB-o, --output OUTPUT\fR
Name of the output file. Formatting is also supported, so if the name contains one or more of the following things, they will get replaced.
{title} » Title of the video.
{series_name} » Name of the series.
{season_name} » Name of the season.
{season_number} » Number of the season.
{episode_number} » Number of the episode.
{resolution} » Resolution of the video.
{fps} » Frame Rate of the video.
{audio} » Audio locale of the video.
{subtitle} » Subtitle locale of the video.
.TP
\fB-r, resolution RESOLUTION\fR
The video resolution. Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or "common-use" words (e.g. best).
Available pixels: 1920x1080, 1280x720, 640x480, 480x360, 426x240.
Available abbreviations: 1080p, 720p, 480p, 360p, 240p.
Available common-use words: best (best available resolution), worst (worst available resolution).
.TP
\fB-g, --goroutines GOROUTINES\fR
Sets the number of parallel downloads for the segments the final video is made of. Default is the number of cores the computer has.
.SH ARCHIVE COMMAND
This command behaves like \fBdownload\fR besides the fact that it requires \fIffmpeg\fR and stores the output only to .mkv files.
.TP
\fB-l, --language LANGUAGE\fR
Audio locales which should be downloaded. Can be used multiple times. Available locales are: ja-JP, en-US, es-419, es-ES, fr-FR, pt-PT, pt-BR, it-IT, de-DE, ru-RU, ar-SA.
.TP
\fB-d, --directory DIRECTORY\fR
The directory to download all files to.
.TP
\fB-o, --output OUTPUT\fR
Name of the output file. Formatting is also supported: if the name contains one or more of the following placeholders, they will be replaced.
{title} » Title of the video.
{series_name} » Name of the series.
{season_name} » Name of the season.
{season_number} » Number of the season.
{episode_number} » Number of the episode.
{resolution} » Resolution of the video.
{fps} » Frame Rate of the video.
{audio} » Audio locale of the video.
{subtitle} » Subtitle locale of the video.
.TP
\fB-m, --merge MERGE BEHAVIOR\fR
Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio' and 'video'. \fBaudio\fR stores one video and only the audio tracks of all other languages, \fBvideo\fR stores all videos of the given languages together with their audio, and \fBauto\fR (the default) only behaves like \fBvideo\fR if the lengths of two videos differ (and only for those two videos), otherwise like \fBaudio\fR.
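For example (illustrative, reusing the url from the EXAMPLES section), the following call keeps Japanese and German audio but stores only one video track:
.br
$ crunchyroll-go archive -l ja-JP -l de-DE -m audio https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575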
.TP
\fB-c, --compress COMPRESS\fR
If set, all output will be compressed into an archive (every url generates a new one). This flag sets the name of the compressed output file. The file extension specifies the compression algorithm. The following algorithms are supported: gzip, tar, zip.
Just like \fB--output\fR the name can be formatted. But the only option available here is \fI{series_name}\fR.
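For example (illustrative), the following call compresses the first two episodes into one zip archive named after the series:
.br
$ crunchyroll-go archive -c "{series_name}.zip" https://www.crunchyroll.com/darling-in-the-franxx[E1-E2]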
.TP
\fB-r, --resolution RESOLUTION\fR
The video resolution. Can be specified either via pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or "common-use" words (e.g. best).
Available pixels: 1920x1080, 1280x720, 640x480, 480x360, 426x240.
Available abbreviations: 1080p, 720p, 480p, 360p, 240p.
Available common-use words: best (best available resolution), worst (worst available resolution).
.TP
\fB-g, --goroutines GOROUTINES\fR
Sets the number of parallel downloads for the segments the final video is made of. Default is the number of cores the computer has.
.SH URL OPTIONS
If you want to download only specific episodes of a series, you can either pass every single episode url to the downloader (which is fine for 1 - 3 episodes) or use filtering.
It works quite simply: just put a pattern surrounded by square brackets at the end of the url of the anime you want to download. A season and / or episode, as well as a range of episodes to download, can be specified.
Use the list below to get a better overview of what is possible:
...[E5] - Download the fifth episode.
...[S1] - Download the full first season.
...[-S2] - Download all seasons up to and including season 2.
...[S3E4-] - Download all episodes from and including season 3, episode 4.
...[S1E4-S3] - Download all episodes from and including season 1, episode 4, until and including season 3.
In practice, it would look like this: \fIhttps://beta.crunchyroll.com/series/12345678/example[S1E5-S3E2]\fR.
\fBS\fR followed by a number indicates the season number, \fBE\fR followed by a number indicates an episode number. It doesn't matter if \fBS\fR, \fBE\fR or both are missing; theoretically \fB[-]\fR is a valid pattern too. Note that \fBS\fR must always come before \fBE\fR when both are used.
.SH EXAMPLES
Log in via crunchyroll account email and password.
.br
$ crunchyroll-go login user@example.com 12345678
Download an episode normally. Your system locale will be used for the video's audio.
.br
$ crunchyroll-go download https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575
Download an episode in 720p and name it 'darling.mp4'. Note that you need \fBffmpeg\fR to save files which do not have '.ts' as file extension.
.br
$ crunchyroll-go download -o "darling.mp4" -r 720p https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575
Download episodes 3 to 5 with Japanese audio and English (US) subtitles.
.br
$ crunchyroll-go download -a ja-JP -s en-US https://www.crunchyroll.com/darling-in-the-franxx[E3-E5]
Store the episode in a .mkv file.
.br
$ crunchyroll-go archive https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575
Download the first two episodes of Darling in the FranXX and store them compressed in an archive file.
.br
$ crunchyroll-go archive -c "ditf.tar.gz" https://www.crunchyroll.com/darling-in-the-franxx[E1-E2]
.SH BUGS
If you notice any bug or want an enhancement, feel free to create a new issue or pull request in the GitHub repository.
.SH AUTHOR
ByteDream
.br
Source: https://github.com/ByteDream/crunchyroll-go
.SH COPYRIGHT
Copyright (C) 2022 ByteDream
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

@@ -1,404 +0,0 @@
package crunchyroll
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"regexp"
"strconv"
)
// LOCALE represents a locale / language
type LOCALE string
const (
JP LOCALE = "ja-JP"
US = "en-US"
LA = "es-419"
ES = "es-ES"
FR = "fr-FR"
PT = "pt-PT"
BR = "pt-BR"
IT = "it-IT"
DE = "de-DE"
RU = "ru-RU"
AR = "ar-SA"
)
type Crunchyroll struct {
// Client is the http.Client to perform all requests over
Client *http.Client
// Context can be used to stop requests with Client and is context.Background by default
Context context.Context
// Locale specifies in which language all results should be returned / requested
Locale LOCALE
// SessionID is the crunchyroll session id which was used for authentication
SessionID string
// Config stores parameters which are needed by some api calls
Config struct {
TokenType string
AccessToken string
CountryCode string
Premium bool
Channel string
Policy string
Signature string
KeyPairID string
AccountID string
ExternalID string
MaturityRating string
}
// If cache is true, internal caching is enabled
cache bool
}
// LoginWithCredentials logs in via crunchyroll username or email and password
func LoginWithCredentials(user string, password string, locale LOCALE, client *http.Client) (*Crunchyroll, error) {
sessionIDEndpoint := fmt.Sprintf("https://api.crunchyroll.com/start_session.0.json?version=1.0&access_token=%s&device_type=%s&device_id=%s",
"LNDJgOit5yaRIWN", "com.crunchyroll.windows.desktop", "Az2srGnChW65fuxYz2Xxl1GcZQgtGgI")
sessResp, err := client.Get(sessionIDEndpoint)
if err != nil {
return nil, err
}
defer sessResp.Body.Close()
var data map[string]interface{}
body, _ := io.ReadAll(sessResp.Body)
json.Unmarshal(body, &data)
sessionID := data["data"].(map[string]interface{})["session_id"].(string)
loginEndpoint := "https://api.crunchyroll.com/login.0.json"
authValues := url.Values{}
authValues.Set("session_id", sessionID)
authValues.Set("account", user)
authValues.Set("password", password)
client.Post(loginEndpoint, "application/x-www-form-urlencoded", bytes.NewBufferString(authValues.Encode()))
return LoginWithSessionID(sessionID, locale, client)
}
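// Illustrative usage sketch, not part of the original file: from a consuming
// package, logging in with credentials might look roughly like this. The email,
// password and locale below are placeholder values.
//
//	crunchy, err := crunchyroll.LoginWithCredentials("user@example.com", "12345678", crunchyroll.US, http.DefaultClient)
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println("premium account:", crunchy.Config.Premium)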
// LoginWithSessionID logs in via a crunchyroll session id.
// Session ids are automatically generated as a cookie when visiting https://www.crunchyroll.com
func LoginWithSessionID(sessionID string, locale LOCALE, client *http.Client) (*Crunchyroll, error) {
crunchy := &Crunchyroll{
Client: client,
Context: context.Background(),
Locale: locale,
SessionID: sessionID,
cache: true,
}
var endpoint string
var err error
var resp *http.Response
var jsonBody map[string]interface{}
// start session
endpoint = fmt.Sprintf("https://api.crunchyroll.com/start_session.0.json?session_id=%s",
sessionID)
resp, err = client.Get(endpoint)
if err != nil {
return nil, err
}
defer resp.Body.Close()
json.NewDecoder(resp.Body).Decode(&jsonBody)
if _, ok := jsonBody["message"]; ok {
return nil, errors.New("invalid session id")
}
data := jsonBody["data"].(map[string]interface{})
crunchy.Config.CountryCode = data["country_code"].(string)
user := data["user"]
if user == nil {
return nil, errors.New("invalid session id, user is not logged in")
}
if user.(map[string]interface{})["premium"] == "" {
crunchy.Config.Premium = false
crunchy.Config.Channel = "-"
} else {
crunchy.Config.Premium = true
crunchy.Config.Channel = "crunchyroll"
}
var etpRt string
for _, cookie := range resp.Cookies() {
if cookie.Name == "etp_rt" {
etpRt = cookie.Value
break
}
}
// token
endpoint = "https://beta-api.crunchyroll.com/auth/v1/token"
grantType := url.Values{}
grantType.Set("grant_type", "etp_rt_cookie")
authRequest, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewBufferString(grantType.Encode()))
if err != nil {
return nil, err
}
authRequest.Header.Add("Authorization", "Basic bm9haWhkZXZtXzZpeWcwYThsMHE6")
authRequest.Header.Add("Content-Type", "application/x-www-form-urlencoded")
authRequest.AddCookie(&http.Cookie{
Name: "session_id",
Value: sessionID,
})
authRequest.AddCookie(&http.Cookie{
Name: "etp_rt",
Value: etpRt,
})
resp, err = client.Do(authRequest)
if err != nil {
return nil, err
}
defer resp.Body.Close()
json.NewDecoder(resp.Body).Decode(&jsonBody)
crunchy.Config.TokenType = jsonBody["token_type"].(string)
crunchy.Config.AccessToken = jsonBody["access_token"].(string)
// index
endpoint = "https://beta-api.crunchyroll.com/index/v2"
resp, err = crunchy.request(endpoint)
if err != nil {
return nil, err
}
defer resp.Body.Close()
json.NewDecoder(resp.Body).Decode(&jsonBody)
cms := jsonBody["cms"].(map[string]interface{})
crunchy.Config.Policy = cms["policy"].(string)
crunchy.Config.Signature = cms["signature"].(string)
crunchy.Config.KeyPairID = cms["key_pair_id"].(string)
// me
endpoint = "https://beta-api.crunchyroll.com/accounts/v1/me"
resp, err = crunchy.request(endpoint)
if err != nil {
return nil, err
}
defer resp.Body.Close()
json.NewDecoder(resp.Body).Decode(&jsonBody)
crunchy.Config.AccountID = jsonBody["account_id"].(string)
crunchy.Config.ExternalID = jsonBody["external_id"].(string)
//profile
endpoint = "https://beta-api.crunchyroll.com/accounts/v1/me/profile"
resp, err = crunchy.request(endpoint)
if err != nil {
return nil, err
}
defer resp.Body.Close()
json.NewDecoder(resp.Body).Decode(&jsonBody)
crunchy.Config.MaturityRating = jsonBody["maturity_rating"].(string)
return crunchy, nil
}
// request is a base function which handles api requests
func (c *Crunchyroll) request(endpoint string) (*http.Response, error) {
req, err := http.NewRequest(http.MethodGet, endpoint, nil)
if err != nil {
return nil, err
}
req.Header.Add("Authorization", fmt.Sprintf("%s %s", c.Config.TokenType, c.Config.AccessToken))
resp, err := c.Client.Do(req)
if err == nil {
bodyAsBytes, _ := io.ReadAll(resp.Body)
defer resp.Body.Close()
if resp.StatusCode == http.StatusUnauthorized {
return nil, &AccessError{
URL: endpoint,
Body: bodyAsBytes,
}
} else {
var errStruct struct {
Message string `json:"message"`
}
json.NewDecoder(bytes.NewBuffer(bodyAsBytes)).Decode(&errStruct)
if errStruct.Message != "" {
return nil, &AccessError{
URL: endpoint,
Body: bodyAsBytes,
Message: errStruct.Message,
}
}
}
resp.Body = io.NopCloser(bytes.NewBuffer(bodyAsBytes))
}
return resp, err
}
// IsCaching returns if data gets cached or not.
// See SetCaching for more information
func (c *Crunchyroll) IsCaching() bool {
return c.cache
}
// SetCaching enables or disables internal caching of requests made.
// Caching is enabled by default.
// If it is disabled, data which was already cached is still returned.
// The best way to prevent this is to create a completely new Crunchyroll struct
func (c *Crunchyroll) SetCaching(caching bool) {
c.cache = caching
}
// Search searches a query and returns all found series and movies within the given limit
func (c *Crunchyroll) Search(query string, limit uint) (s []*Series, m []*Movie, err error) {
searchEndpoint := fmt.Sprintf("https://beta-api.crunchyroll.com/content/v1/search?q=%s&n=%d&type=&locale=%s",
query, limit, c.Locale)
resp, err := c.request(searchEndpoint)
if err != nil {
return nil, nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
json.NewDecoder(resp.Body).Decode(&jsonBody)
for _, item := range jsonBody["items"].([]interface{}) {
item := item.(map[string]interface{})
if item["total"].(float64) > 0 {
switch item["type"] {
case "series":
for _, series := range item["items"].([]interface{}) {
series2 := &Series{
crunchy: c,
}
if err := decodeMapToStruct(series, series2); err != nil {
return nil, nil, err
}
if err := decodeMapToStruct(series.(map[string]interface{})["series_metadata"].(map[string]interface{}), series2); err != nil {
return nil, nil, err
}
s = append(s, series2)
}
case "movie_listing":
for _, movie := range item["items"].([]interface{}) {
movie2 := &Movie{
crunchy: c,
}
if err := decodeMapToStruct(movie, movie2); err != nil {
return nil, nil, err
}
m = append(m, movie2)
}
}
}
}
return s, m, nil
}
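// Illustrative usage sketch, not part of the original file: searching the first
// 5 hits might look roughly like this, assuming crunchy is a logged-in
// *Crunchyroll instance; the query string is a placeholder.
//
//	series, movies, err := crunchy.Search("darling in the franxx", 5)
//	if err != nil {
//		log.Fatal(err)
//	}
//	for _, s := range series {
//		fmt.Println(s.Title)
//	}
//	fmt.Println(len(movies), "movie results")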
// FindVideoByName finds a Video (Season or Movie) by its name.
// Use this in combination with ParseVideoURL and hand over the corresponding results
// to this function.
func (c *Crunchyroll) FindVideoByName(seriesName string) (Video, error) {
s, m, err := c.Search(seriesName, 1)
if err != nil {
return nil, err
}
if len(s) > 0 {
return s[0], nil
} else if len(m) > 0 {
return m[0], nil
}
return nil, errors.New("no series or movie could be found")
}
// FindEpisodeByName finds an episode by its crunchyroll series name and episode title.
// Use this in combination with ParseEpisodeURL and hand over the corresponding results
// to this function.
func (c *Crunchyroll) FindEpisodeByName(seriesName, episodeTitle string) ([]*Episode, error) {
video, err := c.FindVideoByName(seriesName)
if err != nil {
return nil, err
}
seasons, err := video.(*Series).Seasons()
if err != nil {
return nil, err
}
var matchingEpisodes []*Episode
for _, season := range seasons {
episodes, err := season.Episodes()
if err != nil {
return nil, err
}
for _, episode := range episodes {
if episode.SlugTitle == episodeTitle {
matchingEpisodes = append(matchingEpisodes, episode)
}
}
}
return matchingEpisodes, nil
}
// ParseVideoURL tries to extract the crunchyroll series / movie name out of the given url
func ParseVideoURL(url string) (seriesName string, ok bool) {
pattern := regexp.MustCompile(`(?m)^https?://(www\.)?crunchyroll\.com(/\w{2}(-\w{2})?)?/(?P<series>[^/]+)/?$`)
if urlMatch := pattern.FindAllStringSubmatch(url, -1); len(urlMatch) != 0 {
groups := regexGroups(urlMatch, pattern.SubexpNames()...)
seriesName = groups["series"]
if seriesName != "" {
ok = true
}
}
return
}
// ParseEpisodeURL tries to extract the crunchyroll series name, title, episode number and web id out of the given crunchyroll url
// Note that the episode number can be misleading. For example if an episode has the episode number 23.5 (slime isekai)
// the episode number will be 235
func ParseEpisodeURL(url string) (seriesName, title string, episodeNumber int, webId int, ok bool) {
pattern := regexp.MustCompile(`(?m)^https?://(www\.)?crunchyroll\.com(/\w{2}(-\w{2})?)?/(?P<series>[^/]+)/episode-(?P<number>\d+)-(?P<title>.+)-(?P<webId>\d+).*`)
if urlMatch := pattern.FindAllStringSubmatch(url, -1); len(urlMatch) != 0 {
groups := regexGroups(urlMatch, pattern.SubexpNames()...)
seriesName = groups["series"]
episodeNumber, _ = strconv.Atoi(groups["number"])
title = groups["title"]
webId, _ = strconv.Atoi(groups["webId"])
if seriesName != "" && title != "" && webId != 0 {
ok = true
}
}
return
}
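// Illustrative usage sketch, not part of the original file: parsing a classic
// episode url could look roughly like this; the url is the same one used in the
// man page examples.
//
//	series, title, number, webId, ok := crunchyroll.ParseEpisodeURL("https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575")
//	if ok {
//		fmt.Println(series, title, number, webId)
//	}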
// ParseBetaSeriesURL tries to extract the season id of the given crunchyroll beta url, pointing to a season
func ParseBetaSeriesURL(url string) (seasonId string, ok bool) {
pattern := regexp.MustCompile(`(?m)^https?://(www\.)?beta\.crunchyroll\.com/(\w{2}/)?series/(?P<seasonId>\w+).*`)
if urlMatch := pattern.FindAllStringSubmatch(url, -1); len(urlMatch) != 0 {
groups := regexGroups(urlMatch, pattern.SubexpNames()...)
seasonId = groups["seasonId"]
ok = true
}
return
}
// ParseBetaEpisodeURL tries to extract the episode id of the given crunchyroll beta url, pointing to an episode
func ParseBetaEpisodeURL(url string) (episodeId string, ok bool) {
pattern := regexp.MustCompile(`(?m)^https?://(www\.)?beta\.crunchyroll\.com/(\w{2}/)?watch/(?P<episodeId>\w+).*`)
if urlMatch := pattern.FindAllStringSubmatch(url, -1); len(urlMatch) != 0 {
groups := regexGroups(urlMatch, pattern.SubexpNames()...)
episodeId = groups["episodeId"]
ok = true
}
return
}

@@ -1,395 +0,0 @@
package crunchyroll
import (
"bytes"
"context"
"crypto/aes"
"crypto/cipher"
"fmt"
"github.com/grafov/m3u8"
"io"
"math"
"net/http"
"os"
"os/exec"
"path/filepath"
"strings"
"sync"
"sync/atomic"
"time"
)
// NewDownloader creates a downloader with default settings which should
// fit most needs
func NewDownloader(context context.Context, writer io.Writer, goroutines int, onSegmentDownload func(segment *m3u8.MediaSegment, current, total int, file *os.File) error) Downloader {
tmp, _ := os.MkdirTemp("", "crunchy_")
return Downloader{
Writer: writer,
TempDir: tmp,
DeleteTempAfter: true,
Context: context,
Goroutines: goroutines,
OnSegmentDownload: onSegmentDownload,
}
}
// Downloader is used to download Format's
type Downloader struct {
// The output is all written to Writer
Writer io.Writer
// TempDir is the directory where the temporary segment files should be stored.
// The files will be placed directly into the root of the directory.
// If empty a random temporary directory on the system's default tempdir
// will be created.
// If the directory does not exist, it will be created
TempDir string
// If DeleteTempAfter is true, the temp directory gets deleted afterwards.
// Note that in case of a hard signal exit (os.Interrupt, ...) the directory
// will NOT be deleted. In such situations try to catch the signal and
// cancel Context
DeleteTempAfter bool
// Context to control the download process with.
// There is a tiny delay between canceling the context and the actual stop of the
// process. So it is not recommended to stop the program immediately after calling
// the cancel function. It is better to cancel it and then exit the program
// when Format.Download throws an error. See the signal handling section in
// cmd/crunchyroll-go/cmd/download.go for an example
Context context.Context
// Goroutines is the number of goroutines to download segments with
Goroutines int
// A method to call when a segment was downloaded.
// Note that the segments are downloaded asynchronously (depending on the count of
// Goroutines) and the function gets called asynchronously too, so for example it is
// first called on segment 1, then segment 254, then segment 3 and so on
OnSegmentDownload func(segment *m3u8.MediaSegment, current, total int, file *os.File) error
// If LockOnSegmentDownload is true, only one OnSegmentDownload function can be called at
// once. Normally (because of the use of goroutines while downloading) multiple could get
// called simultaneously
LockOnSegmentDownload bool
// If FFmpegOpts is not nil, ffmpeg will be used to merge and convert files.
// The given opts will be used as ffmpeg parameters while merging.
//
// If Writer is *os.File and -f (which sets the output format) is not specified, the output
// format will be retrieved by its file ending. If this is not the case and -f is not given,
// the output format will be mpegts / mpeg transport stream.
// Execute 'ffmpeg -muxers' to see all available output formats.
FFmpegOpts []string
}
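// Illustrative usage sketch, not part of the original file: wiring a Format into
// a Downloader might look roughly like this, assuming format is a *Format
// obtained from a Stream. The output filename and goroutine count are arbitrary.
//
//	out, err := os.Create("episode.ts")
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer out.Close()
//	dl := crunchyroll.NewDownloader(context.Background(), out, 4, nil)
//	if err := format.Download(dl); err != nil {
//		log.Fatal(err)
//	}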
// download's the given format
func (d Downloader) download(format *Format) error {
if err := format.InitVideo(); err != nil {
return err
}
if _, err := os.Stat(d.TempDir); os.IsNotExist(err) {
if err = os.Mkdir(d.TempDir, 0700); err != nil {
return err
}
}
if d.DeleteTempAfter {
defer os.RemoveAll(d.TempDir)
}
files, err := d.downloadSegments(format)
if err != nil {
return err
}
if d.FFmpegOpts == nil {
return d.mergeSegments(files)
} else {
return d.mergeSegmentsFFmpeg(files)
}
}
// mergeSegments reads every file in tempDir and writes their content to Downloader.Writer.
// The given output file gets created or overwritten if already existing
func (d Downloader) mergeSegments(files []string) error {
for _, file := range files {
select {
case <-d.Context.Done():
return d.Context.Err()
default:
f, err := os.Open(file)
if err != nil {
return err
}
if _, err = io.Copy(d.Writer, f); err != nil {
f.Close()
return err
}
f.Close()
}
}
return nil
}
// mergeSegmentsFFmpeg reads every file in tempDir and merges their content to the outputFile
// with ffmpeg (https://ffmpeg.org/).
// The given output file gets created or overwritten if already existing
func (d Downloader) mergeSegmentsFFmpeg(files []string) error {
list, err := os.Create(filepath.Join(d.TempDir, "list.txt"))
if err != nil {
return err
}
for _, file := range files {
if _, err = fmt.Fprintf(list, "file '%s'\n", file); err != nil {
list.Close()
return err
}
}
list.Close()
// predefined options ... custom options ... predefined output filename
command := []string{
"-y",
"-f", "concat",
"-safe", "0",
"-i", list.Name(),
"-c", "copy",
}
if d.FFmpegOpts != nil {
command = append(command, d.FFmpegOpts...)
}
var tmpfile string
if _, ok := d.Writer.(*io.PipeWriter); !ok {
if file, ok := d.Writer.(*os.File); ok {
tmpfile = file.Name()
}
}
if filepath.Ext(tmpfile) == "" {
// checks if the -f flag is set (overwrites the output format)
var hasF bool
for _, opts := range d.FFmpegOpts {
if strings.TrimSpace(opts) == "-f" {
hasF = true
break
}
}
if !hasF {
command = append(command, "-f", "matroska")
f, err := os.CreateTemp(d.TempDir, "")
if err != nil {
return err
}
f.Close()
tmpfile = f.Name()
}
}
command = append(command, tmpfile)
var errBuf bytes.Buffer
cmd := exec.CommandContext(d.Context, "ffmpeg",
command...)
cmd.Stderr = &errBuf
if err = cmd.Run(); err != nil {
if errBuf.Len() > 0 {
return fmt.Errorf("%s", errBuf.String())
} else {
return err
}
}
if f, ok := d.Writer.(*os.File); !ok || f.Name() != tmpfile {
file, err := os.Open(tmpfile)
if err != nil {
return err
}
defer file.Close()
// return the copy error directly, otherwise it would be lost to the shadowed err variable
if _, err = io.Copy(d.Writer, file); err != nil {
return err
}
}
return nil
}
// downloadSegments downloads every mpeg transport stream segment to a given
// directory (more information below).
// After every segment download onSegmentDownload will be called with:
// the downloaded segment, the current position, the total size of segments to download,
// the file where the segment content was written to an error (if occurred).
// The filename is always <number of downloaded segment>.ts
//
// Short explanation:
// The actual crunchyroll video is split up in multiple segments (or video files) which
// have to be downloaded and merged after to generate a single video file.
// And this function just downloads each of this segment into the given directory.
// See https://en.wikipedia.org/wiki/MPEG_transport_stream for more information
func (d Downloader) downloadSegments(format *Format) ([]string, error) {
if err := format.InitVideo(); err != nil {
return nil, err
}
var wg sync.WaitGroup
var lock sync.Mutex
chunkSize := int(math.Ceil(float64(format.Video.Chunklist.Count()) / float64(d.Goroutines)))
// when a onSegmentDownload call returns an error, this context will be set cancelled and stop all goroutines
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
// receives the decrypt block and iv from the first segment.
// in my tests, only the first segment has this data specified, so the decryption data from this first segment will be used for every other segment too
block, iv, err := getCrypt(format, format.Video.Chunklist.Segments[0])
if err != nil {
return nil, err
}
var total int32
for i := 0; i < int(format.Video.Chunklist.Count()); i += chunkSize {
wg.Add(1)
end := i + chunkSize
if end > int(format.Video.Chunklist.Count()) {
end = int(format.Video.Chunklist.Count())
}
i := i
go func() {
defer wg.Done()
for j, segment := range format.Video.Chunklist.Segments[i:end] {
select {
case <-d.Context.Done():
case <-ctx.Done():
return
default:
var file *os.File
for k := 0; k < 3; k++ {
filename := filepath.Join(d.TempDir, fmt.Sprintf("%d.ts", i+j))
file, err = d.downloadSegment(format, segment, filename, block, iv)
if err == nil {
break
}
if k == 2 {
// file may be nil here if downloadSegment failed, so guard the close
if file != nil {
file.Close()
}
cancel()
return
}
select {
case <-d.Context.Done():
case <-ctx.Done():
if file != nil {
file.Close()
}
return
case <-time.After(5 * time.Duration(k) * time.Second):
// sleep if an error occurs. very useful because sometimes the connection times out
}
}
if d.OnSegmentDownload != nil {
if d.LockOnSegmentDownload {
lock.Lock()
}
if err = d.OnSegmentDownload(segment, int(atomic.AddInt32(&total, 1)), int(format.Video.Chunklist.Count()), file); err != nil {
if d.LockOnSegmentDownload {
lock.Unlock()
}
file.Close()
return
}
if d.LockOnSegmentDownload {
lock.Unlock()
}
}
file.Close()
}
}
}()
}
wg.Wait()
select {
case <-d.Context.Done():
return nil, d.Context.Err()
case <-ctx.Done():
return nil, err
default:
var files []string
for i := 0; i < int(total); i++ {
files = append(files, filepath.Join(d.TempDir, fmt.Sprintf("%d.ts", i)))
}
return files, nil
}
}
// getCrypt extracts the key and iv of a m3u8 segment and converts it into a cipher.Block and an iv byte sequence
func getCrypt(format *Format, segment *m3u8.MediaSegment) (block cipher.Block, iv []byte, err error) {
var resp *http.Response
resp, err = format.crunchy.Client.Get(segment.Key.URI)
if err != nil {
return nil, nil, err
}
defer resp.Body.Close()
key, err := io.ReadAll(resp.Body)
if err != nil {
return nil, nil, err
}
block, err = aes.NewCipher(key)
if err != nil {
return nil, nil, err
}
iv = []byte(segment.Key.IV)
if len(iv) == 0 {
iv = key
}
return block, iv, nil
}
// downloadSegment downloads a segment, decrypts it and names it after the given index
func (d Downloader) downloadSegment(format *Format, segment *m3u8.MediaSegment, filename string, block cipher.Block, iv []byte) (*os.File, error) {
// every segment is aes-128 encrypted and has to be decrypted when downloaded
content, err := d.decryptSegment(format.crunchy.Client, segment, block, iv)
if err != nil {
return nil, err
}
file, err := os.Create(filename)
if err != nil {
return nil, err
}
defer file.Close()
if _, err = file.Write(content); err != nil {
return nil, err
}
return file, nil
}
// https://github.com/oopsguy/m3u8/blob/4150e93ec8f4f8718875a02973f5d792648ecb97/tool/crypt.go#L25
func (d Downloader) decryptSegment(client *http.Client, segment *m3u8.MediaSegment, block cipher.Block, iv []byte) ([]byte, error) {
req, err := http.NewRequestWithContext(d.Context, http.MethodGet, segment.URI, nil)
if err != nil {
return nil, err
}
resp, err := client.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
raw, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
blockMode := cipher.NewCBCDecrypter(block, iv[:block.BlockSize()])
decrypted := make([]byte, len(raw))
blockMode.CryptBlocks(decrypted, raw)
raw = d.pkcs5UnPadding(decrypted)
return raw, nil
}
// https://github.com/oopsguy/m3u8/blob/4150e93ec8f4f8718875a02973f5d792648ecb97/tool/crypt.go#L47
func (d Downloader) pkcs5UnPadding(origData []byte) []byte {
length := len(origData)
unPadding := int(origData[length-1])
return origData[:(length - unPadding)]
}

@@ -1,212 +0,0 @@
package crunchyroll
import (
"encoding/json"
"fmt"
"regexp"
"strconv"
"strings"
"time"
)
type Episode struct {
crunchy *Crunchyroll
children []*Stream
ID string `json:"id"`
ChannelID string `json:"channel_id"`
SeriesID string `json:"series_id"`
SeriesTitle string `json:"series_title"`
SeriesSlugTitle string `json:"series_slug_title"`
SeasonID string `json:"season_id"`
SeasonTitle string `json:"season_title"`
SeasonSlugTitle string `json:"season_slug_title"`
SeasonNumber int `json:"season_number"`
Episode string `json:"episode"`
EpisodeNumber int `json:"episode_number"`
SequenceNumber float64 `json:"sequence_number"`
ProductionEpisodeID string `json:"production_episode_id"`
Title string `json:"title"`
SlugTitle string `json:"slug_title"`
Description string `json:"description"`
NextEpisodeID string `json:"next_episode_id"`
NextEpisodeTitle string `json:"next_episode_title"`
HDFlag bool `json:"hd_flag"`
IsMature bool `json:"is_mature"`
MatureBlocked bool `json:"mature_blocked"`
EpisodeAirDate time.Time `json:"episode_air_date"`
IsSubbed bool `json:"is_subbed"`
IsDubbed bool `json:"is_dubbed"`
IsClip bool `json:"is_clip"`
SeoTitle string `json:"seo_title"`
SeoDescription string `json:"seo_description"`
SeasonTags []string `json:"season_tags"`
AvailableOffline bool `json:"available_offline"`
Slug string `json:"slug"`
Images struct {
Thumbnail [][]struct {
Width int `json:"width"`
Height int `json:"height"`
Type string `json:"type"`
Source string `json:"source"`
} `json:"thumbnail"`
} `json:"images"`
DurationMS int `json:"duration_ms"`
IsPremiumOnly bool `json:"is_premium_only"`
ListingID string `json:"listing_id"`
SubtitleLocales []LOCALE `json:"subtitle_locales"`
Playback string `json:"playback"`
AvailabilityNotes string `json:"availability_notes"`
StreamID string
}
// EpisodeFromID returns an episode by its api id
func EpisodeFromID(crunchy *Crunchyroll, id string) (*Episode, error) {
resp, err := crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/episodes/%s?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
crunchy.Config.CountryCode,
crunchy.Config.MaturityRating,
crunchy.Config.Channel,
id,
crunchy.Locale,
crunchy.Config.Signature,
crunchy.Config.Policy,
crunchy.Config.KeyPairID))
if err != nil {
return nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
json.NewDecoder(resp.Body).Decode(&jsonBody)
episode := &Episode{
crunchy: crunchy,
ID: id,
}
if err := decodeMapToStruct(jsonBody, episode); err != nil {
return nil, err
}
if episode.Playback != "" {
streamHref := jsonBody["__links__"].(map[string]interface{})["streams"].(map[string]interface{})["href"].(string)
if match := regexp.MustCompile(`(?m)^/cms/v2/\S+videos/(\w+)/streams$`).FindAllStringSubmatch(streamHref, -1); len(match) > 0 {
episode.StreamID = match[0][1]
}
}
return episode, nil
}
// AudioLocale returns the audio locale of the episode.
// Every episode in a season (should) have the same audio locale,
// so if you want to get the audio locale of a season, just call this method on the first episode of the season
func (e *Episode) AudioLocale() (LOCALE, error) {
streams, err := e.Streams()
if err != nil {
return "", err
}
return streams[0].AudioLocale, nil
}
// GetFormat returns the format which matches the given resolution and subtitle locale
func (e *Episode) GetFormat(resolution string, subtitle LOCALE, hardsub bool) (*Format, error) {
streams, err := e.Streams()
if err != nil {
return nil, err
}
var foundStream *Stream
for _, stream := range streams {
if hardsub && stream.HardsubLocale == subtitle || stream.HardsubLocale == "" && subtitle == "" {
foundStream = stream
break
} else if !hardsub {
for _, streamSubtitle := range stream.Subtitles {
if streamSubtitle.Locale == subtitle {
foundStream = stream
break
}
}
if foundStream != nil {
break
}
}
}
if foundStream == nil {
return nil, fmt.Errorf("no matching stream found")
}
formats, err := foundStream.Formats()
if err != nil {
return nil, err
}
var res *Format
for _, format := range formats {
if resolution == "worst" || resolution == "best" {
if res == nil {
res = format
continue
}
curSplitRes := strings.SplitN(format.Video.Resolution, "x", 2)
curResX, _ := strconv.Atoi(curSplitRes[0])
curResY, _ := strconv.Atoi(curSplitRes[1])
resSplitRes := strings.SplitN(res.Video.Resolution, "x", 2)
resResX, _ := strconv.Atoi(resSplitRes[0])
resResY, _ := strconv.Atoi(resSplitRes[1])
if resolution == "worst" && curResX+curResY < resResX+resResY {
res = format
} else if resolution == "best" && curResX+curResY > resResX+resResY {
res = format
}
}
if format.Video.Resolution == resolution {
return format, nil
}
}
if res != nil {
return res, nil
}
return nil, fmt.Errorf("no matching resolution found")
}
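// Illustrative usage sketch, not part of the original file: picking the best
// available resolution with soft (non-hardsubbed) US English subtitles might
// look roughly like this, assuming episode is an *Episode from a Season.
//
//	format, err := episode.GetFormat("best", crunchyroll.US, false)
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(format.Video.Resolution)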
// Streams returns all streams which are available for the episode
func (e *Episode) Streams() ([]*Stream, error) {
if e.children != nil {
return e.children, nil
}
streams, err := fromVideoStreams(e.crunchy, fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/videos/%s/streams?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
e.crunchy.Config.CountryCode,
e.crunchy.Config.MaturityRating,
e.crunchy.Config.Channel,
e.StreamID,
e.crunchy.Locale,
e.crunchy.Config.Signature,
e.crunchy.Config.Policy,
e.crunchy.Config.KeyPairID))
if err != nil {
return nil, err
}
if e.crunchy.cache {
e.children = streams
}
return streams, nil
}

@@ -1,21 +0,0 @@
package crunchyroll
import "fmt"
// AccessError is an error which will be returned when some special sort of api request fails.
// See Crunchyroll.request when the error gets used
type AccessError struct {
error
URL string
Body []byte
Message string
}
func (ae *AccessError) Error() string {
if ae.Message == "" {
return fmt.Sprintf("Access token invalid for url %s\nBody: %s", ae.URL, string(ae.Body))
} else {
return ae.Message
}
}

59
flake.lock generated Normal file
@@ -0,0 +1,59 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1710534455,
"narHash": "sha256-huQT4Xs0y4EeFKn2BTBVYgEwJSv8SDlm82uWgMnCMmI=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "9af9c1c87ed3e3ed271934cb896e0cdd33dae212",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixpkgs-unstable",
"type": "indirect"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs",
"utils": "utils"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"id": "flake-utils",
"type": "indirect"
}
}
},
"root": "root",
"version": 7
}

76
flake.nix Normal file
@@ -0,0 +1,76 @@
{
inputs = {
nixpkgs.url = "flake:nixpkgs/nixpkgs-unstable";
utils.url = "flake:flake-utils";
};
outputs = { self, nixpkgs, utils }: utils.lib.eachDefaultSystem
(system:
let
# enable musl on Linux will trigger a toolchain rebuild
# making the build very slow
pkgs = import nixpkgs { inherit system; };
# if nixpkgs.legacyPackages.${system}.stdenv.hostPlatform.isLinux
# then nixpkgs.legacyPackages.${system}.pkgsMusl
# else nixpkgs.legacyPackages.${system};
crunchy-cli = pkgs.rustPlatform.buildRustPackage.override { stdenv = pkgs.clangStdenv; } rec {
pname = "crunchy-cli";
inherit ((pkgs.lib.importTOML ./Cargo.toml).package) version;
src = pkgs.lib.cleanSource ./.;
cargoLock = {
lockFile = ./Cargo.lock;
allowBuiltinFetchGit = true;
};
buildNoDefaultFeatures = true;
buildFeatures = [ "openssl-tls" ];
nativeBuildInputs = [
pkgs.pkg-config
] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
pkgs.xcbuild
];
buildInputs = [
pkgs.openssl
] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
pkgs.darwin.Security
];
};
in
{
packages.default = crunchy-cli;
devShells.default = pkgs.mkShell {
packages = with pkgs; [
cargo
clippy
rust-analyzer
rustc
rustfmt
];
inputsFrom = builtins.attrValues self.packages.${system};
buildInputs = [
pkgs.openssl
pkgs.libiconv
] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
pkgs.darwin.apple_sdk.frameworks.CoreServices
pkgs.darwin.Security
];
RUST_SRC_PATH = pkgs.rustPlatform.rustLibSrc;
};
formatter = pkgs.nixpkgs-fmt;
}
) // {
overlays.default = final: prev: {
inherit (self.packages.${final.system}) crunchy-cli;
};
};
}

@@ -1,51 +0,0 @@
package crunchyroll
import (
"github.com/grafov/m3u8"
)
type FormatType string
const (
EPISODE FormatType = "episodes"
MOVIE = "movies"
)
type Format struct {
crunchy *Crunchyroll
ID string
// FormatType represents if the format parent is an episode or a movie
FormatType FormatType
Video *m3u8.Variant
AudioLocale LOCALE
Hardsub LOCALE
Subtitles []*Subtitle
}
// InitVideo initializes the Format.Video completely.
// The Format.Video.Chunklist pointer is, by default, nil because an additional
// request must be made to receive its content. The request is not made when
// initializing a Format struct because it would probably cause an intense overhead
// since Format.Video.Chunklist is only used sometimes
func (f *Format) InitVideo() error {
if f.Video.Chunklist == nil {
resp, err := f.crunchy.Client.Get(f.Video.URI)
if err != nil {
return err
}
defer resp.Body.Close()
playlist, _, err := m3u8.DecodeFrom(resp.Body, true)
if err != nil {
return err
}
f.Video.Chunklist = playlist.(*m3u8.MediaPlaylist)
}
return nil
}
// Download downloads the Format with the via Downloader specified options
func (f *Format) Download(downloader Downloader) error {
return downloader.download(f)
}

8
go.mod
@@ -1,8 +0,0 @@
module github.com/ByteDream/crunchyroll-go
go 1.16
require (
github.com/grafov/m3u8 v0.11.1
github.com/spf13/cobra v1.4.0
)

12
go.sum
@@ -1,12 +0,0 @@
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/grafov/m3u8 v0.11.1 h1:igZ7EBIB2IAsPPazKwRKdbhxcoBKO3lO1UY57PZDeNA=
github.com/grafov/m3u8 v0.11.1/go.mod h1:nqzOkfBiZJENr52zTVd/Dcl03yzphIMbJqkXGu+u080=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/spf13/cobra v1.4.0 h1:y+wJpx64xcgO1V+RcnwW0LEHxTKRi2ZDPSBjWnrg88Q=
github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=

@@ -1,100 +0,0 @@
package crunchyroll
import (
"encoding/json"
"fmt"
)
type MovieListing struct {
crunchy *Crunchyroll
ID string `json:"id"`
Title string `json:"title"`
Slug string `json:"slug"`
SlugTitle string `json:"slug_title"`
Description string `json:"description"`
Images struct {
Thumbnail [][]struct {
Width int `json:"width"`
Height int `json:"height"`
Type string `json:"type"`
Source string `json:"source"`
} `json:"thumbnail"`
} `json:"images"`
DurationMS int `json:"duration_ms"`
IsPremiumOnly bool `json:"is_premium_only"`
ListeningID string `json:"listening_id"`
IsMature bool `json:"is_mature"`
AvailableOffline bool `json:"available_offline"`
IsSubbed bool `json:"is_subbed"`
IsDubbed bool `json:"is_dubbed"`
Playback string `json:"playback"`
AvailabilityNotes string `json:"availability_notes"`
}
// MovieListingFromID returns a movie listing by its api id
func MovieListingFromID(crunchy *Crunchyroll, id string) (*MovieListing, error) {
resp, err := crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/movie_listing/%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
crunchy.Config.CountryCode,
crunchy.Config.MaturityRating,
crunchy.Config.Channel,
id,
crunchy.Locale,
crunchy.Config.Signature,
crunchy.Config.Policy,
crunchy.Config.KeyPairID))
if err != nil {
return nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
json.NewDecoder(resp.Body).Decode(&jsonBody)
movieListing := &MovieListing{
crunchy: crunchy,
ID: id,
}
if err = decodeMapToStruct(jsonBody, movieListing); err != nil {
return nil, err
}
return movieListing, nil
}
// AudioLocale is same as Episode.AudioLocale
func (ml *MovieListing) AudioLocale() (LOCALE, error) {
resp, err := ml.crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/videos/%s/streams?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
ml.crunchy.Config.CountryCode,
ml.crunchy.Config.MaturityRating,
ml.crunchy.Config.Channel,
ml.ID,
ml.crunchy.Locale,
ml.crunchy.Config.Signature,
ml.crunchy.Config.Policy,
ml.crunchy.Config.KeyPairID))
if err != nil {
return "", err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
json.NewDecoder(resp.Body).Decode(&jsonBody)
return LOCALE(jsonBody["audio_locale"].(string)), nil
}
// Streams returns all streams which are available for the movie listing
func (ml *MovieListing) Streams() ([]*Stream, error) {
return fromVideoStreams(ml.crunchy, fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/videos/%s/streams?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
ml.crunchy.Config.CountryCode,
ml.crunchy.Config.MaturityRating,
ml.crunchy.Config.Channel,
ml.ID,
ml.crunchy.Locale,
ml.crunchy.Config.Signature,
ml.crunchy.Config.Policy,
ml.crunchy.Config.KeyPairID))
}

123
season.go
@@ -1,123 +0,0 @@
package crunchyroll
import (
"encoding/json"
"fmt"
"regexp"
)
type Season struct {
crunchy *Crunchyroll
children []*Episode
ID string `json:"id"`
ChannelID string `json:"channel_id"`
Title string `json:"title"`
SlugTitle string `json:"slug_title"`
SeriesID string `json:"series_id"`
SeasonNumber int `json:"season_number"`
IsComplete bool `json:"is_complete"`
Description string `json:"description"`
Keywords []string `json:"keywords"`
SeasonTags []string `json:"season_tags"`
IsMature bool `json:"is_mature"`
MatureBlocked bool `json:"mature_blocked"`
IsSubbed bool `json:"is_subbed"`
IsDubbed bool `json:"is_dubbed"`
IsSimulcast bool `json:"is_simulcast"`
SeoTitle string `json:"seo_title"`
SeoDescription string `json:"seo_description"`
AvailabilityNotes string `json:"availability_notes"`
// the locales are always empty (not sure why); this may change in the future
AudioLocales []LOCALE
SubtitleLocales []LOCALE
}
// SeasonFromID returns a season by its api id
func SeasonFromID(crunchy *Crunchyroll, id string) (*Season, error) {
resp, err := crunchy.Client.Get(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/seasons/%s?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
crunchy.Config.CountryCode,
crunchy.Config.MaturityRating,
crunchy.Config.Channel,
id,
crunchy.Locale,
crunchy.Config.Signature,
crunchy.Config.Policy,
crunchy.Config.KeyPairID))
if err != nil {
return nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
json.NewDecoder(resp.Body).Decode(&jsonBody)
season := &Season{
crunchy: crunchy,
ID: id,
}
if err := decodeMapToStruct(jsonBody, season); err != nil {
return nil, err
}
return season, nil
}
func (s *Season) AudioLocale() (LOCALE, error) {
episodes, err := s.Episodes()
if err != nil {
return "", err
}
return episodes[0].AudioLocale()
}
// Episodes returns all episodes which are available for the season
func (s *Season) Episodes() (episodes []*Episode, err error) {
if s.children != nil {
return s.children, nil
}
resp, err := s.crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/episodes?season_id=%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
s.crunchy.Config.CountryCode,
s.crunchy.Config.MaturityRating,
s.crunchy.Config.Channel,
s.ID,
s.crunchy.Locale,
s.crunchy.Config.Signature,
s.crunchy.Config.Policy,
s.crunchy.Config.KeyPairID))
if err != nil {
return nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
json.NewDecoder(resp.Body).Decode(&jsonBody)
for _, item := range jsonBody["items"].([]interface{}) {
episode := &Episode{
crunchy: s.crunchy,
}
if err = decodeMapToStruct(item, episode); err != nil {
return nil, err
}
if episode.Playback != "" {
streamHref := item.(map[string]interface{})["__links__"].(map[string]interface{})["streams"].(map[string]interface{})["href"].(string)
if match := regexp.MustCompile(`(?m)^/cms/v2/\S+videos/(\w+)/streams$`).FindAllStringSubmatch(streamHref, -1); len(match) > 0 {
episode.StreamID = match[0][1]
}
}
episodes = append(episodes, episode)
}
if s.crunchy.cache {
s.children = episodes
}
return
}

12
src/main.rs Normal file
@@ -0,0 +1,12 @@
#[cfg(not(any(
feature = "rustls-tls",
feature = "native-tls",
feature = "openssl-tls",
feature = "openssl-tls-static"
)))]
compile_error!("At least one tls feature must be activated");
#[tokio::main]
async fn main() {
crunchy_cli_core::main(&std::env::args().collect::<Vec<String>>()).await
}

126
stream.go
@@ -1,126 +0,0 @@
package crunchyroll
import (
"encoding/json"
"errors"
"fmt"
"github.com/grafov/m3u8"
"regexp"
)
type Stream struct {
crunchy *Crunchyroll
children []*Format
HardsubLocale LOCALE
AudioLocale LOCALE
Subtitles []*Subtitle
formatType FormatType
id string
streamURL string
}
// StreamsFromID returns a stream by its api id
func StreamsFromID(crunchy *Crunchyroll, id string) ([]*Stream, error) {
return fromVideoStreams(crunchy, fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/videos/%s/streams?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
crunchy.Config.CountryCode,
crunchy.Config.MaturityRating,
crunchy.Config.Channel,
id,
crunchy.Locale,
crunchy.Config.Signature,
crunchy.Config.Policy,
crunchy.Config.KeyPairID))
}
// Formats returns all formats which are available for the stream
func (s *Stream) Formats() ([]*Format, error) {
if s.children != nil {
return s.children, nil
}
resp, err := s.crunchy.Client.Get(s.streamURL)
if err != nil {
return nil, err
}
defer resp.Body.Close()
playlist, _, err := m3u8.DecodeFrom(resp.Body, true)
if err != nil {
return nil, err
}
var formats []*Format
for _, variant := range playlist.(*m3u8.MasterPlaylist).Variants {
formats = append(formats, &Format{
crunchy: s.crunchy,
ID: s.id,
FormatType: s.formatType,
Video: variant,
AudioLocale: s.AudioLocale,
Hardsub: s.HardsubLocale,
Subtitles: s.Subtitles,
})
}
if s.crunchy.cache {
s.children = formats
}
return formats, nil
}
// fromVideoStreams returns all streams which are accessible via the endpoint
func fromVideoStreams(crunchy *Crunchyroll, endpoint string) (streams []*Stream, err error) {
resp, err := crunchy.request(endpoint)
if err != nil {
return nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
json.NewDecoder(resp.Body).Decode(&jsonBody)
if len(jsonBody) == 0 {
// this may occur when the account is just a normal account and not a premium one
return nil, errors.New("no stream available")
}
audioLocale := jsonBody["audio_locale"].(string)
var subtitles []*Subtitle
for _, rawSubtitle := range jsonBody["subtitles"].(map[string]interface{}) {
subtitle := &Subtitle{
crunchy: crunchy,
}
decodeMapToStruct(rawSubtitle.(map[string]interface{}), subtitle)
subtitles = append(subtitles, subtitle)
}
for _, streamData := range jsonBody["streams"].(map[string]interface{})["adaptive_hls"].(map[string]interface{}) {
streamData := streamData.(map[string]interface{})
hardsubLocale := streamData["hardsub_locale"].(string)
var id string
var formatType FormatType
href := jsonBody["__links__"].(map[string]interface{})["resource"].(map[string]interface{})["href"].(string)
if match := regexp.MustCompile(`(?sm)^/cms/v2/\S+/crunchyroll/(\w+)/(\w+)$`).FindAllStringSubmatch(href, -1); len(match) > 0 {
formatType = FormatType(match[0][1])
id = match[0][2]
}
stream := &Stream{
crunchy: crunchy,
HardsubLocale: LOCALE(hardsubLocale),
formatType: formatType,
id: id,
streamURL: streamData["url"].(string),
AudioLocale: LOCALE(audioLocale),
Subtitles: subtitles,
}
streams = append(streams, stream)
}
return
}

@@ -1,30 +0,0 @@
package crunchyroll
import (
"io"
"net/http"
)
type Subtitle struct {
crunchy *Crunchyroll
Locale LOCALE `json:"locale"`
URL string `json:"url"`
Format string `json:"format"`
}
func (s Subtitle) Save(writer io.Writer) error {
req, err := http.NewRequestWithContext(s.crunchy.Context, http.MethodGet, s.URL, nil)
if err != nil {
return err
}
resp, err := s.crunchy.Client.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
_, err = io.Copy(writer, resp.Body)
return err
}

110
url.go
@@ -1,110 +0,0 @@
package crunchyroll
import (
"fmt"
)
// ExtractEpisodesFromUrl extracts all episodes from an url.
// If audio is not empty, the episodes gets filtered after the given locale
func (c *Crunchyroll) ExtractEpisodesFromUrl(url string, audio ...LOCALE) ([]*Episode, error) {
series, episodes, err := c.ParseUrl(url)
if err != nil {
return nil, err
}
var eps []*Episode
if series != nil {
seasons, err := series.Seasons()
if err != nil {
return nil, err
}
for _, season := range seasons {
if audio != nil {
locale, err := season.AudioLocale()
if err != nil {
return nil, err
}
var found bool
for _, l := range audio {
if locale == l {
found = true
break
}
}
if !found {
continue
}
}
e, err := season.Episodes()
if err != nil {
return nil, err
}
eps = append(eps, e...)
}
} else if episodes != nil {
if audio == nil {
return episodes, nil
}
for _, episode := range episodes {
locale, err := episode.AudioLocale()
if err != nil {
return nil, err
}
if audio != nil {
var found bool
for _, l := range audio {
if locale == l {
found = true
break
}
}
if !found {
continue
}
}
eps = append(eps, episode)
}
}
if len(eps) == 0 {
return nil, fmt.Errorf("could not find any matching episode")
}
return eps, nil
}
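// Illustrative usage sketch, not part of the original file: collecting only the
// Japanese episodes behind a series url might look roughly like this, assuming
// crunchy is a logged-in *Crunchyroll; the url and series id are placeholders.
//
//	episodes, err := crunchy.ExtractEpisodesFromUrl("https://beta.crunchyroll.com/series/ABCDEF123", crunchyroll.JP)
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(len(episodes), "episodes found")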
// ParseUrl parses the given url into a series or episode.
// The returning episode is a slice because non-beta urls have the same episode with different languages
func (c *Crunchyroll) ParseUrl(url string) (*Series, []*Episode, error) {
if seriesId, ok := ParseBetaSeriesURL(url); ok {
series, err := SeriesFromID(c, seriesId)
if err != nil {
return nil, nil, err
}
return series, nil, nil
} else if episodeId, ok := ParseBetaEpisodeURL(url); ok {
episode, err := EpisodeFromID(c, episodeId)
if err != nil {
return nil, nil, err
}
return nil, []*Episode{episode}, nil
} else if seriesName, ok := ParseVideoURL(url); ok {
video, err := c.FindVideoByName(seriesName)
if err != nil {
return nil, nil, err
}
return video.(*Series), nil, nil
} else if seriesName, title, _, _, ok := ParseEpisodeURL(url); ok {
episodes, err := c.FindEpisodeByName(seriesName, title)
if err != nil {
return nil, nil, err
}
return nil, episodes, nil
} else {
return nil, nil, fmt.Errorf("invalid url %s", url)
}
}

@@ -1,25 +0,0 @@
package crunchyroll
import (
"encoding/json"
)
func decodeMapToStruct(m interface{}, s interface{}) error {
jsonBody, err := json.Marshal(m)
if err != nil {
return err
}
return json.Unmarshal(jsonBody, s)
}
func regexGroups(parsed [][]string, subexpNames ...string) map[string]string {
groups := map[string]string{}
for _, match := range parsed {
for i, content := range match {
if subexpName := subexpNames[i]; subexpName != "" {
groups[subexpName] = content
}
}
}
return groups
}

@@ -1,59 +0,0 @@
package utils
import (
"github.com/ByteDream/crunchyroll-go"
)
var AllLocales = []crunchyroll.LOCALE{
crunchyroll.JP,
crunchyroll.US,
crunchyroll.LA,
crunchyroll.ES,
crunchyroll.FR,
crunchyroll.PT,
crunchyroll.BR,
crunchyroll.IT,
crunchyroll.DE,
crunchyroll.RU,
crunchyroll.AR,
}
// ValidateLocale validates if the given locale actually exist
func ValidateLocale(locale crunchyroll.LOCALE) bool {
for _, l := range AllLocales {
if l == locale {
return true
}
}
return false
}
// LocaleLanguage returns the country by its locale
func LocaleLanguage(locale crunchyroll.LOCALE) string {
switch locale {
case crunchyroll.JP:
return "Japanese"
case crunchyroll.US:
return "English (US)"
case crunchyroll.LA:
return "Spanish (Latin America)"
case crunchyroll.ES:
return "Spanish (Spain)"
case crunchyroll.FR:
return "French"
case crunchyroll.PT:
return "Portuguese (Europe)"
case crunchyroll.BR:
return "Portuguese (Brazil)"
case crunchyroll.IT:
return "Italian"
case crunchyroll.DE:
return "German"
case crunchyroll.RU:
return "Russian"
case crunchyroll.AR:
return "Arabic"
default:
return ""
}
}

@@ -1,153 +0,0 @@
package utils
import (
"github.com/ByteDream/crunchyroll-go"
"sort"
"strconv"
"strings"
"sync"
)
// SortEpisodesBySeason sorts the given episodes by their seasons.
// Note that the same episodes just with different audio locales will cause problems
func SortEpisodesBySeason(episodes []*crunchyroll.Episode) [][]*crunchyroll.Episode {
sortMap := map[string]map[int][]*crunchyroll.Episode{}
for _, episode := range episodes {
if _, ok := sortMap[episode.SeriesID]; !ok {
sortMap[episode.SeriesID] = map[int][]*crunchyroll.Episode{}
}
if _, ok := sortMap[episode.SeriesID][episode.SeasonNumber]; !ok {
sortMap[episode.SeriesID][episode.SeasonNumber] = make([]*crunchyroll.Episode, 0)
}
sortMap[episode.SeriesID][episode.SeasonNumber] = append(sortMap[episode.SeriesID][episode.SeasonNumber], episode)
}
var eps [][]*crunchyroll.Episode
for _, series := range sortMap {
var keys []int
for seriesNumber := range series {
keys = append(keys, seriesNumber)
}
sort.Ints(keys)
for _, key := range keys {
es := series[key]
if len(es) > 0 {
sort.Sort(EpisodesByNumber(es))
eps = append(eps, es)
}
}
}
return eps
}
// SortEpisodesByAudio sort the given episodes by their audio locale
func SortEpisodesByAudio(episodes []*crunchyroll.Episode) (map[crunchyroll.LOCALE][]*crunchyroll.Episode, error) {
eps := map[crunchyroll.LOCALE][]*crunchyroll.Episode{}
errChan := make(chan error)
var wg sync.WaitGroup
var lock sync.Mutex
for _, episode := range episodes {
episode := episode
wg.Add(1)
go func() {
defer wg.Done()
audioLocale, err := episode.AudioLocale()
if err != nil {
errChan <- err
return
}
lock.Lock()
defer lock.Unlock()
if _, ok := eps[audioLocale]; !ok {
eps[audioLocale] = make([]*crunchyroll.Episode, 0)
}
eps[audioLocale] = append(eps[audioLocale], episode)
}()
}
go func() {
wg.Wait()
errChan <- nil
}()
if err := <-errChan; err != nil {
return nil, err
}
return eps, nil
}
// MovieListingsByDuration sorts movie listings by their duration
type MovieListingsByDuration []*crunchyroll.MovieListing
func (mlbd MovieListingsByDuration) Len() int {
return len(mlbd)
}
func (mlbd MovieListingsByDuration) Swap(i, j int) {
mlbd[i], mlbd[j] = mlbd[j], mlbd[i]
}
func (mlbd MovieListingsByDuration) Less(i, j int) bool {
return mlbd[i].DurationMS < mlbd[j].DurationMS
}
// EpisodesByDuration sorts episodes by their duration
type EpisodesByDuration []*crunchyroll.Episode
func (ebd EpisodesByDuration) Len() int {
return len(ebd)
}
func (ebd EpisodesByDuration) Swap(i, j int) {
ebd[i], ebd[j] = ebd[j], ebd[i]
}
func (ebd EpisodesByDuration) Less(i, j int) bool {
return ebd[i].DurationMS < ebd[j].DurationMS
}
type EpisodesByNumber []*crunchyroll.Episode
func (ebn EpisodesByNumber) Len() int {
return len(ebn)
}
func (ebn EpisodesByNumber) Swap(i, j int) {
ebn[i], ebn[j] = ebn[j], ebn[i]
}
func (ebn EpisodesByNumber) Less(i, j int) bool {
return ebn[i].EpisodeNumber < ebn[j].EpisodeNumber
}
// FormatsByResolution sorts formats after their resolution
type FormatsByResolution []*crunchyroll.Format
func (fbr FormatsByResolution) Len() int {
return len(fbr)
}
func (fbr FormatsByResolution) Swap(i, j int) {
fbr[i], fbr[j] = fbr[j], fbr[i]
}
func (fbr FormatsByResolution) Less(i, j int) bool {
iSplitRes := strings.SplitN(fbr[i].Video.Resolution, "x", 2)
iResX, _ := strconv.Atoi(iSplitRes[0])
iResY, _ := strconv.Atoi(iSplitRes[1])
jSplitRes := strings.SplitN(fbr[j].Video.Resolution, "x", 2)
jResX, _ := strconv.Atoi(jSplitRes[0])
jResY, _ := strconv.Atoi(jSplitRes[1])
return iResX+iResY < jResX+jResY
}
type SubtitlesByLocale []*crunchyroll.Subtitle
func (sbl SubtitlesByLocale) Len() int {
return len(sbl)
}
func (sbl SubtitlesByLocale) Swap(i, j int) {
sbl[i], sbl[j] = sbl[j], sbl[i]
}
func (sbl SubtitlesByLocale) Less(i, j int) bool {
return LocaleLanguage(sbl[i].Locale) < LocaleLanguage(sbl[j].Locale)
}

230
video.go
@@ -1,230 +0,0 @@
package crunchyroll
import (
"encoding/json"
"fmt"
)
type video struct {
ID string `json:"id"`
ExternalID string `json:"external_id"`
Description string `json:"description"`
Title string `json:"title"`
Slug string `json:"slug"`
SlugTitle string `json:"slug_title"`
Images struct {
PosterTall [][]struct {
Height int `json:"height"`
Source string `json:"source"`
Type string `json:"type"`
Width int `json:"width"`
} `json:"poster_tall"`
PosterWide [][]struct {
Height int `json:"height"`
Source string `json:"source"`
Type string `json:"type"`
Width int `json:"width"`
} `json:"poster_wide"`
} `json:"images"`
}
type Video interface{}
type Movie struct {
video
Video
crunchy *Crunchyroll
children []*MovieListing
// not generated when calling MovieFromID
MovieListingMetadata struct {
AvailabilityNotes string `json:"availability_notes"`
AvailableOffline bool `json:"available_offline"`
DurationMS int `json:"duration_ms"`
ExtendedDescription string `json:"extended_description"`
FirstMovieID string `json:"first_movie_id"`
IsDubbed bool `json:"is_dubbed"`
IsMature bool `json:"is_mature"`
IsPremiumOnly bool `json:"is_premium_only"`
IsSubbed bool `json:"is_subbed"`
MatureRatings []string `json:"mature_ratings"`
MovieReleaseYear int `json:"movie_release_year"`
SubtitleLocales []LOCALE `json:"subtitle_locales"`
} `json:"movie_listing_metadata"`
Playback string `json:"playback"`
PromoDescription string `json:"promo_description"`
PromoTitle string `json:"promo_title"`
SearchMetadata struct {
Score float64 `json:"score"`
}
}
// MovieFromID returns a movie by its API ID
func MovieFromID(crunchy *Crunchyroll, id string) (*Movie, error) {
resp, err := crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/movies/%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
crunchy.Config.CountryCode,
crunchy.Config.MaturityRating,
crunchy.Config.Channel,
id,
crunchy.Locale,
crunchy.Config.Signature,
crunchy.Config.Policy,
crunchy.Config.KeyPairID))
if err != nil {
return nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
if err = json.NewDecoder(resp.Body).Decode(&jsonBody); err != nil {
return nil, err
}
movie := &Movie{
crunchy: crunchy,
}
movie.ID = id
if err = decodeMapToStruct(jsonBody, movie); err != nil {
return nil, err
}
return movie, nil
}
// MovieListing returns all video listings belonging to the movie.
// Besides the actual movie, movie previews are sometimes returned as well; you can usually
// obtain the real movie by sorting the returned MovieListing slice with the utils.MovieListingsByDuration type
func (m *Movie) MovieListing() (movieListings []*MovieListing, err error) {
if m.children != nil {
return m.children, nil
}
resp, err := m.crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/movies?movie_listing_id=%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
m.crunchy.Config.CountryCode,
m.crunchy.Config.MaturityRating,
m.crunchy.Config.Channel,
m.ID,
m.crunchy.Locale,
m.crunchy.Config.Signature,
m.crunchy.Config.Policy,
m.crunchy.Config.KeyPairID))
if err != nil {
return nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
if err = json.NewDecoder(resp.Body).Decode(&jsonBody); err != nil {
return nil, err
}
for _, item := range jsonBody["items"].([]interface{}) {
movieListing := &MovieListing{
crunchy: m.crunchy,
}
if err = decodeMapToStruct(item, movieListing); err != nil {
return nil, err
}
movieListings = append(movieListings, movieListing)
}
if m.crunchy.cache {
m.children = movieListings
}
return movieListings, nil
}
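// Example (not part of the original file): a hedged sketch of the pattern the
// MovieListing comment describes, picking the longest listing, which is usually
// the full movie rather than a preview. sort.Slice keeps the sketch
// self-contained; the original comment suggests utils.MovieListingsByDuration
// for the same effect. Assumes "sort" is added to the existing imports.
func exampleLongestListing(m *Movie) (*MovieListing, error) {
	listings, err := m.MovieListing()
	if err != nil {
		return nil, err
	}
	if len(listings) == 0 {
		return nil, fmt.Errorf("movie %s has no listings", m.ID)
	}
	// longest duration first
	sort.Slice(listings, func(i, j int) bool {
		return listings[i].DurationMS > listings[j].DurationMS
	})
	return listings[0], nil
}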
type Series struct {
video
Video
crunchy *Crunchyroll
children []*Season
PromoDescription string `json:"promo_description"`
PromoTitle string `json:"promo_title"`
AvailabilityNotes string `json:"availability_notes"`
EpisodeCount int `json:"episode_count"`
ExtendedDescription string `json:"extended_description"`
IsDubbed bool `json:"is_dubbed"`
IsMature bool `json:"is_mature"`
IsSimulcast bool `json:"is_simulcast"`
IsSubbed bool `json:"is_subbed"`
MatureBlocked bool `json:"mature_blocked"`
MatureRatings []string `json:"mature_ratings"`
SeasonCount int `json:"season_count"`
// not generated when calling SeriesFromID
SearchMetadata struct {
Score float64 `json:"score"`
}
}
// SeriesFromID returns a series by its API ID
func SeriesFromID(crunchy *Crunchyroll, id string) (*Series, error) {
resp, err := crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/movies?movie_listing_id=%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
crunchy.Config.CountryCode,
crunchy.Config.MaturityRating,
crunchy.Config.Channel,
id,
crunchy.Locale,
crunchy.Config.Signature,
crunchy.Config.Policy,
crunchy.Config.KeyPairID))
if err != nil {
return nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
if err = json.NewDecoder(resp.Body).Decode(&jsonBody); err != nil {
return nil, err
}
series := &Series{
crunchy: crunchy,
}
series.ID = id
if err = decodeMapToStruct(jsonBody, series); err != nil {
return nil, err
}
return series, nil
}
// Seasons returns all seasons of a series
func (s *Series) Seasons() (seasons []*Season, err error) {
if s.children != nil {
return s.children, nil
}
resp, err := s.crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/seasons?series_id=%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
s.crunchy.Config.CountryCode,
s.crunchy.Config.MaturityRating,
s.crunchy.Config.Channel,
s.ID,
s.crunchy.Locale,
s.crunchy.Config.Signature,
s.crunchy.Config.Policy,
s.crunchy.Config.KeyPairID))
if err != nil {
return nil, err
}
defer resp.Body.Close()
var jsonBody map[string]interface{}
if err = json.NewDecoder(resp.Body).Decode(&jsonBody); err != nil {
return nil, err
}
for _, item := range jsonBody["items"].([]interface{}) {
season := &Season{
crunchy: s.crunchy,
}
if err = decodeMapToStruct(item, season); err != nil {
return nil, err
}
seasons = append(seasons, season)
}
if s.crunchy.cache {
s.children = seasons
}
return
}
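// Example (not part of the original file): a hedged sketch of walking from a
// series ID to its seasons with the functions above. The client and the ID
// are assumptions about the calling code, not part of this package.
func exampleListSeasons(crunchy *Crunchyroll, seriesID string) error {
	series, err := SeriesFromID(crunchy, seriesID)
	if err != nil {
		return err
	}
	seasons, err := series.Seasons()
	if err != nil {
		return err
	}
	fmt.Printf("series %s has %d season(s)\n", seriesID, len(seasons))
	return nil
}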