mirror of
https://github.com/crunchy-labs/crunchy-cli.git
synced 2026-01-21 12:12:00 -06:00
Compare commits
265 commits
v3.0.0-dev
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4332b1beef | ||
|
|
2cf9125de3 | ||
|
|
756022b955 | ||
|
|
509683d23a | ||
|
|
8047680799 | ||
|
|
287df84382 | ||
|
|
e7ac6d8874 | ||
|
|
fb8e535644 | ||
|
|
67c267be20 | ||
|
|
a1c7b2069d | ||
|
|
74e5e05b0f | ||
|
|
7d2ae719c8 | ||
|
|
5593046aae | ||
|
|
f8bd092987 | ||
|
|
cbe57e2b6e | ||
|
|
f7ce888329 | ||
|
|
301dac478f | ||
|
|
9819b62259 | ||
|
|
5279a9b759 | ||
|
|
a98e31f959 | ||
|
|
590242712b | ||
|
|
817963af4f | ||
|
|
48bb7a5ef6 | ||
|
|
53a710a373 | ||
|
|
ab63dcd2e0 | ||
|
|
4d1df83342 | ||
|
|
89b9c5db39 | ||
|
|
96d3de48cf | ||
|
|
dad91dba91 | ||
|
|
757d3094ea | ||
|
|
fca1b74cac | ||
|
|
0f7d7d928c | ||
|
|
f77804fcb5 | ||
|
|
4066b8511c | ||
|
|
dcbe433a9c | ||
|
|
55f1e1d32d | ||
|
|
442173c08c | ||
|
|
173292ff32 | ||
|
|
72c574c883 | ||
|
|
f237033aff | ||
|
|
bf28dbf1ce | ||
|
|
cf8bfb02ac | ||
|
|
74aaed4e7a | ||
|
|
177ceb1920 | ||
|
|
541f0e2747 | ||
|
|
777b39aba1 | ||
|
|
4f3475131c | ||
|
|
177aa37631 | ||
|
|
8fff807ae6 | ||
|
|
db6e45e7f4 | ||
|
|
8ada822396 | ||
|
|
9bdd3aa85b | ||
|
|
4fc20c7c1c | ||
|
|
6515d3025f | ||
|
|
fe17f3951e | ||
|
|
cdad7fc000 | ||
|
|
d7dac2acd4 | ||
|
|
dbbb445c55 | ||
|
|
733d9f9787 | ||
|
|
0257fdea0d | ||
|
|
9e5feef4d4 | ||
|
|
b9f5fadbb3 | ||
|
|
ea39dcbc71 | ||
|
|
a73773ce1d | ||
|
|
0115730d60 | ||
|
|
18534b259b | ||
|
|
77103ff1f1 | ||
|
|
771594a231 | ||
|
|
1a511e12f9 | ||
|
|
fe49161e93 | ||
|
|
25cde6163c | ||
|
|
4b74299733 | ||
|
|
c40ea8b132 | ||
|
|
6b6d24a575 | ||
|
|
8c1868f2fd | ||
|
|
af8ab24826 | ||
|
|
c0f3346846 | ||
|
|
111e461b30 | ||
|
|
f16cd25ea4 | ||
|
|
e694046b07 | ||
|
|
ba8028737d | ||
|
|
89be8ac429 | ||
|
|
26a858c1a1 | ||
|
|
d3696c783c | ||
|
|
88a28e843f | ||
|
|
a0fa2bfd8a | ||
|
|
013273b832 | ||
|
|
3bf2458774 | ||
|
|
e3a7fd9246 | ||
|
|
f1d266c940 | ||
|
|
3f33db6728 | ||
|
|
56f0ed1795 | ||
|
|
9c44fa7dae | ||
|
|
3099aac0e7 | ||
|
|
9a6959970a | ||
|
|
d2589a3a6f | ||
|
|
52da6eacc9 | ||
|
|
5634ce3277 | ||
|
|
6a7aa25e1a | ||
|
|
6a50567916 | ||
|
|
2084328069 | ||
|
|
d3ab2245a8 | ||
|
|
c31b1f4db9 | ||
|
|
8187269128 | ||
|
|
5d68f0334a | ||
|
|
a2464bad4e | ||
|
|
0f06c7ac71 | ||
|
|
f8309f2e80 | ||
|
|
982e521e0b | ||
|
|
a4abb14ae3 | ||
|
|
7cf7a8e71c | ||
|
|
3b9fc52890 | ||
|
|
444dc65a29 | ||
|
|
658bb86800 | ||
|
|
6e01e9e8a7 | ||
|
|
937e9a2fdc | ||
|
|
fbe182239a | ||
|
|
5490243df8 | ||
|
|
20f796f603 | ||
|
|
f3faa5bf94 | ||
|
|
3f401ccbd7 | ||
|
|
35447c5cb0 | ||
|
|
333d574e56 | ||
|
|
7c42f29596 | ||
|
|
ef2898f0e1 | ||
|
|
650338d3e6 | ||
|
|
c37d55aade | ||
|
|
d90f45fa31 | ||
|
|
99f96e3e35 | ||
|
|
d3837f2495 | ||
|
|
fc6da9a76d | ||
|
|
283a3802b2 | ||
|
|
172e3612d0 | ||
|
|
2e6246c439 | ||
|
|
d503d459cd | ||
|
|
19935df545 | ||
|
|
0da81a4814 | ||
|
|
0a26083232 | ||
|
|
8613ea80cc | ||
|
|
b97c2a922e | ||
|
|
be3248a4f9 | ||
|
|
f9e431e181 | ||
|
|
77609be598 | ||
|
|
b4057599a1 | ||
|
|
6c7ab04b99 | ||
|
|
9487dd3dbf | ||
|
|
9ca3b79291 | ||
|
|
8f77028fcb | ||
|
|
d5df3df95f | ||
|
|
440ccd99b5 | ||
|
|
2c37093959 | ||
|
|
14e71c05b8 | ||
|
|
d52fe7fb92 | ||
|
|
c08931b610 | ||
|
|
fc6511a361 | ||
|
|
56411c6547 | ||
|
|
4d01e2a4ec | ||
|
|
cd35dfe276 | ||
|
|
f31437fba2 | ||
|
|
e5d9c27af7 | ||
|
|
787d8ab02c | ||
|
|
7594412f58 | ||
|
|
d8b76f8cc7 | ||
|
|
f56d9ecabf | ||
|
|
5a3a304443 | ||
|
|
d0fe7f54f6 | ||
|
|
685c79d673 | ||
|
|
5d17bb1ac7 | ||
|
|
568bce0008 | ||
|
|
bbb5a78765 | ||
|
|
81385ef6ce | ||
|
|
13335c020b | ||
|
|
e5db8e9504 | ||
|
|
5bc68ad592 | ||
|
|
7095e2b8b6 | ||
|
|
610593a795 | ||
|
|
9596175f7f | ||
|
|
f48474ba77 | ||
|
|
d79197edc6 | ||
|
|
a93a1fa807 | ||
|
|
3e21ca4fe7 | ||
|
|
01913e0db3 | ||
|
|
64428ea7d1 | ||
|
|
8eda8df3f7 | ||
|
|
185b65fc9b | ||
|
|
7485bd8e76 | ||
|
|
0f7e6c9120 | ||
|
|
b477ca982c | ||
|
|
18f891efd2 | ||
|
|
3ae6fe4a1a | ||
|
|
e06e6b2b01 | ||
|
|
70b3a7a3e1 | ||
|
|
a80f6e5df4 | ||
|
|
2f57b07559 | ||
|
|
596fcc2342 | ||
|
|
70b41b4dd5 | ||
|
|
6a6b981979 | ||
|
|
92ed4bd87d | ||
|
|
d295a57f84 | ||
|
|
31fe1460f1 | ||
|
|
f45bb19cd7 | ||
|
|
6da292f013 | ||
|
|
a45833f5a2 | ||
|
|
448b633be8 | ||
|
|
800df5ca6c | ||
|
|
9f9aec1f8a | ||
|
|
b98332eae4 | ||
|
|
40397e96a3 | ||
|
|
aef2fddff7 | ||
|
|
a12a8bc366 | ||
|
|
6f40ffacec | ||
|
|
2bcaa6e4d5 | ||
|
|
0586f38cdc | ||
|
|
435b75bbf9 | ||
|
|
b1342d54f3 | ||
|
|
700b041f9a | ||
|
|
84c70f2bee | ||
|
|
4c396a9e4a | ||
|
|
9ced3483d8 | ||
|
|
db156d361f | ||
|
|
5afda0b5f1 | ||
|
|
068c0fcac1 | ||
|
|
00e8082e66 | ||
|
|
dc6bc0d951 | ||
|
|
4ec9a0d309 | ||
|
|
566422cb06 | ||
|
|
dd2033d323 | ||
|
|
8490263e84 | ||
|
|
6b76887978 | ||
|
|
850aa7a969 | ||
|
|
9ad27102fc | ||
|
|
513353890d | ||
|
|
49de7bbba9 | ||
|
|
751735477c | ||
|
|
1fe8746dda | ||
|
|
af8a88a792 | ||
|
|
f40dc0dd1c | ||
|
|
0234d46bf9 | ||
|
|
618d2206a2 | ||
|
|
75b6e7b452 | ||
|
|
fc44b8af8a | ||
|
|
d75c04fbb6 | ||
|
|
f4682e0f29 | ||
|
|
5b8a4b9969 | ||
|
|
0ef4980ab3 | ||
|
|
2ebc76a0df | ||
|
|
f7af983526 | ||
|
|
0cd647fb14 | ||
|
|
4e4a4355f5 | ||
|
|
0b044ba27e | ||
|
|
26ca3ca65c | ||
|
|
e9b4837f44 | ||
|
|
0aa648b1a5 | ||
|
|
0beaa99bfd | ||
|
|
7ed1158339 | ||
|
|
b55ac9a51a | ||
|
|
32aab193d0 | ||
|
|
49d64805ca | ||
|
|
19f79a4349 | ||
|
|
f3f900064a | ||
|
|
a2b7c78752 | ||
|
|
4bd172df06 | ||
|
|
b24827dc6b | ||
|
|
61766c74fa | ||
|
|
c2e953043e |
48 changed files with 7061 additions and 5409 deletions
1
.envrc
Normal file
1
.envrc
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
use flake
|
||||||
48
.github/workflow-resources/PKGBUILD.binary
vendored
Normal file
48
.github/workflow-resources/PKGBUILD.binary
vendored
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
# Maintainer: ByteDream
|
||||||
|
pkgname=crunchy-cli-bin
|
||||||
|
pkgdesc="Command-line downloader for Crunchyroll"
|
||||||
|
arch=('x86_64' 'aarch64')
|
||||||
|
url="https://github.com/crunchy-labs/crunchy-cli"
|
||||||
|
license=('MIT')
|
||||||
|
|
||||||
|
pkgver=$CI_PKG_VERSION
|
||||||
|
pkgrel=1
|
||||||
|
|
||||||
|
depends=('ffmpeg')
|
||||||
|
provides=('crunchy-cli')
|
||||||
|
conflicts=('crunchy-cli')
|
||||||
|
source_x86_64=(
|
||||||
|
"crunchy-cli::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-linux-x86_64"
|
||||||
|
"manpages.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-manpages.zip"
|
||||||
|
"completions.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-completions.zip"
|
||||||
|
"LICENSE::https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/v${pkgver}/LICENSE"
|
||||||
|
)
|
||||||
|
source_aarch64=(
|
||||||
|
"crunchy-cli::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-linux-aarch64"
|
||||||
|
"manpages.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-manpages.zip"
|
||||||
|
"completions.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-completions.zip"
|
||||||
|
"LICENSE::https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/v${pkgver}/LICENSE"
|
||||||
|
)
|
||||||
|
noextract=("manpages.zip" "completions.zip")
|
||||||
|
sha256sums_x86_64=('$CI_AMD_BINARY_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')
|
||||||
|
sha256sums_aarch64=('$CI_ARM_BINARY_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')
|
||||||
|
|
||||||
|
package() {
|
||||||
|
cd "$srcdir"
|
||||||
|
|
||||||
|
# all files in manpages.zip and completions.zip are stored in root of the archive, makepkg extracts them all to $srcdir
|
||||||
|
# which makes it pretty messy. so the extraction is done manually to keep the content of $srcdir structured
|
||||||
|
mkdir manpages completions
|
||||||
|
cd manpages
|
||||||
|
bsdtar -xf ../manpages.zip
|
||||||
|
cd ../completions
|
||||||
|
bsdtar -xf ../completions.zip
|
||||||
|
cd ..
|
||||||
|
|
||||||
|
install -Dm755 crunchy-cli $pkgdir/usr/bin/crunchy-cli
|
||||||
|
install -Dm644 manpages/* -t $pkgdir/usr/share/man/man1
|
||||||
|
install -Dm644 completions/crunchy-cli.bash $pkgdir/usr/share/bash-completion/completions/crunchy-cli
|
||||||
|
install -Dm644 completions/_crunchy-cli $pkgdir/usr/share/zsh/site-functions/_crunchy-cli
|
||||||
|
install -Dm644 completions/crunchy-cli.fish $pkgdir/usr/share/fish/vendor_completions.d/crunchy-cli.fish
|
||||||
|
install -Dm644 LICENSE $pkgdir/usr/share/licenses/crunchy-cli/LICENSE
|
||||||
|
}
|
||||||
46
.github/workflow-resources/PKGBUILD.source
vendored
Normal file
46
.github/workflow-resources/PKGBUILD.source
vendored
Normal file
|
|
@ -0,0 +1,46 @@
|
||||||
|
# Maintainer: ByteDream
|
||||||
|
pkgname=crunchy-cli
|
||||||
|
pkgdesc="Command-line downloader for Crunchyroll"
|
||||||
|
arch=('x86_64' 'i686' 'arm' 'armv6h' 'armv7h' 'aarch64')
|
||||||
|
url="https://github.com/crunchy-labs/crunchy-cli"
|
||||||
|
license=('MIT')
|
||||||
|
|
||||||
|
pkgver=$CI_PKG_VERSION
|
||||||
|
pkgrel=1
|
||||||
|
|
||||||
|
depends=('ffmpeg' 'openssl')
|
||||||
|
makedepends=('cargo')
|
||||||
|
source=("${pkgname}-${pkgver}.tar.gz::https://github.com/crunchy-labs/crunchy-cli/archive/refs/tags/v${pkgver}.tar.gz")
|
||||||
|
sha256sums=('$CI_SHA_SUM')
|
||||||
|
# lto causes linking errors when executed by this buildscript. besides, lto is already done by cargo itself (which doesn't cause linking errors)
|
||||||
|
options=(!lto)
|
||||||
|
|
||||||
|
prepare() {
|
||||||
|
cd "$srcdir/${pkgname}-$pkgver"
|
||||||
|
|
||||||
|
export RUSTUP_TOOLCHAIN=stable
|
||||||
|
export CARGO_HOME="$srcdir/cargo-home"
|
||||||
|
|
||||||
|
cargo fetch --locked --target "$(rustc -vV | sed -n 's/host: //p')"
|
||||||
|
}
|
||||||
|
|
||||||
|
build() {
|
||||||
|
cd "$srcdir/${pkgname}-$pkgver"
|
||||||
|
|
||||||
|
export RUSTUP_TOOLCHAIN=stable
|
||||||
|
export CARGO_HOME="$srcdir/cargo-home"
|
||||||
|
|
||||||
|
export CRUNCHY_CLI_GIT_HASH=$CI_GIT_HASH
|
||||||
|
cargo build --frozen --release
|
||||||
|
}
|
||||||
|
|
||||||
|
package() {
|
||||||
|
cd "$srcdir/${pkgname}-$pkgver"
|
||||||
|
|
||||||
|
install -Dm755 target/release/crunchy-cli $pkgdir/usr/bin/crunchy-cli
|
||||||
|
install -Dm644 target/release/manpages/* -t $pkgdir/usr/share/man/man1
|
||||||
|
install -Dm644 target/release/completions/crunchy-cli.bash $pkgdir/usr/share/bash-completion/completions/crunchy-cli
|
||||||
|
install -Dm644 target/release/completions/_crunchy-cli $pkgdir/usr/share/zsh/site-functions/_crunchy-cli
|
||||||
|
install -Dm644 target/release/completions/crunchy-cli.fish $pkgdir/usr/share/fish/vendor_completions.d/crunchy-cli.fish
|
||||||
|
install -Dm644 LICENSE $pkgdir/usr/share/licenses/crunchy-cli/LICENSE
|
||||||
|
}
|
||||||
145
.github/workflows/build.yml
vendored
Normal file
145
.github/workflows/build.yml
vendored
Normal file
|
|
@ -0,0 +1,145 @@
|
||||||
|
name: build
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- '*'
|
||||||
|
pull_request:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-linux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- arch: x86_64
|
||||||
|
toolchain: x86_64-unknown-linux-musl
|
||||||
|
- arch: aarch64
|
||||||
|
toolchain: aarch64-unknown-linux-musl
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cargo cache
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.cargo/bin/
|
||||||
|
~/.cargo/registry/index/
|
||||||
|
~/.cargo/registry/cache/
|
||||||
|
~/.cargo/git/db/
|
||||||
|
target/
|
||||||
|
key: ${{ matrix.toolchain }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Setup Rust
|
||||||
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
|
||||||
|
- name: Install cross
|
||||||
|
run: cargo install --force cross
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: cross build --locked --release --no-default-features --features openssl-tls-static --target ${{ matrix.toolchain }}
|
||||||
|
|
||||||
|
- name: Upload binary artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: crunchy-cli-linux-${{ matrix.arch }}
|
||||||
|
path: ./target/${{ matrix.toolchain }}/release/crunchy-cli
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
|
- name: Upload manpages artifact
|
||||||
|
if: ${{ matrix.arch == 'x86_64' }} # only upload the manpages once
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: manpages
|
||||||
|
path: ./target/${{ matrix.toolchain }}/release/manpages
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
|
- name: Upload completions artifact
|
||||||
|
if: ${{ matrix.arch == 'x86_64' }} # only upload the completions once
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: completions
|
||||||
|
path: ./target/${{ matrix.toolchain }}/release/completions
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
|
build-mac:
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
# macos-13 uses x86_64, macos-14 aarch64
|
||||||
|
# see https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources
|
||||||
|
include:
|
||||||
|
- os: macos-13
|
||||||
|
arch: x86_64
|
||||||
|
toolchain: x86_64-apple-darwin
|
||||||
|
- os: macos-14
|
||||||
|
arch: aarch64
|
||||||
|
toolchain: aarch64-apple-darwin
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cargo cache
|
||||||
|
if: ${{ matrix.os != 'macos-13' }} # when using cache, the 'Setup Rust' step fails for macos 13
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.cargo/bin/
|
||||||
|
~/.cargo/registry/index/
|
||||||
|
~/.cargo/registry/cache/
|
||||||
|
~/.cargo/git/db/
|
||||||
|
target/
|
||||||
|
key: x86_64-apple-darwin-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Setup Rust
|
||||||
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: cargo build --locked --release --target ${{ matrix.toolchain }}
|
||||||
|
|
||||||
|
- name: Upload binary artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: crunchy-cli-darwin-${{ matrix.arch }}
|
||||||
|
path: ./target/${{ matrix.toolchain }}/release/crunchy-cli
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
|
build-windows:
|
||||||
|
runs-on: windows-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cargo cache
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.cargo/bin/
|
||||||
|
~/.cargo/registry/index/
|
||||||
|
~/.cargo/registry/cache/
|
||||||
|
~/.cargo/git/db/
|
||||||
|
target/
|
||||||
|
key: x86_64-pc-windows-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Install system dependencies
|
||||||
|
uses: msys2/setup-msys2@v2
|
||||||
|
with:
|
||||||
|
update: true
|
||||||
|
install: mingw-w64-x86_64-rust base-devel
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
shell: msys2 {0}
|
||||||
|
run: cargo build --locked --release --target x86_64-pc-windows-gnu
|
||||||
|
|
||||||
|
- name: Upload binary artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: crunchy-cli-windows-x86_64
|
||||||
|
path: ./target/x86_64-pc-windows-gnu/release/crunchy-cli.exe
|
||||||
|
if-no-files-found: error
|
||||||
78
.github/workflows/ci.yml
vendored
78
.github/workflows/ci.yml
vendored
|
|
@ -1,78 +0,0 @@
|
||||||
name: ci
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
pull_request:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- os: ubuntu-latest
|
|
||||||
toolchain: x86_64-unknown-linux-musl
|
|
||||||
platform: linux
|
|
||||||
ext:
|
|
||||||
- os: windows-latest
|
|
||||||
toolchain: x86_64-pc-windows-gnu
|
|
||||||
platform: windows
|
|
||||||
ext: .exe
|
|
||||||
- os: macos-latest
|
|
||||||
toolchain: x86_64-apple-darwin
|
|
||||||
platform: darwin
|
|
||||||
ext:
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Cargo cache
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: |
|
|
||||||
~/.cargo/bin/
|
|
||||||
~/.cargo/registry/index/
|
|
||||||
~/.cargo/registry/cache/
|
|
||||||
~/.cargo/git/db/
|
|
||||||
target/
|
|
||||||
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
|
||||||
|
|
||||||
- name: Install system dependencies
|
|
||||||
if: matrix.platform == 'linux'
|
|
||||||
run: sudo apt-get install musl-tools
|
|
||||||
|
|
||||||
- name: Install toolchain
|
|
||||||
uses: dtolnay/rust-toolchain@stable
|
|
||||||
with:
|
|
||||||
toolchain: stable
|
|
||||||
target: ${{ matrix.toolchain }}
|
|
||||||
|
|
||||||
- name: Test
|
|
||||||
run: cargo test --release --all-features --target ${{ matrix.toolchain }}
|
|
||||||
|
|
||||||
- name: Build
|
|
||||||
run: cargo build --release --all-features --target ${{ matrix.toolchain }}
|
|
||||||
|
|
||||||
- name: Upload binary artifact
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: crunchy-cli_${{ matrix.platform }}
|
|
||||||
path: ./target/${{ matrix.toolchain }}/release/crunchy-cli${{ matrix.ext }}
|
|
||||||
if-no-files-found: error
|
|
||||||
|
|
||||||
- name: Upload manpages artifact
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: manpages
|
|
||||||
path: ./target/${{ matrix.toolchain }}/release/manpages
|
|
||||||
if-no-files-found: error
|
|
||||||
|
|
||||||
- name: Upload completions artifact
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: completions
|
|
||||||
path: ./target/${{ matrix.toolchain }}/release/completions
|
|
||||||
if-no-files-found: error
|
|
||||||
58
.github/workflows/lint.yml
vendored
Normal file
58
.github/workflows/lint.yml
vendored
Normal file
|
|
@ -0,0 +1,58 @@
|
||||||
|
name: lint
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- '*'
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
fmt:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cargo cache
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.cargo/bin/
|
||||||
|
~/.cargo/registry/index/
|
||||||
|
~/.cargo/registry/cache/
|
||||||
|
~/.cargo/git/db/
|
||||||
|
target/
|
||||||
|
key: x86_64-unknown-linux-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Setup Rust
|
||||||
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
|
||||||
|
- name: Check fmt
|
||||||
|
run: cargo fmt --check
|
||||||
|
|
||||||
|
lint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cargo cache
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.cargo/bin/
|
||||||
|
~/.cargo/registry/index/
|
||||||
|
~/.cargo/registry/cache/
|
||||||
|
~/.cargo/git/db/
|
||||||
|
target/
|
||||||
|
key: x86_64-unknown-linux-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Setup Rust
|
||||||
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
|
||||||
|
- name: Lint
|
||||||
|
run: cargo clippy -- -D warnings
|
||||||
74
.github/workflows/publish.yml
vendored
Normal file
74
.github/workflows/publish.yml
vendored
Normal file
|
|
@ -0,0 +1,74 @@
|
||||||
|
name: publish
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- v*
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
publish-aur:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Get version
|
||||||
|
run: echo "RELEASE_VERSION=$(echo ${{ github.ref_name }} | cut -c 2-)" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Generate crunchy-cli sha sum
|
||||||
|
run: |
|
||||||
|
curl -LO https://github.com/crunchy-labs/crunchy-cli/archive/refs/tags/${{ github.ref_name }}.tar.gz
|
||||||
|
echo "CRUNCHY_CLI_SHA256=$(sha256sum ${{ github.ref_name }}.tar.gz | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Get release commit hash
|
||||||
|
run: echo "CRUNCHY_CLI_GIT_HASH=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Generate crunchy-cli PKGBUILD
|
||||||
|
env:
|
||||||
|
CI_PKG_VERSION: ${{ env.RELEASE_VERSION }}
|
||||||
|
CI_SHA_SUM: ${{ env.CRUNCHY_CLI_SHA256 }}
|
||||||
|
CI_GIT_HASH: ${{ env.CRUNCHY_CLI_GIT_HASH }}
|
||||||
|
run: envsubst '$CI_PKG_VERSION,$CI_SHA_SUM,$CI_GIT_HASH' < .github/workflow-resources/PKGBUILD.source > PKGBUILD
|
||||||
|
|
||||||
|
- name: Publish crunchy-cli to AUR
|
||||||
|
uses: KSXGitHub/github-actions-deploy-aur@v2.7.0
|
||||||
|
with:
|
||||||
|
pkgname: crunchy-cli
|
||||||
|
pkgbuild: ./PKGBUILD
|
||||||
|
commit_username: release-action
|
||||||
|
commit_email: ${{ secrets.AUR_EMAIL }}
|
||||||
|
ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
|
||||||
|
commit_message: Update to version ${{ env.RELEASE_VERSION }}
|
||||||
|
|
||||||
|
- name: Generate crunchy-cli-bin sha sums
|
||||||
|
run: |
|
||||||
|
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-linux-x86_64
|
||||||
|
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-linux-aarch64
|
||||||
|
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-completions.zip
|
||||||
|
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-manpages.zip
|
||||||
|
curl -LO https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/${{ github.ref_name }}/LICENSE
|
||||||
|
echo "CRUNCHY_CLI_BIN_x86_64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-linux-x86_64 | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||||
|
echo "CRUNCHY_CLI_BIN_aarch64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-linux-aarch64 | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||||
|
echo "CRUNCHY_CLI_BIN_COMPLETIONS_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-completions.zip | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||||
|
echo "CRUNCHY_CLI_BIN_MANPAGES_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-manpages.zip | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||||
|
echo "CRUNCHY_CLI_BIN_LICENSE_SHA256=$(sha256sum LICENSE | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Generate crunchy-cli-bin PKGBUILD
|
||||||
|
env:
|
||||||
|
CI_PKG_VERSION: ${{ env.RELEASE_VERSION }}
|
||||||
|
CI_AMD_BINARY_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_x86_64_SHA256 }}
|
||||||
|
CI_ARM_BINARY_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_aarch64_SHA256 }}
|
||||||
|
CI_MANPAGES_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_MANPAGES_SHA256 }}
|
||||||
|
CI_COMPLETIONS_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_COMPLETIONS_SHA256 }}
|
||||||
|
CI_LICENSE_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_LICENSE_SHA256 }}
|
||||||
|
run: envsubst '$CI_PKG_VERSION,$CI_AMD_BINARY_SHA_SUM,$CI_ARM_BINARY_SHA_SUM,$CI_COMPLETIONS_SHA_SUM,$CI_MANPAGES_SHA_SUM,$CI_LICENSE_SHA_SUM' < .github/workflow-resources/PKGBUILD.binary > PKGBUILD
|
||||||
|
|
||||||
|
- name: Publish crunchy-cli-bin to AUR
|
||||||
|
uses: KSXGitHub/github-actions-deploy-aur@v2.7.0
|
||||||
|
with:
|
||||||
|
pkgname: crunchy-cli-bin
|
||||||
|
pkgbuild: ./PKGBUILD
|
||||||
|
commit_username: release-action
|
||||||
|
commit_email: ${{ secrets.AUR_EMAIL }}
|
||||||
|
ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
|
||||||
|
commit_message: Update to version ${{ env.RELEASE_VERSION }}
|
||||||
9
.gitignore
vendored
9
.gitignore
vendored
|
|
@ -1,3 +1,10 @@
|
||||||
/.idea
|
# Rust
|
||||||
/target
|
/target
|
||||||
|
|
||||||
|
# Editor
|
||||||
|
/.idea
|
||||||
/.vscode
|
/.vscode
|
||||||
|
|
||||||
|
# Nix
|
||||||
|
/result
|
||||||
|
/.direnv
|
||||||
|
|
|
||||||
1855
Cargo.lock
generated
1855
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
27
Cargo.toml
27
Cargo.toml
|
|
@ -1,22 +1,41 @@
|
||||||
[package]
|
[package]
|
||||||
name = "crunchy-cli"
|
name = "crunchy-cli"
|
||||||
authors = ["Crunchy Labs Maintainers"]
|
authors = ["Crunchy Labs Maintainers"]
|
||||||
version = "3.0.0-dev.13"
|
version = "3.6.7"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
[features]
|
||||||
|
default = ["native-tls"]
|
||||||
|
|
||||||
|
rustls-tls = ["crunchy-cli-core/rustls-tls"]
|
||||||
|
native-tls = ["crunchy-cli-core/native-tls"]
|
||||||
|
openssl-tls = ["dep:native-tls-crate", "native-tls-crate/openssl", "crunchy-cli-core/openssl-tls"]
|
||||||
|
openssl-tls-static = ["dep:native-tls-crate", "native-tls-crate/openssl", "crunchy-cli-core/openssl-tls-static"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
tokio = { version = "1.27", features = ["macros", "rt-multi-thread", "time"], default-features = false }
|
tokio = { version = "1.38", features = ["macros", "rt-multi-thread", "time"], default-features = false }
|
||||||
|
|
||||||
|
native-tls-crate = { package = "native-tls", version = "0.2.12", optional = true }
|
||||||
|
|
||||||
crunchy-cli-core = { path = "./crunchy-cli-core" }
|
crunchy-cli-core = { path = "./crunchy-cli-core" }
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
chrono = "0.4"
|
chrono = "0.4"
|
||||||
clap = { version = "4.2", features = ["string"] }
|
clap = { version = "4.5", features = ["string"] }
|
||||||
clap_complete = "4.2"
|
clap_complete = "4.5"
|
||||||
clap_mangen = "0.2"
|
clap_mangen = "0.2"
|
||||||
|
|
||||||
crunchy-cli-core = { path = "./crunchy-cli-core" }
|
crunchy-cli-core = { path = "./crunchy-cli-core" }
|
||||||
|
|
||||||
|
[workspace]
|
||||||
|
members = ["crunchy-cli-core"]
|
||||||
|
|
||||||
|
[patch.crates-io]
|
||||||
|
# fork of the `native-tls` crate which can use openssl as backend on every platform. this is done as `reqwest` only
|
||||||
|
# supports `rustls` and `native-tls` as tls backend
|
||||||
|
native-tls = { git = "https://github.com/crunchy-labs/rust-not-so-native-tls.git", rev = "c7ac566" }
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
strip = true
|
strip = true
|
||||||
opt-level = "z"
|
opt-level = "z"
|
||||||
|
|
|
||||||
699
LICENSE
699
LICENSE
|
|
@ -1,674 +1,25 @@
|
||||||
GNU GENERAL PUBLIC LICENSE
|
Copyright (c) 2023-NOW Crunchy Labs Team
|
||||||
Version 3, 29 June 2007
|
|
||||||
|
Permission is hereby granted, free of charge, to any
|
||||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
person obtaining a copy of this software and associated
|
||||||
Everyone is permitted to copy and distribute verbatim copies
|
documentation files (the "Software"), to deal in the
|
||||||
of this license document, but changing it is not allowed.
|
Software without restriction, including without
|
||||||
|
limitation the rights to use, copy, modify, merge,
|
||||||
Preamble
|
publish, distribute, sublicense, and/or sell copies of
|
||||||
|
the Software, and to permit persons to whom the Software
|
||||||
The GNU General Public License is a free, copyleft license for
|
is furnished to do so, subject to the following
|
||||||
software and other kinds of works.
|
conditions:
|
||||||
|
|
||||||
The licenses for most software and other practical works are designed
|
The above copyright notice and this permission notice
|
||||||
to take away your freedom to share and change the works. By contrast,
|
shall be included in all copies or substantial portions
|
||||||
the GNU General Public License is intended to guarantee your freedom to
|
of the Software.
|
||||||
share and change all versions of a program--to make sure it remains free
|
|
||||||
software for all its users. We, the Free Software Foundation, use the
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||||
GNU General Public License for most of our software; it applies also to
|
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||||
any other work released this way by its authors. You can apply it to
|
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||||
your programs, too.
|
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||||
|
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||||
When we speak of free software, we are referring to freedom, not
|
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||||
price. Our General Public Licenses are designed to make sure that you
|
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||||
have the freedom to distribute copies of free software (and charge for
|
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||||
them if you wish), that you receive source code or can get it if you
|
DEALINGS IN THE SOFTWARE.
|
||||||
want it, that you can change the software or use pieces of it in new
|
|
||||||
free programs, and that you know you can do these things.
|
|
||||||
|
|
||||||
To protect your rights, we need to prevent others from denying you
|
|
||||||
these rights or asking you to surrender the rights. Therefore, you have
|
|
||||||
certain responsibilities if you distribute copies of the software, or if
|
|
||||||
you modify it: responsibilities to respect the freedom of others.
|
|
||||||
|
|
||||||
For example, if you distribute copies of such a program, whether
|
|
||||||
gratis or for a fee, you must pass on to the recipients the same
|
|
||||||
freedoms that you received. You must make sure that they, too, receive
|
|
||||||
or can get the source code. And you must show them these terms so they
|
|
||||||
know their rights.
|
|
||||||
|
|
||||||
Developers that use the GNU GPL protect your rights with two steps:
|
|
||||||
(1) assert copyright on the software, and (2) offer you this License
|
|
||||||
giving you legal permission to copy, distribute and/or modify it.
|
|
||||||
|
|
||||||
For the developers' and authors' protection, the GPL clearly explains
|
|
||||||
that there is no warranty for this free software. For both users' and
|
|
||||||
authors' sake, the GPL requires that modified versions be marked as
|
|
||||||
changed, so that their problems will not be attributed erroneously to
|
|
||||||
authors of previous versions.
|
|
||||||
|
|
||||||
Some devices are designed to deny users access to install or run
|
|
||||||
modified versions of the software inside them, although the manufacturer
|
|
||||||
can do so. This is fundamentally incompatible with the aim of
|
|
||||||
protecting users' freedom to change the software. The systematic
|
|
||||||
pattern of such abuse occurs in the area of products for individuals to
|
|
||||||
use, which is precisely where it is most unacceptable. Therefore, we
|
|
||||||
have designed this version of the GPL to prohibit the practice for those
|
|
||||||
products. If such problems arise substantially in other domains, we
|
|
||||||
stand ready to extend this provision to those domains in future versions
|
|
||||||
of the GPL, as needed to protect the freedom of users.
|
|
||||||
|
|
||||||
Finally, every program is threatened constantly by software patents.
|
|
||||||
States should not allow patents to restrict development and use of
|
|
||||||
software on general-purpose computers, but in those that do, we wish to
|
|
||||||
avoid the special danger that patents applied to a free program could
|
|
||||||
make it effectively proprietary. To prevent this, the GPL assures that
|
|
||||||
patents cannot be used to render the program non-free.
|
|
||||||
|
|
||||||
The precise terms and conditions for copying, distribution and
|
|
||||||
modification follow.
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
0. Definitions.
|
|
||||||
|
|
||||||
"This License" refers to version 3 of the GNU General Public License.
|
|
||||||
|
|
||||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
|
||||||
works, such as semiconductor masks.
|
|
||||||
|
|
||||||
"The Program" refers to any copyrightable work licensed under this
|
|
||||||
License. Each licensee is addressed as "you". "Licensees" and
|
|
||||||
"recipients" may be individuals or organizations.
|
|
||||||
|
|
||||||
To "modify" a work means to copy from or adapt all or part of the work
|
|
||||||
in a fashion requiring copyright permission, other than the making of an
|
|
||||||
exact copy. The resulting work is called a "modified version" of the
|
|
||||||
earlier work or a work "based on" the earlier work.
|
|
||||||
|
|
||||||
A "covered work" means either the unmodified Program or a work based
|
|
||||||
on the Program.
|
|
||||||
|
|
||||||
To "propagate" a work means to do anything with it that, without
|
|
||||||
permission, would make you directly or secondarily liable for
|
|
||||||
infringement under applicable copyright law, except executing it on a
|
|
||||||
computer or modifying a private copy. Propagation includes copying,
|
|
||||||
distribution (with or without modification), making available to the
|
|
||||||
public, and in some countries other activities as well.
|
|
||||||
|
|
||||||
To "convey" a work means any kind of propagation that enables other
|
|
||||||
parties to make or receive copies. Mere interaction with a user through
|
|
||||||
a computer network, with no transfer of a copy, is not conveying.
|
|
||||||
|
|
||||||
An interactive user interface displays "Appropriate Legal Notices"
|
|
||||||
to the extent that it includes a convenient and prominently visible
|
|
||||||
feature that (1) displays an appropriate copyright notice, and (2)
|
|
||||||
tells the user that there is no warranty for the work (except to the
|
|
||||||
extent that warranties are provided), that licensees may convey the
|
|
||||||
work under this License, and how to view a copy of this License. If
|
|
||||||
the interface presents a list of user commands or options, such as a
|
|
||||||
menu, a prominent item in the list meets this criterion.
|
|
||||||
|
|
||||||
1. Source Code.
|
|
||||||
|
|
||||||
The "source code" for a work means the preferred form of the work
|
|
||||||
for making modifications to it. "Object code" means any non-source
|
|
||||||
form of a work.
|
|
||||||
|
|
||||||
A "Standard Interface" means an interface that either is an official
|
|
||||||
standard defined by a recognized standards body, or, in the case of
|
|
||||||
interfaces specified for a particular programming language, one that
|
|
||||||
is widely used among developers working in that language.
|
|
||||||
|
|
||||||
The "System Libraries" of an executable work include anything, other
|
|
||||||
than the work as a whole, that (a) is included in the normal form of
|
|
||||||
packaging a Major Component, but which is not part of that Major
|
|
||||||
Component, and (b) serves only to enable use of the work with that
|
|
||||||
Major Component, or to implement a Standard Interface for which an
|
|
||||||
implementation is available to the public in source code form. A
|
|
||||||
"Major Component", in this context, means a major essential component
|
|
||||||
(kernel, window system, and so on) of the specific operating system
|
|
||||||
(if any) on which the executable work runs, or a compiler used to
|
|
||||||
produce the work, or an object code interpreter used to run it.
|
|
||||||
|
|
||||||
The "Corresponding Source" for a work in object code form means all
|
|
||||||
the source code needed to generate, install, and (for an executable
|
|
||||||
work) run the object code and to modify the work, including scripts to
|
|
||||||
control those activities. However, it does not include the work's
|
|
||||||
System Libraries, or general-purpose tools or generally available free
|
|
||||||
programs which are used unmodified in performing those activities but
|
|
||||||
which are not part of the work. For example, Corresponding Source
|
|
||||||
includes interface definition files associated with source files for
|
|
||||||
the work, and the source code for shared libraries and dynamically
|
|
||||||
linked subprograms that the work is specifically designed to require,
|
|
||||||
such as by intimate data communication or control flow between those
|
|
||||||
subprograms and other parts of the work.
|
|
||||||
|
|
||||||
The Corresponding Source need not include anything that users
|
|
||||||
can regenerate automatically from other parts of the Corresponding
|
|
||||||
Source.
|
|
||||||
|
|
||||||
The Corresponding Source for a work in source code form is that
|
|
||||||
same work.
|
|
||||||
|
|
||||||
2. Basic Permissions.
|
|
||||||
|
|
||||||
All rights granted under this License are granted for the term of
|
|
||||||
copyright on the Program, and are irrevocable provided the stated
|
|
||||||
conditions are met. This License explicitly affirms your unlimited
|
|
||||||
permission to run the unmodified Program. The output from running a
|
|
||||||
covered work is covered by this License only if the output, given its
|
|
||||||
content, constitutes a covered work. This License acknowledges your
|
|
||||||
rights of fair use or other equivalent, as provided by copyright law.
|
|
||||||
|
|
||||||
You may make, run and propagate covered works that you do not
|
|
||||||
convey, without conditions so long as your license otherwise remains
|
|
||||||
in force. You may convey covered works to others for the sole purpose
|
|
||||||
of having them make modifications exclusively for you, or provide you
|
|
||||||
with facilities for running those works, provided that you comply with
|
|
||||||
the terms of this License in conveying all material for which you do
|
|
||||||
not control copyright. Those thus making or running the covered works
|
|
||||||
for you must do so exclusively on your behalf, under your direction
|
|
||||||
and control, on terms that prohibit them from making any copies of
|
|
||||||
your copyrighted material outside their relationship with you.
|
|
||||||
|
|
||||||
Conveying under any other circumstances is permitted solely under
|
|
||||||
the conditions stated below. Sublicensing is not allowed; section 10
|
|
||||||
makes it unnecessary.
|
|
||||||
|
|
||||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
|
||||||
|
|
||||||
No covered work shall be deemed part of an effective technological
|
|
||||||
measure under any applicable law fulfilling obligations under article
|
|
||||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
|
||||||
similar laws prohibiting or restricting circumvention of such
|
|
||||||
measures.
|
|
||||||
|
|
||||||
When you convey a covered work, you waive any legal power to forbid
|
|
||||||
circumvention of technological measures to the extent such circumvention
|
|
||||||
is effected by exercising rights under this License with respect to
|
|
||||||
the covered work, and you disclaim any intention to limit operation or
|
|
||||||
modification of the work as a means of enforcing, against the work's
|
|
||||||
users, your or third parties' legal rights to forbid circumvention of
|
|
||||||
technological measures.
|
|
||||||
|
|
||||||
4. Conveying Verbatim Copies.
|
|
||||||
|
|
||||||
You may convey verbatim copies of the Program's source code as you
|
|
||||||
receive it, in any medium, provided that you conspicuously and
|
|
||||||
appropriately publish on each copy an appropriate copyright notice;
|
|
||||||
keep intact all notices stating that this License and any
|
|
||||||
non-permissive terms added in accord with section 7 apply to the code;
|
|
||||||
keep intact all notices of the absence of any warranty; and give all
|
|
||||||
recipients a copy of this License along with the Program.
|
|
||||||
|
|
||||||
You may charge any price or no price for each copy that you convey,
|
|
||||||
and you may offer support or warranty protection for a fee.
|
|
||||||
|
|
||||||
5. Conveying Modified Source Versions.
|
|
||||||
|
|
||||||
You may convey a work based on the Program, or the modifications to
|
|
||||||
produce it from the Program, in the form of source code under the
|
|
||||||
terms of section 4, provided that you also meet all of these conditions:
|
|
||||||
|
|
||||||
a) The work must carry prominent notices stating that you modified
|
|
||||||
it, and giving a relevant date.
|
|
||||||
|
|
||||||
b) The work must carry prominent notices stating that it is
|
|
||||||
released under this License and any conditions added under section
|
|
||||||
7. This requirement modifies the requirement in section 4 to
|
|
||||||
"keep intact all notices".
|
|
||||||
|
|
||||||
c) You must license the entire work, as a whole, under this
|
|
||||||
License to anyone who comes into possession of a copy. This
|
|
||||||
License will therefore apply, along with any applicable section 7
|
|
||||||
additional terms, to the whole of the work, and all its parts,
|
|
||||||
regardless of how they are packaged. This License gives no
|
|
||||||
permission to license the work in any other way, but it does not
|
|
||||||
invalidate such permission if you have separately received it.
|
|
||||||
|
|
||||||
d) If the work has interactive user interfaces, each must display
|
|
||||||
Appropriate Legal Notices; however, if the Program has interactive
|
|
||||||
interfaces that do not display Appropriate Legal Notices, your
|
|
||||||
work need not make them do so.
|
|
||||||
|
|
||||||
A compilation of a covered work with other separate and independent
|
|
||||||
works, which are not by their nature extensions of the covered work,
|
|
||||||
and which are not combined with it such as to form a larger program,
|
|
||||||
in or on a volume of a storage or distribution medium, is called an
|
|
||||||
"aggregate" if the compilation and its resulting copyright are not
|
|
||||||
used to limit the access or legal rights of the compilation's users
|
|
||||||
beyond what the individual works permit. Inclusion of a covered work
|
|
||||||
in an aggregate does not cause this License to apply to the other
|
|
||||||
parts of the aggregate.
|
|
||||||
|
|
||||||
6. Conveying Non-Source Forms.
|
|
||||||
|
|
||||||
You may convey a covered work in object code form under the terms
|
|
||||||
of sections 4 and 5, provided that you also convey the
|
|
||||||
machine-readable Corresponding Source under the terms of this License,
|
|
||||||
in one of these ways:
|
|
||||||
|
|
||||||
a) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by the
|
|
||||||
Corresponding Source fixed on a durable physical medium
|
|
||||||
customarily used for software interchange.
|
|
||||||
|
|
||||||
b) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by a
|
|
||||||
written offer, valid for at least three years and valid for as
|
|
||||||
long as you offer spare parts or customer support for that product
|
|
||||||
model, to give anyone who possesses the object code either (1) a
|
|
||||||
copy of the Corresponding Source for all the software in the
|
|
||||||
product that is covered by this License, on a durable physical
|
|
||||||
medium customarily used for software interchange, for a price no
|
|
||||||
more than your reasonable cost of physically performing this
|
|
||||||
conveying of source, or (2) access to copy the
|
|
||||||
Corresponding Source from a network server at no charge.
|
|
||||||
|
|
||||||
c) Convey individual copies of the object code with a copy of the
|
|
||||||
written offer to provide the Corresponding Source. This
|
|
||||||
alternative is allowed only occasionally and noncommercially, and
|
|
||||||
only if you received the object code with such an offer, in accord
|
|
||||||
with subsection 6b.
|
|
||||||
|
|
||||||
d) Convey the object code by offering access from a designated
|
|
||||||
place (gratis or for a charge), and offer equivalent access to the
|
|
||||||
Corresponding Source in the same way through the same place at no
|
|
||||||
further charge. You need not require recipients to copy the
|
|
||||||
Corresponding Source along with the object code. If the place to
|
|
||||||
copy the object code is a network server, the Corresponding Source
|
|
||||||
may be on a different server (operated by you or a third party)
|
|
||||||
that supports equivalent copying facilities, provided you maintain
|
|
||||||
clear directions next to the object code saying where to find the
|
|
||||||
Corresponding Source. Regardless of what server hosts the
|
|
||||||
Corresponding Source, you remain obligated to ensure that it is
|
|
||||||
available for as long as needed to satisfy these requirements.
|
|
||||||
|
|
||||||
e) Convey the object code using peer-to-peer transmission, provided
|
|
||||||
you inform other peers where the object code and Corresponding
|
|
||||||
Source of the work are being offered to the general public at no
|
|
||||||
charge under subsection 6d.
|
|
||||||
|
|
||||||
A separable portion of the object code, whose source code is excluded
|
|
||||||
from the Corresponding Source as a System Library, need not be
|
|
||||||
included in conveying the object code work.
|
|
||||||
|
|
||||||
A "User Product" is either (1) a "consumer product", which means any
|
|
||||||
tangible personal property which is normally used for personal, family,
|
|
||||||
or household purposes, or (2) anything designed or sold for incorporation
|
|
||||||
into a dwelling. In determining whether a product is a consumer product,
|
|
||||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
|
||||||
product received by a particular user, "normally used" refers to a
|
|
||||||
typical or common use of that class of product, regardless of the status
|
|
||||||
of the particular user or of the way in which the particular user
|
|
||||||
actually uses, or expects or is expected to use, the product. A product
|
|
||||||
is a consumer product regardless of whether the product has substantial
|
|
||||||
commercial, industrial or non-consumer uses, unless such uses represent
|
|
||||||
the only significant mode of use of the product.
|
|
||||||
|
|
||||||
"Installation Information" for a User Product means any methods,
|
|
||||||
procedures, authorization keys, or other information required to install
|
|
||||||
and execute modified versions of a covered work in that User Product from
|
|
||||||
a modified version of its Corresponding Source. The information must
|
|
||||||
suffice to ensure that the continued functioning of the modified object
|
|
||||||
code is in no case prevented or interfered with solely because
|
|
||||||
modification has been made.
|
|
||||||
|
|
||||||
If you convey an object code work under this section in, or with, or
|
|
||||||
specifically for use in, a User Product, and the conveying occurs as
|
|
||||||
part of a transaction in which the right of possession and use of the
|
|
||||||
User Product is transferred to the recipient in perpetuity or for a
|
|
||||||
fixed term (regardless of how the transaction is characterized), the
|
|
||||||
Corresponding Source conveyed under this section must be accompanied
|
|
||||||
by the Installation Information. But this requirement does not apply
|
|
||||||
if neither you nor any third party retains the ability to install
|
|
||||||
modified object code on the User Product (for example, the work has
|
|
||||||
been installed in ROM).
|
|
||||||
|
|
||||||
The requirement to provide Installation Information does not include a
|
|
||||||
requirement to continue to provide support service, warranty, or updates
|
|
||||||
for a work that has been modified or installed by the recipient, or for
|
|
||||||
the User Product in which it has been modified or installed. Access to a
|
|
||||||
network may be denied when the modification itself materially and
|
|
||||||
adversely affects the operation of the network or violates the rules and
|
|
||||||
protocols for communication across the network.
|
|
||||||
|
|
||||||
Corresponding Source conveyed, and Installation Information provided,
|
|
||||||
in accord with this section must be in a format that is publicly
|
|
||||||
documented (and with an implementation available to the public in
|
|
||||||
source code form), and must require no special password or key for
|
|
||||||
unpacking, reading or copying.
|
|
||||||
|
|
||||||
7. Additional Terms.
|
|
||||||
|
|
||||||
"Additional permissions" are terms that supplement the terms of this
|
|
||||||
License by making exceptions from one or more of its conditions.
|
|
||||||
Additional permissions that are applicable to the entire Program shall
|
|
||||||
be treated as though they were included in this License, to the extent
|
|
||||||
that they are valid under applicable law. If additional permissions
|
|
||||||
apply only to part of the Program, that part may be used separately
|
|
||||||
under those permissions, but the entire Program remains governed by
|
|
||||||
this License without regard to the additional permissions.
|
|
||||||
|
|
||||||
When you convey a copy of a covered work, you may at your option
|
|
||||||
remove any additional permissions from that copy, or from any part of
|
|
||||||
it. (Additional permissions may be written to require their own
|
|
||||||
removal in certain cases when you modify the work.) You may place
|
|
||||||
additional permissions on material, added by you to a covered work,
|
|
||||||
for which you have or can give appropriate copyright permission.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, for material you
|
|
||||||
add to a covered work, you may (if authorized by the copyright holders of
|
|
||||||
that material) supplement the terms of this License with terms:
|
|
||||||
|
|
||||||
a) Disclaiming warranty or limiting liability differently from the
|
|
||||||
terms of sections 15 and 16 of this License; or
|
|
||||||
|
|
||||||
b) Requiring preservation of specified reasonable legal notices or
|
|
||||||
author attributions in that material or in the Appropriate Legal
|
|
||||||
Notices displayed by works containing it; or
|
|
||||||
|
|
||||||
c) Prohibiting misrepresentation of the origin of that material, or
|
|
||||||
requiring that modified versions of such material be marked in
|
|
||||||
reasonable ways as different from the original version; or
|
|
||||||
|
|
||||||
d) Limiting the use for publicity purposes of names of licensors or
|
|
||||||
authors of the material; or
|
|
||||||
|
|
||||||
e) Declining to grant rights under trademark law for use of some
|
|
||||||
trade names, trademarks, or service marks; or
|
|
||||||
|
|
||||||
f) Requiring indemnification of licensors and authors of that
|
|
||||||
material by anyone who conveys the material (or modified versions of
|
|
||||||
it) with contractual assumptions of liability to the recipient, for
|
|
||||||
any liability that these contractual assumptions directly impose on
|
|
||||||
those licensors and authors.
|
|
||||||
|
|
||||||
All other non-permissive additional terms are considered "further
|
|
||||||
restrictions" within the meaning of section 10. If the Program as you
|
|
||||||
received it, or any part of it, contains a notice stating that it is
|
|
||||||
governed by this License along with a term that is a further
|
|
||||||
restriction, you may remove that term. If a license document contains
|
|
||||||
a further restriction but permits relicensing or conveying under this
|
|
||||||
License, you may add to a covered work material governed by the terms
|
|
||||||
of that license document, provided that the further restriction does
|
|
||||||
not survive such relicensing or conveying.
|
|
||||||
|
|
||||||
If you add terms to a covered work in accord with this section, you
|
|
||||||
must place, in the relevant source files, a statement of the
|
|
||||||
additional terms that apply to those files, or a notice indicating
|
|
||||||
where to find the applicable terms.
|
|
||||||
|
|
||||||
Additional terms, permissive or non-permissive, may be stated in the
|
|
||||||
form of a separately written license, or stated as exceptions;
|
|
||||||
the above requirements apply either way.
|
|
||||||
|
|
||||||
8. Termination.
|
|
||||||
|
|
||||||
You may not propagate or modify a covered work except as expressly
|
|
||||||
provided under this License. Any attempt otherwise to propagate or
|
|
||||||
modify it is void, and will automatically terminate your rights under
|
|
||||||
this License (including any patent licenses granted under the third
|
|
||||||
paragraph of section 11).
|
|
||||||
|
|
||||||
However, if you cease all violation of this License, then your
|
|
||||||
license from a particular copyright holder is reinstated (a)
|
|
||||||
provisionally, unless and until the copyright holder explicitly and
|
|
||||||
finally terminates your license, and (b) permanently, if the copyright
|
|
||||||
holder fails to notify you of the violation by some reasonable means
|
|
||||||
prior to 60 days after the cessation.
|
|
||||||
|
|
||||||
Moreover, your license from a particular copyright holder is
|
|
||||||
reinstated permanently if the copyright holder notifies you of the
|
|
||||||
violation by some reasonable means, this is the first time you have
|
|
||||||
received notice of violation of this License (for any work) from that
|
|
||||||
copyright holder, and you cure the violation prior to 30 days after
|
|
||||||
your receipt of the notice.
|
|
||||||
|
|
||||||
Termination of your rights under this section does not terminate the
|
|
||||||
licenses of parties who have received copies or rights from you under
|
|
||||||
this License. If your rights have been terminated and not permanently
|
|
||||||
reinstated, you do not qualify to receive new licenses for the same
|
|
||||||
material under section 10.
|
|
||||||
|
|
||||||
9. Acceptance Not Required for Having Copies.
|
|
||||||
|
|
||||||
You are not required to accept this License in order to receive or
|
|
||||||
run a copy of the Program. Ancillary propagation of a covered work
|
|
||||||
occurring solely as a consequence of using peer-to-peer transmission
|
|
||||||
to receive a copy likewise does not require acceptance. However,
|
|
||||||
nothing other than this License grants you permission to propagate or
|
|
||||||
modify any covered work. These actions infringe copyright if you do
|
|
||||||
not accept this License. Therefore, by modifying or propagating a
|
|
||||||
covered work, you indicate your acceptance of this License to do so.
|
|
||||||
|
|
||||||
10. Automatic Licensing of Downstream Recipients.
|
|
||||||
|
|
||||||
Each time you convey a covered work, the recipient automatically
|
|
||||||
receives a license from the original licensors, to run, modify and
|
|
||||||
propagate that work, subject to this License. You are not responsible
|
|
||||||
for enforcing compliance by third parties with this License.
|
|
||||||
|
|
||||||
An "entity transaction" is a transaction transferring control of an
|
|
||||||
organization, or substantially all assets of one, or subdividing an
|
|
||||||
organization, or merging organizations. If propagation of a covered
|
|
||||||
work results from an entity transaction, each party to that
|
|
||||||
transaction who receives a copy of the work also receives whatever
|
|
||||||
licenses to the work the party's predecessor in interest had or could
|
|
||||||
give under the previous paragraph, plus a right to possession of the
|
|
||||||
Corresponding Source of the work from the predecessor in interest, if
|
|
||||||
the predecessor has it or can get it with reasonable efforts.
|
|
||||||
|
|
||||||
You may not impose any further restrictions on the exercise of the
|
|
||||||
rights granted or affirmed under this License. For example, you may
|
|
||||||
not impose a license fee, royalty, or other charge for exercise of
|
|
||||||
rights granted under this License, and you may not initiate litigation
|
|
||||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
|
||||||
any patent claim is infringed by making, using, selling, offering for
|
|
||||||
sale, or importing the Program or any portion of it.
|
|
||||||
|
|
||||||
11. Patents.
|
|
||||||
|
|
||||||
A "contributor" is a copyright holder who authorizes use under this
|
|
||||||
License of the Program or a work on which the Program is based. The
|
|
||||||
work thus licensed is called the contributor's "contributor version".
|
|
||||||
|
|
||||||
A contributor's "essential patent claims" are all patent claims
|
|
||||||
owned or controlled by the contributor, whether already acquired or
|
|
||||||
hereafter acquired, that would be infringed by some manner, permitted
|
|
||||||
by this License, of making, using, or selling its contributor version,
|
|
||||||
but do not include claims that would be infringed only as a
|
|
||||||
consequence of further modification of the contributor version. For
|
|
||||||
purposes of this definition, "control" includes the right to grant
|
|
||||||
patent sublicenses in a manner consistent with the requirements of
|
|
||||||
this License.
|
|
||||||
|
|
||||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
|
||||||
patent license under the contributor's essential patent claims, to
|
|
||||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
|
||||||
propagate the contents of its contributor version.
|
|
||||||
|
|
||||||
In the following three paragraphs, a "patent license" is any express
|
|
||||||
agreement or commitment, however denominated, not to enforce a patent
|
|
||||||
(such as an express permission to practice a patent or covenant not to
|
|
||||||
sue for patent infringement). To "grant" such a patent license to a
|
|
||||||
party means to make such an agreement or commitment not to enforce a
|
|
||||||
patent against the party.
|
|
||||||
|
|
||||||
If you convey a covered work, knowingly relying on a patent license,
|
|
||||||
and the Corresponding Source of the work is not available for anyone
|
|
||||||
to copy, free of charge and under the terms of this License, through a
|
|
||||||
publicly available network server or other readily accessible means,
|
|
||||||
then you must either (1) cause the Corresponding Source to be so
|
|
||||||
available, or (2) arrange to deprive yourself of the benefit of the
|
|
||||||
patent license for this particular work, or (3) arrange, in a manner
|
|
||||||
consistent with the requirements of this License, to extend the patent
|
|
||||||
license to downstream recipients. "Knowingly relying" means you have
|
|
||||||
actual knowledge that, but for the patent license, your conveying the
|
|
||||||
covered work in a country, or your recipient's use of the covered work
|
|
||||||
in a country, would infringe one or more identifiable patents in that
|
|
||||||
country that you have reason to believe are valid.
|
|
||||||
|
|
||||||
If, pursuant to or in connection with a single transaction or
|
|
||||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
|
||||||
covered work, and grant a patent license to some of the parties
|
|
||||||
receiving the covered work authorizing them to use, propagate, modify
|
|
||||||
or convey a specific copy of the covered work, then the patent license
|
|
||||||
you grant is automatically extended to all recipients of the covered
|
|
||||||
work and works based on it.
|
|
||||||
|
|
||||||
A patent license is "discriminatory" if it does not include within
|
|
||||||
the scope of its coverage, prohibits the exercise of, or is
|
|
||||||
conditioned on the non-exercise of one or more of the rights that are
|
|
||||||
specifically granted under this License. You may not convey a covered
|
|
||||||
work if you are a party to an arrangement with a third party that is
|
|
||||||
in the business of distributing software, under which you make payment
|
|
||||||
to the third party based on the extent of your activity of conveying
|
|
||||||
the work, and under which the third party grants, to any of the
|
|
||||||
parties who would receive the covered work from you, a discriminatory
|
|
||||||
patent license (a) in connection with copies of the covered work
|
|
||||||
conveyed by you (or copies made from those copies), or (b) primarily
|
|
||||||
for and in connection with specific products or compilations that
|
|
||||||
contain the covered work, unless you entered into that arrangement,
|
|
||||||
or that patent license was granted, prior to 28 March 2007.
|
|
||||||
|
|
||||||
Nothing in this License shall be construed as excluding or limiting
|
|
||||||
any implied license or other defenses to infringement that may
|
|
||||||
otherwise be available to you under applicable patent law.
|
|
||||||
|
|
||||||
12. No Surrender of Others' Freedom.
|
|
||||||
|
|
||||||
If conditions are imposed on you (whether by court order, agreement or
|
|
||||||
otherwise) that contradict the conditions of this License, they do not
|
|
||||||
excuse you from the conditions of this License. If you cannot convey a
|
|
||||||
covered work so as to satisfy simultaneously your obligations under this
|
|
||||||
License and any other pertinent obligations, then as a consequence you may
|
|
||||||
not convey it at all. For example, if you agree to terms that obligate you
|
|
||||||
to collect a royalty for further conveying from those to whom you convey
|
|
||||||
the Program, the only way you could satisfy both those terms and this
|
|
||||||
License would be to refrain entirely from conveying the Program.
|
|
||||||
|
|
||||||
13. Use with the GNU Affero General Public License.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, you have
|
|
||||||
permission to link or combine any covered work with a work licensed
|
|
||||||
under version 3 of the GNU Affero General Public License into a single
|
|
||||||
combined work, and to convey the resulting work. The terms of this
|
|
||||||
License will continue to apply to the part which is the covered work,
|
|
||||||
but the special requirements of the GNU Affero General Public License,
|
|
||||||
section 13, concerning interaction through a network will apply to the
|
|
||||||
combination as such.
|
|
||||||
|
|
||||||
14. Revised Versions of this License.
|
|
||||||
|
|
||||||
The Free Software Foundation may publish revised and/or new versions of
|
|
||||||
the GNU General Public License from time to time. Such new versions will
|
|
||||||
be similar in spirit to the present version, but may differ in detail to
|
|
||||||
address new problems or concerns.
|
|
||||||
|
|
||||||
Each version is given a distinguishing version number. If the
|
|
||||||
Program specifies that a certain numbered version of the GNU General
|
|
||||||
Public License "or any later version" applies to it, you have the
|
|
||||||
option of following the terms and conditions either of that numbered
|
|
||||||
version or of any later version published by the Free Software
|
|
||||||
Foundation. If the Program does not specify a version number of the
|
|
||||||
GNU General Public License, you may choose any version ever published
|
|
||||||
by the Free Software Foundation.
|
|
||||||
|
|
||||||
If the Program specifies that a proxy can decide which future
|
|
||||||
versions of the GNU General Public License can be used, that proxy's
|
|
||||||
public statement of acceptance of a version permanently authorizes you
|
|
||||||
to choose that version for the Program.
|
|
||||||
|
|
||||||
Later license versions may give you additional or different
|
|
||||||
permissions. However, no additional obligations are imposed on any
|
|
||||||
author or copyright holder as a result of your choosing to follow a
|
|
||||||
later version.
|
|
||||||
|
|
||||||
15. Disclaimer of Warranty.
|
|
||||||
|
|
||||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
|
||||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
|
||||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
|
||||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
||||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
|
||||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
|
||||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
|
||||||
|
|
||||||
16. Limitation of Liability.
|
|
||||||
|
|
||||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
|
||||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
|
||||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
|
||||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
|
||||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
|
||||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
|
||||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
|
||||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
|
||||||
SUCH DAMAGES.
|
|
||||||
|
|
||||||
17. Interpretation of Sections 15 and 16.
|
|
||||||
|
|
||||||
If the disclaimer of warranty and limitation of liability provided
|
|
||||||
above cannot be given local legal effect according to their terms,
|
|
||||||
reviewing courts shall apply local law that most closely approximates
|
|
||||||
an absolute waiver of all civil liability in connection with the
|
|
||||||
Program, unless a warranty or assumption of liability accompanies a
|
|
||||||
copy of the Program in return for a fee.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
How to Apply These Terms to Your New Programs
|
|
||||||
|
|
||||||
If you develop a new program, and you want it to be of the greatest
|
|
||||||
possible use to the public, the best way to achieve this is to make it
|
|
||||||
free software which everyone can redistribute and change under these terms.
|
|
||||||
|
|
||||||
To do so, attach the following notices to the program. It is safest
|
|
||||||
to attach them to the start of each source file to most effectively
|
|
||||||
state the exclusion of warranty; and each file should have at least
|
|
||||||
the "copyright" line and a pointer to where the full notice is found.
|
|
||||||
|
|
||||||
<one line to give the program's name and a brief idea of what it does.>
|
|
||||||
Copyright (C) <year> <name of author>
|
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
|
||||||
it under the terms of the GNU General Public License as published by
|
|
||||||
the Free Software Foundation, either version 3 of the License, or
|
|
||||||
(at your option) any later version.
|
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
GNU General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU General Public License
|
|
||||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
Also add information on how to contact you by electronic and paper mail.
|
|
||||||
|
|
||||||
If the program does terminal interaction, make it output a short
|
|
||||||
notice like this when it starts in an interactive mode:
|
|
||||||
|
|
||||||
<program> Copyright (C) <year> <name of author>
|
|
||||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
|
||||||
This is free software, and you are welcome to redistribute it
|
|
||||||
under certain conditions; type `show c' for details.
|
|
||||||
|
|
||||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
|
||||||
parts of the General Public License. Of course, your program's commands
|
|
||||||
might be different; for a GUI interface, you would use an "about box".
|
|
||||||
|
|
||||||
You should also get your employer (if you work as a programmer) or school,
|
|
||||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
|
||||||
For more information on this, and how to apply and follow the GNU GPL, see
|
|
||||||
<https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
The GNU General Public License does not permit incorporating your program
|
|
||||||
into proprietary programs. If your program is a subroutine library, you
|
|
||||||
may consider it more useful to permit linking proprietary applications with
|
|
||||||
the library. If this is what you want to do, use the GNU Lesser General
|
|
||||||
Public License instead of this License. But first, please read
|
|
||||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
|
||||||
|
|
|
||||||
648
README.md
648
README.md
|
|
@ -1,6 +1,8 @@
|
||||||
|
# This project has been sunset as Crunchyroll moved to a DRM-only system. See [#362](https://github.com/crunchy-labs/crunchy-cli/issues/362).
|
||||||
|
|
||||||
# crunchy-cli
|
# crunchy-cli
|
||||||
|
|
||||||
A pure [Rust](https://www.rust-lang.org/) CLI for [Crunchyroll](https://www.crunchyroll.com).
|
👇 A Command-line downloader for [Crunchyroll](https://www.crunchyroll.com).
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://github.com/crunchy-labs/crunchy-cli">
|
<a href="https://github.com/crunchy-labs/crunchy-cli">
|
||||||
|
|
@ -18,31 +20,28 @@ A pure [Rust](https://www.rust-lang.org/) CLI for [Crunchyroll](https://www.crun
|
||||||
<a href="https://discord.gg/PXGPGpQxgk">
|
<a href="https://discord.gg/PXGPGpQxgk">
|
||||||
<img src="https://img.shields.io/discord/994882878125121596?label=discord&style=flat-square" alt="Discord">
|
<img src="https://img.shields.io/discord/994882878125121596?label=discord&style=flat-square" alt="Discord">
|
||||||
</a>
|
</a>
|
||||||
<a href="https://github.com/crunchy-labs/crunchy-cli/actions/workflows/ci.yml">
|
<a href="https://github.com/crunchy-labs/crunchy-cli/actions/workflows/build.yml">
|
||||||
<img src="https://img.shields.io/github/actions/workflow/status/crunchy-labs/crunchy-cli/ci.yml?branch=master&style=flat-square" alt="CI">
|
<img src="https://img.shields.io/github/actions/workflow/status/crunchy-labs/crunchy-cli/build.yml?branch=master&style=flat-square" alt="Build">
|
||||||
</a>
|
</a>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="#%EF%B8%8F-usage">Usage 🖥️</a>
|
<a href="#%EF%B8%8F-usage">Usage 🖥️</a>
|
||||||
•
|
•
|
||||||
<a href="#%EF%B8%8F-disclaimer">Disclaimer 📜</a>
|
<a href="#-disclaimer">Disclaimer 📜</a>
|
||||||
•
|
•
|
||||||
<a href="#-license">License ⚖</a>
|
<a href="#-license">License ⚖</a>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
> We are in no way affiliated with, maintained, authorized, sponsored, or officially associated with Crunchyroll LLC or any of its subsidiaries or affiliates.
|
> We are in no way affiliated with, maintained, authorized, sponsored, or officially associated with Crunchyroll LLC or any of its subsidiaries or affiliates.
|
||||||
> The official Crunchyroll website can be found at [crunchyroll.com](https://crunchyroll.com/).
|
> The official Crunchyroll website can be found at [www.crunchyroll.com](https://www.crunchyroll.com/).
|
||||||
|
|
||||||
> This README belongs to the _master_ branch which is currently under heavy development towards the next major version (3.0).
|
|
||||||
> It is mostly stable but some issues may still occur.
|
|
||||||
> If you do not want to use an under-development / pre-release version, head over to the _[golang](https://github.com/crunchy-labs/crunchy-cli/tree/golang)_ branch which contains the EOL but last stable version (and documentation for it).
|
|
||||||
|
|
||||||
## ✨ Features
|
## ✨ Features
|
||||||
|
|
||||||
- Download single videos and entire series from [Crunchyroll](https://www.crunchyroll.com).
|
- Download single videos and entire series from [Crunchyroll](https://www.crunchyroll.com).
|
||||||
- Archive episodes or seasons in an `.mkv` file with multiple subtitles and audios.
|
- Archive episodes or seasons in an `.mkv` file with multiple subtitles and audios.
|
||||||
- Specify a range of episodes to download from an anime.
|
- Specify a range of episodes to download from an anime.
|
||||||
|
- Search through the Crunchyroll collection and return metadata (title, duration, direct stream link, ...) of all media types.
|
||||||
|
|
||||||
## 💾 Get the executable
|
## 💾 Get the executable
|
||||||
|
|
||||||
|
|
@ -50,14 +49,58 @@ A pure [Rust](https://www.rust-lang.org/) CLI for [Crunchyroll](https://www.crun
|
||||||
|
|
||||||
Check out the [releases](https://github.com/crunchy-labs/crunchy-cli/releases) tab and get the binary from the latest (pre-)release.
|
Check out the [releases](https://github.com/crunchy-labs/crunchy-cli/releases) tab and get the binary from the latest (pre-)release.
|
||||||
|
|
||||||
|
### 📦 Get it via a package manager
|
||||||
|
|
||||||
|
- [AUR](https://aur.archlinux.org/)
|
||||||
|
|
||||||
|
If you're using Arch or an Arch based Linux distribution you are able to install our [AUR](https://aur.archlinux.org/) package.
|
||||||
|
You need an [AUR helper](https://wiki.archlinux.org/title/AUR_helpers) like [yay](https://github.com/Jguer/yay) to install it.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# this package builds crunchy-cli manually (recommended)
|
||||||
|
$ yay -S crunchy-cli
|
||||||
|
# this package installs the latest pre-compiled release binary
|
||||||
|
$ yay -S crunchy-cli-bin
|
||||||
|
```
|
||||||
|
|
||||||
|
- [Scoop](https://scoop.sh/)
|
||||||
|
|
||||||
|
For Windows users, we support the [scoop](https://scoop.sh/#/) command-line installer.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ scoop bucket add extras
|
||||||
|
$ scoop install extras/crunchy-cli
|
||||||
|
```
|
||||||
|
|
||||||
|
- [Homebrew](https://brew.sh/)
|
||||||
|
|
||||||
|
For macOS/linux users, we support the [brew](https://brew.sh/#/) command-line installer. Packages are compiled by the [homebrew project](https://formulae.brew.sh/formula/crunchy-cli), and will also install the `openssl@3` and `ffmpeg` dependencies.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ brew install crunchy-cli
|
||||||
|
```
|
||||||
|
|
||||||
|
Supported archs: `x86_64_linux`, `arm64_monterey`, `sonoma`, `ventura`
|
||||||
|
|
||||||
|
- [Nix](https://nixos.org/)
|
||||||
|
|
||||||
|
This requires [nix](https://nixos.org) and you'll probably need `--extra-experimental-features "nix-command flakes"`, depending on your configurations.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ nix <run|shell|develop> github:crunchy-labs/crunchy-cli
|
||||||
|
```
|
||||||
|
|
||||||
### 🛠 Build it yourself
|
### 🛠 Build it yourself
|
||||||
|
|
||||||
Since we do not support every platform and architecture you may have to build the project yourself.
|
Since we do not support every platform and architecture you may have to build the project yourself.
|
||||||
This requires [git](https://git-scm.com/) and [Cargo](https://doc.rust-lang.org/cargo).
|
This requires [git](https://git-scm.com/) and [Cargo](https://doc.rust-lang.org/cargo).
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
$ git clone https://github.com/crunchy-labs/crunchy-cli
|
$ git clone https://github.com/crunchy-labs/crunchy-cli
|
||||||
$ cd crunchy-cli
|
$ cd crunchy-cli
|
||||||
|
# either just build it (will be available in ./target/release/crunchy-cli)...
|
||||||
$ cargo build --release
|
$ cargo build --release
|
||||||
|
# ... or install it globally
|
||||||
$ cargo install --force --path .
|
$ cargo install --force --path .
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -65,163 +108,598 @@ $ cargo install --force --path .
|
||||||
|
|
||||||
> All shown commands are examples 🧑🏼🍳
|
> All shown commands are examples 🧑🏼🍳
|
||||||
|
|
||||||
|
### Global Flags
|
||||||
|
|
||||||
crunchy-cli requires you to log in.
|
crunchy-cli requires you to log in.
|
||||||
Though you can use a non-premium account, you will not have access to premium content without a subscription.
|
Though you can use a non-premium account, you will not have access to premium content without a subscription.
|
||||||
You can authenticate with your credentials (username:password) or by using a refresh token.
|
You can authenticate with your credentials (email:password) or by using a refresh token.
|
||||||
|
|
||||||
- Credentials
|
- <span id="global-credentials">Credentials</span>
|
||||||
- ```shell
|
|
||||||
$ crunchy --credentials "user:password"
|
```shell
|
||||||
```
|
$ crunchy-cli --credentials "email:password" <command>
|
||||||
- Refresh Token
|
```
|
||||||
- To obtain a refresh token, you have to log in at [crunchyroll.com](https://www.crunchyroll.com/) and extract the `etp_rt` cookie.
|
|
||||||
The easiest way to get it is via a browser extension which lets you export your cookies, like [Cookie-Editor](https://cookie-editor.cgagnier.ca/) ([Firefox](https://addons.mozilla.org/en-US/firefox/addon/cookie-editor/) / [Chrome](https://chrome.google.com/webstore/detail/cookie-editor/hlkenndednhfkekhgcdicdfddnkalmdm)).
|
- <span id="global-anonymous">Stay Anonymous</span>
|
||||||
When installed, look for the `etp_rt` entry and extract its value.
|
|
||||||
- ```shell
|
Login without an account (you won't be able to access premium content):
|
||||||
$ crunchy --etp-rt "4ebf1690-53a4-491a-a2ac-488309120f5d"
|
|
||||||
```
|
```shell
|
||||||
- Stay Anonymous
|
$ crunchy-cli --anonymous <command>
|
||||||
- Skip the login check:
|
```
|
||||||
- ```shell
|
|
||||||
$ crunchy --anonymous
|
### Global settings
|
||||||
```
|
|
||||||
|
You can set specific settings which will be
|
||||||
|
|
||||||
|
- <span id="global-verbose">Verbose output</span>
|
||||||
|
|
||||||
|
If you want to include debug information in the output, use the `-v` / `--verbose` flag to show it.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli -v <command>
|
||||||
|
```
|
||||||
|
|
||||||
|
This flag can't be used in combination with `-q` / `--quiet`.
|
||||||
|
|
||||||
|
- <span id="global-quiet">Quiet output</span>
|
||||||
|
|
||||||
|
If you want to hide all output, use the `-q` / `--quiet` flag to do so.
|
||||||
|
This is especially useful if you want to pipe the output video to an external program (like a video player).
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli -q <command>
|
||||||
|
```
|
||||||
|
|
||||||
|
This flag can't be used in combination with `-v` / `--verbose`.
|
||||||
|
|
||||||
|
- <span id="global-lang">Language</span>
|
||||||
|
|
||||||
|
By default, the resulting metadata like title or description are shown in your system language (if Crunchyroll supports it, else in English).
|
||||||
|
If you want to show the results in another language, use the `--lang` flag to set it.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli --lang de-DE <command>
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="global-experimental-fixes">Experimental fixes</span>
|
||||||
|
|
||||||
|
Crunchyroll constantly changes and breaks its services or just delivers incorrect answers.
|
||||||
|
The `--experimental-fixes` flag tries to fix some of those issues.
|
||||||
|
As the *experimental* in `--experimental-fixes` states, these fixes may or may not break other functionality.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli --experimental-fixes <command>
|
||||||
|
```
|
||||||
|
|
||||||
|
For an overview which parts this flag affects, see the [documentation](https://docs.rs/crunchyroll-rs/latest/crunchyroll_rs/crunchyroll/struct.CrunchyrollBuilder.html) of the underlying Crunchyroll library, all functions beginning with `stabilization_` are applied.
|
||||||
|
|
||||||
|
- <span id="global-proxy">Proxy</span>
|
||||||
|
|
||||||
|
The `--proxy` flag supports https and socks5 proxies to route all your traffic through.
|
||||||
|
This may be helpful to bypass the geo-restrictions Crunchyroll has on certain series.
|
||||||
|
You are also able to set in which part of the cli a proxy should be used.
|
||||||
|
Instead of a normal url you can also use: `<url>:` (only proxies api requests), `:<url>` (only proxies download traffic), `<url>:<url>` (proxies api requests through the first url and download traffic through the second url).
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli --proxy socks5://127.0.0.1:8080 <command>
|
||||||
|
```
|
||||||
|
|
||||||
|
Make sure that proxy can either forward TLS requests, which is needed to bypass the (cloudflare) bot protection, or that it is configured so that the proxy can bypass the protection itself.
|
||||||
|
|
||||||
|
- <span id="global-user-agent">User Agent</span>
|
||||||
|
|
||||||
|
There might be cases where a custom user agent is necessary, e.g. to bypass the cloudflare bot protection (#104).
|
||||||
|
In such cases, the `--user-agent` flag can be used to set a custom user agent.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli --user-agent "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)" <command>
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is the user agent, defined in the underlying [library](https://github.com/crunchy-labs/crunchyroll-rs).
|
||||||
|
|
||||||
|
- <span id="global-speed-limit">Speed limit</span>
|
||||||
|
|
||||||
|
If you want to limit how fast requests/downloads should be, you can use the `--speed-limit` flag. Allowed units are `B` (bytes), `KB` (kilobytes) and `MB` (megabytes).
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli --speed-limit 10MB
|
||||||
|
```
|
||||||
|
|
||||||
### Login
|
### Login
|
||||||
|
|
||||||
crunchy-cli can store your session, so you don't have to authenticate every time you execute a command.
|
The `login` command can store your session, so you don't have to authenticate every time you execute a command.
|
||||||
|
|
||||||
Note that the `login` keyword has to be used *last*.
|
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy --etp-rt "4ebf1690-53a4-491a-a2ac-488309120f5d" login
|
# save the refresh token which gets generated when login with credentials.
|
||||||
|
# your email and password won't be stored at any time on disk
|
||||||
|
$ crunchy-cli login --credentials "email:password"
|
||||||
```
|
```
|
||||||
|
|
||||||
With the session stored, you do not need to use `--credentials` / `--etp-rt` anymore. This does not work with `--anonymous`.
|
With the session stored, you do not need to pass `--credentials` / `--anonymous` anymore when you want to execute a command.
|
||||||
|
|
||||||
### Download
|
### Download
|
||||||
|
|
||||||
|
The `download` command lets you download episodes with a specific audio language and optional subtitles.
|
||||||
|
|
||||||
**Supported urls**
|
**Supported urls**
|
||||||
- Single episode
|
|
||||||
|
- Single episode (with [episode filtering](#episode-filtering))
|
||||||
```shell
|
```shell
|
||||||
$ crunchy download https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
$ crunchy-cli download https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
```
|
```
|
||||||
- Series
|
- Series (with [episode filtering](#episode-filtering))
|
||||||
```shell
|
```shell
|
||||||
$ crunchy download https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli download https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
**Options**
|
**Options**
|
||||||
- Audio language
|
|
||||||
|
- <span id="download-audio">Audio language</span>
|
||||||
|
|
||||||
Set the audio language with the `-a` / `--audio` flag.
|
Set the audio language with the `-a` / `--audio` flag.
|
||||||
This only works if the url points to a series since episode urls are language specific.
|
This only works if the url points to a series since episode urls are language specific.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy download -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli download -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
Default is your system locale. If not supported by Crunchyroll, `en-US` (American English) is the default.
|
Default is your system locale. If not supported by Crunchyroll, `en-US` (American English) is the default.
|
||||||
|
|
||||||
- Subtitle language
|
- <span id="download-subtitle">Subtitle language</span>
|
||||||
|
|
||||||
Besides the audio, you can specify the subtitle language by using the `-s` / `--subtitle` flag.
|
Besides the audio, you can specify the subtitle language by using the `-s` / `--subtitle` flag.
|
||||||
The subtitles will be burned into the video track (cf. [hardsub](https://www.urbandictionary.com/define.php?term=hardsub)) and thus can not be turned off.
|
In formats that support it (.mp4, .mov and .mkv ), subtitles are stored as soft-subs. All other formats are hardsubbed: the subtitles will be burned into the video track (cf. [hardsub](https://www.urbandictionary.com/define.php?term=hardsub)) and thus can not be turned off.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy download -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli download -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
Default is none.
|
Default is none.
|
||||||
|
|
||||||
- Output template
|
- <span id="download-output">Output template</span>
|
||||||
|
|
||||||
Define an output template by using the `-o` / `--output` flag.
|
Define an output template by using the `-o` / `--output` flag.
|
||||||
If you want to use any other file format than [`.ts`](https://en.wikipedia.org/wiki/MPEG_transport_stream) you need [ffmpeg](https://ffmpeg.org/).
|
|
||||||
```shell
|
|
||||||
$ crunchy download -o "ditf.ts" https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
|
||||||
```
|
|
||||||
Default is `{title}.ts`.
|
|
||||||
|
|
||||||
- Resolution
|
```shell
|
||||||
|
$ crunchy-cli download -o "ditf.mp4" https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is `{title}.mp4`. See the [Template Options section](#output-template-options) below for more options.
|
||||||
|
|
||||||
|
- <span id="download-output-specials">Output template for special episodes</span>
|
||||||
|
|
||||||
|
Define an output template which only gets used when the episode is a special (episode number is 0 or has non-zero decimal places) by using the `--output-special` flag.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --output-specials "Special EP - {title}" https://www.crunchyroll.com/watch/GY8D975JY/veldoras-journal
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is the template, set by the `-o` / `--output` flag. See the [Template Options section](#output-template-options) below for more options.
|
||||||
|
|
||||||
|
- <span id="download-universal-output">Universal output</span>
|
||||||
|
|
||||||
|
The output template options can be forced to get sanitized via the `--universal-output` flag to be valid across all supported operating systems (Windows has a lot of characters which aren't allowed in filenames...).
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --universal-output -o https://www.crunchyroll.com/watch/G7PU4XD48/tales-veldoras-journal-2
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="download-resolution">Resolution</span>
|
||||||
|
|
||||||
The resolution for videos can be set via the `-r` / `--resolution` flag.
|
The resolution for videos can be set via the `-r` / `--resolution` flag.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy download -r worst https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
$ crunchy-cli download -r worst https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
```
|
```
|
||||||
|
|
||||||
Default is `best`.
|
Default is `best`.
|
||||||
|
|
||||||
|
- <span id="download-language-tagging">Language tagging</span>
|
||||||
|
|
||||||
|
You can force the usage of a specific language tagging in the output file with the `--language-tagging` flag.
|
||||||
|
This might be useful as some video players doesn't recognize the language tagging Crunchyroll uses internally.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --language-tagging ietf https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="download-ffmpeg-preset">FFmpeg Preset</span>
|
||||||
|
|
||||||
|
You can specify specific built-in presets with the `--ffmpeg-preset` flag to convert videos to a specific coding while downloading.
|
||||||
|
Multiple predefined presets how videos should be encoded (h264, h265, av1, ...) are available, you can see them with `crunchy-cli download --help`.
|
||||||
|
If you need more specific ffmpeg customizations you could either convert the output file manually or use ffmpeg output arguments as value for this flag.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --ffmpeg-preset av1-lossless https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="download-ffmpeg-threads">FFmpeg threads</span>
|
||||||
|
|
||||||
|
If you want to manually set how many threads FFmpeg should use, you can use the `--ffmpeg-threads` flag. This does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --ffmpeg-threads 4 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="download-skip-existing">Skip existing</span>
|
||||||
|
|
||||||
|
If you re-download a series but want to skip episodes you've already downloaded, the `--skip-existing` flag skips the already existing/downloaded files.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --skip-existing https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="download-skip-specials">Skip specials</span>
|
||||||
|
|
||||||
|
If you don't want to download special episodes, use the `--skip-specials` flag to skip downloading them.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --skip-specials https://www.crunchyroll.com/series/GYZJ43JMR/that-time-i-got-reincarnated-as-a-slime[S2]
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="download-include-chapters">Include chapters</span>
|
||||||
|
|
||||||
|
Crunchyroll sometimes provides information about skippable events like the intro or credits.
|
||||||
|
This information can be stored as chapters in the resulting video file via the `--include-chapters` flag.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --include-chapters https://www.crunchyroll.com/watch/G0DUND0K2/the-journeys-end
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="download-yes">Yes</span>
|
||||||
|
|
||||||
|
Sometimes different seasons have the same season number (e.g. Sword Art Online Alicization and Alicization War of Underworld are both marked as season 3); in such cases an interactive prompt is shown which needs further user input to decide which season to download.
|
||||||
|
The `--yes` flag suppresses this interactive prompt and just downloads all seasons.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --yes https://www.crunchyroll.com/series/GR49G9VP6/sword-art-online
|
||||||
|
```
|
||||||
|
|
||||||
|
If you've passed the `-q` / `--quiet` [global flag](#global-settings), this flag is automatically set.
|
||||||
|
|
||||||
|
- <span id="download-force-hardsub">Force hardsub</span>
|
||||||
|
|
||||||
|
If you want to burn-in the subtitles, even if the output format/container supports soft-subs (e.g. `.mp4`), use the `--force-hardsub` flag to do so.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download --force-hardsub -s en-US https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="download-threads">Threads</span>
|
||||||
|
|
||||||
|
To increase the download speed, video segments are downloaded simultaneously by creating multiple threads.
|
||||||
|
If you want to manually specify how many threads to use when downloading, do this with the `-t` / `--threads` flag.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli download -t 1 https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
|
The default thread count is the number of CPU threads your machine has.
|
||||||
|
|
||||||
### Archive
|
### Archive
|
||||||
|
|
||||||
**Supported urls**
|
The `archive` command lets you download episodes with multiple audios and subtitles and merges it into a `.mkv` file.
|
||||||
- Series
|
|
||||||
|
|
||||||
Only series urls are supported, because episode urls are locked to a single audio language.
|
**Supported urls**
|
||||||
|
|
||||||
|
- Single episode (with [episode filtering](#episode-filtering))
|
||||||
```shell
|
```shell
|
||||||
$ crunchy archive https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli archive https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
- Series (with [episode filtering](#episode-filtering))
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
**Options**
|
**Options**
|
||||||
- Audio languages
|
|
||||||
|
- <span id="archive-audio">Audio languages</span>
|
||||||
|
|
||||||
Set the audio language with the `-a` / `--audio` flag. Can be used multiple times.
|
Set the audio language with the `-a` / `--audio` flag. Can be used multiple times.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy archive -a ja-JP -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli archive -a ja-JP -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
Default is your system locale (if not supported by Crunchyroll, `en-US` (American English) and `ja-JP` (Japanese) are used).
|
Default is your system locale (if not supported by Crunchyroll, `en-US` (American English) and `ja-JP` (Japanese) are used).
|
||||||
|
|
||||||
- Subtitle languages
|
- <span id="archive-subtitle">Subtitle languages</span>
|
||||||
|
|
||||||
Besides the audio, you can specify the subtitle language by using the `-s` / `--subtitle` flag.
|
Besides the audio, you can specify the subtitle language by using the `-s` / `--subtitle` flag.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy archive -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli archive -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
Default is `all` subtitles.
|
Default is `all` subtitles.
|
||||||
|
|
||||||
- Output template
|
- <span id="archive-output">Output template</span>
|
||||||
|
|
||||||
Define an output template by using the `-o` / `--output` flag.
|
Define an output template by using the `-o` / `--output` flag.
|
||||||
crunchy-cli uses the [`.mkv`](https://en.wikipedia.org/wiki/Matroska) container format, because of it's ability to store multiple audio, video and subtitle tracks at once.
|
_crunchy-cli_ exclusively uses the [`.mkv`](https://en.wikipedia.org/wiki/Matroska) container format, because of its ability to store multiple audio, video and subtitle tracks at once.
|
||||||
```shell
|
|
||||||
$ crunchy archive -o "{title}.mkv" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
|
||||||
```
|
|
||||||
Default is `{title}.mkv`.
|
|
||||||
|
|
||||||
- Resolution
|
```shell
|
||||||
|
$ crunchy-cli archive -o "{title}.mkv" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is `{title}.mkv`. See the [Template Options section](#output-template-options) below for more options.
|
||||||
|
|
||||||
|
- <span id="archive-output-specials">Output template for special episodes</span>
|
||||||
|
|
||||||
|
Define an output template which only gets used when the episode is a special (episode number is 0 or has non-zero decimal places) by using the `--output-special` flag.
|
||||||
|
_crunchy-cli_ exclusively uses the [`.mkv`](https://en.wikipedia.org/wiki/Matroska) container format, because of its ability to store multiple audio, video and subtitle tracks at once.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --output-specials "Special EP - {title}" https://www.crunchyroll.com/watch/GY8D975JY/veldoras-journal
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is the template, set by the `-o` / `--output` flag. See the [Template Options section](#output-template-options) below for more options.
|
||||||
|
|
||||||
|
- <span id="archive-universal-output">Universal output</span>
|
||||||
|
|
||||||
|
The output template options can be forced to get sanitized via the `--universal-output` flag to be valid across all supported operating systems (Windows has a lot of characters which aren't allowed in filenames...).
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --universal-output https://www.crunchyroll.com/watch/G7PU4XD48/tales-veldoras-journal-2
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="archive-resolution">Resolution</span>
|
||||||
|
|
||||||
The resolution for videos can be set via the `-r` / `--resolution` flag.
|
The resolution for videos can be set via the `-r` / `--resolution` flag.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy archive -r worst https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli archive -r worst https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
Default is `best`.
|
Default is `best`.
|
||||||
|
|
||||||
- Merge behavior
|
- <span id="archive-merge">Merge behavior</span>
|
||||||
|
|
||||||
Due to censorship, some episodes have multiple lengths for different languages.
|
Due to censorship or additional intros, some episodes have multiple lengths for different languages.
|
||||||
In the best case, when multiple audio & subtitle tracks are used, there is only one *video* track and all other languages can be stored as audio-only.
|
In the best case, when multiple audio & subtitle tracks are used, there is only one *video* track and all other languages can be stored as audio-only.
|
||||||
But, as said, this is not always the case.
|
But, as said, this is not always the case.
|
||||||
With the `-m` / `--merge` flag you can define the behaviour when an episode's video tracks differ in length.
|
With the `-m` / `--merge` flag you can define the behaviour when an episode's video tracks differ in length.
|
||||||
Valid options are `audio` - store one video and all other languages as audio only; `video` - store the video + audio for every language; `auto` - detect if videos differ in length: if so, behave like `video` - otherwise like `audio`.
|
Valid options are `audio` - store one video and all other languages as audio only; `video` - store the video + audio for every language; `auto` - detect if videos differ in length: if so, behave like `video` - otherwise like `audio`; `sync` - detect if videos differ in length: if so, it tries to find the offset of matching audio parts and removes the offset from the beginning, otherwise it behaves like `audio`.
|
||||||
Subtitles will always match the primary audio and video.
|
Subtitles will always match the primary audio and video.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy archive -m audio https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli archive -m audio https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
Default is `auto`.
|
Default is `auto`.
|
||||||
|
|
||||||
- Default subtitle
|
- <span id="archive-merge-time-tolerance">Merge time tolerance</span>
|
||||||
|
|
||||||
|
Sometimes two video tracks are downloaded with `--merge` set to `auto` even if they only differ by a few milliseconds in length, which shouldn't be noticeable to the viewer.
|
||||||
|
To prevent this, you can specify a range in milliseconds with the `--merge-time-tolerance` flag that only downloads one video if the length difference is in the given range.
|
||||||
|
|
||||||
`--default_subtitle` Set which subtitle language is to be flagged as **default** and **forced**.
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy archive --default_subtitle en-US https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli archive -m auto --merge-time-tolerance 100 https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Default is `200` milliseconds.
|
||||||
|
|
||||||
|
- <span id="archive-merge-sync-tolerance">Merge sync tolerance</span>
|
||||||
|
|
||||||
|
Sometimes two video tracks are downloaded with `--merge` set to `sync` because the audio fingerprinting fails to identify matching audio parts (e.g. opening).
|
||||||
|
To prevent this, you can use the `--merge-sync-tolerance` flag to specify the maximum difference at which two fingerprints are considered equal.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive -m sync --merge-sync-tolerance 3 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is `6`.
|
||||||
|
|
||||||
|
- <span id="archive-merge-sync-precision">Merge sync precision</span>
|
||||||
|
|
||||||
|
If you use `--merge` set to `sync` and the syncing seems to be not accurate enough or takes too long, you can use the `--merge-sync-precision` flag to specify the amount of offset determination runs from which the final offset is calculated.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive -m sync --merge-sync-precision 3 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is `4`.
|
||||||
|
|
||||||
|
- <span id="archive-language-tagging">Language tagging</span>
|
||||||
|
|
||||||
|
You can force the usage of a specific language tagging in the output file with the `--language-tagging` flag.
|
||||||
|
This might be useful as some video players don't recognize the language tagging Crunchyroll uses internally.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --language-tagging ietf https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="archive-ffmpeg-preset">FFmpeg Preset</span>
|
||||||
|
|
||||||
|
You can specify specific built-in presets with the `--ffmpeg-preset` flag to convert videos to a specific coding while downloading.
|
||||||
|
Multiple predefined presets for how videos should be encoded (h264, h265, av1, ...) are available; you can see them with `crunchy-cli archive --help`.
|
||||||
|
If you need more specific ffmpeg customizations you could either convert the output file manually or use ffmpeg output arguments as value for this flag.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --ffmpeg-preset av1-lossless https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="archive-ffmpeg-threads">FFmpeg threads</span>
|
||||||
|
|
||||||
|
If you want to manually set how many threads FFmpeg should use, you can use the `--ffmpeg-threads` flag. This does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --ffmpeg-threads 4 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="archive-default-subtitle">Default subtitle</span>
|
||||||
|
|
||||||
|
`--default-subtitle` Set which subtitle language is to be flagged as **default** and **forced**.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --default-subtitle en-US https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
Default is none.
|
Default is none.
|
||||||
|
|
||||||
- Subtitle optimizations
|
- <span id="archive-include-fonts">Include fonts</span>
|
||||||
|
|
||||||
|
You can include the fonts required by subtitles directly into the output file with the `--include-fonts` flag. This will use the embedded font for subtitles instead of the system font when playing the video in a video player which supports it.
|
||||||
|
|
||||||
Crunchyroll's subtitles look weird in some players (#66).
|
|
||||||
This can be fixed by adding a specific entry to the subtitles.
|
|
||||||
Even though this entry is a de facto standard, it is not defined in the official specification for the `.ass` format (cf. [Advanced SubStation Subtitles](https://wiki.videolan.org/SubStation_Alpha)). This could cause compatibility issues, but no issues have been reported yet.
|
|
||||||
`--no_subtitle_optimizations` disables these optimizations.
|
|
||||||
```shell
|
```shell
|
||||||
$ crunchy archive --no_subtitle_optimizations https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
$ crunchy-cli archive --include-fonts https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
```
|
```
|
||||||
|
|
||||||
### Episode filtering
|
- <span id="archive-include-chapters">Include chapters</span>
|
||||||
|
|
||||||
|
Crunchyroll sometimes provides information about skippable events like the intro or credits.
|
||||||
|
This information can be stored as chapters in the resulting video file via the `--include-chapters` flag.
|
||||||
|
This flag only works if `--merge` is set to `audio` because chapters cannot be mapped to a specific video stream.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --include-chapters https://www.crunchyroll.com/watch/G0DUND0K2/the-journeys-end
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="archive-skip-existing">Skip existing</span>
|
||||||
|
|
||||||
|
If you re-download a series but want to skip episodes you've already downloaded, the `--skip-existing` flag skips the already existing/downloaded files.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --skip-existing https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="archive-skip-existing-method">Skip existing method</span>
|
||||||
|
|
||||||
|
By default, already existing files are determined by their name and the download of the corresponding episode is skipped.
|
||||||
|
But sometimes Crunchyroll adds dubs or subs to an already existing episode and these changes aren't recognized and `--skip-existing` just skips it.
|
||||||
|
This behavior can be changed by the `--skip-existing-method` flag. Valid options are `audio` and `subtitle` (if the file already exists but contains fewer audio or subtitle tracks than what should be downloaded, the episode gets downloaded and the file overwritten).
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --skip-existing-method audio --skip-existing-method subtitle https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="archive-skip-specials">Skip specials</span>
|
||||||
|
|
||||||
|
If you don't want to download special episodes, use the `--skip-specials` flag to skip downloading them.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --skip-specials https://www.crunchyroll.com/series/GYZJ43JMR/that-time-i-got-reincarnated-as-a-slime[S2]
|
||||||
|
```
|
||||||
|
|
||||||
|
- <span id="archive-yes">Yes</span>
|
||||||
|
|
||||||
|
Sometimes different seasons have the same season number (e.g. Sword Art Online Alicization and Alicization War of Underworld are both marked as season 3); in such cases an interactive prompt is shown which needs further user input to decide which season to download.
|
||||||
|
The `--yes` flag suppresses this interactive prompt and just downloads all seasons.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive --yes https://www.crunchyroll.com/series/GR49G9VP6/sword-art-online
|
||||||
|
```
|
||||||
|
|
||||||
|
If you've passed the `-q` / `--quiet` [global flag](#global-settings), this flag is automatically set.
|
||||||
|
|
||||||
|
- <span id="archive-threads">Threads</span>
|
||||||
|
|
||||||
|
To increase the download speed, video segments are downloaded simultaneously by creating multiple threads.
|
||||||
|
If you want to manually specify how many threads to use when downloading, do this with the `-t` / `--threads` flag.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive -t 1 https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
|
The default thread count is the number of CPU threads your machine has.
|
||||||
|
|
||||||
|
### Search
|
||||||
|
|
||||||
|
The `search` command is a powerful tool to query the Crunchyroll library.
|
||||||
|
It behaves like the regular search on the website but is able to further process the results and return everything it can find, from the series title down to the raw stream url.
|
||||||
|
_Using this command with the `--anonymous` flag or a non-premium account may return incomplete results._
|
||||||
|
|
||||||
|
**Supported urls/input**
|
||||||
|
|
||||||
|
- Single episode (with [episode filtering](#episode-filtering))
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli search https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
|
||||||
|
```
|
||||||
|
- Series (with [episode filtering](#episode-filtering))
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli search https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
- Search input
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli search "darling in the franxx"
|
||||||
|
```
|
||||||
|
|
||||||
|
**Options**
|
||||||
|
|
||||||
|
- <span id="search-audio">Audio</span>
|
||||||
|
|
||||||
|
Set the audio language to search via the `--audio` flag. Can be used multiple times.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli search --audio en-US https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is your system locale.
|
||||||
|
|
||||||
|
- <span id="search-result-limit">Result limit</span>
|
||||||
|
|
||||||
|
If your input is a search term instead of an url, you have multiple options to control which results to process.
|
||||||
|
The `--search-top-results-limit` flag sets the limit of top search results to process.
|
||||||
|
`--search-series-limit` sets the limit of only series, `--search-movie-listing-limit` of only movie listings, `--search-episode-limit` of only episodes and `--search-music-limit` of only concerts and music videos.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli search --search-top-results-limit 10 "darling in the franxx"
|
||||||
|
# only return series which have 'darling' in it. do not return top results which might also be non-series items
|
||||||
|
$ crunchy-cli search --search-top-results-limit 0 --search-series-limit 10 "darling"
|
||||||
|
# this returns 2 top results, 3 movie listings, 5 episodes and 1 music item as result
|
||||||
|
$ crunchy-cli search --search-top-results-limit 2 --search-movie-listing-limit 3 --search-episode-limit 5 --search-music-limit 1 "test"
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is `5` for `--search-top-results-limit`, `0` for all others.
|
||||||
|
|
||||||
|
- Output template
|
||||||
|
|
||||||
|
The search command is designed to show only the specific information you want.
|
||||||
|
This is done with the `-o`/`--output` flag.
|
||||||
|
You can specify keywords in a specific pattern, and they will get replaced in the output text.
|
||||||
|
The required pattern for this begins with `{{`, then the keyword, and closes with `}}` (e.g. `{{episode.title}}`).
|
||||||
|
For example, if you want to get the title of an episode, you can use `Title: {{episode.title}}` and `{{episode.title}}` will be replaced with the episode title.
|
||||||
|
You can see all supported keywords with `crunchy-cli search --help`.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli search -o "{{series.title}}" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
|
||||||
|
```
|
||||||
|
|
||||||
|
Default is `S{{season.number}}E{{episode.number}} - {{episode.title}}`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Output Template Options
|
||||||
|
|
||||||
|
You can use various template options to change how the filename is processed. The following tags are available:
|
||||||
|
|
||||||
|
- `{title}` → Title of the video
|
||||||
|
- `{series_name}` → Name of the series
|
||||||
|
- `{season_name}` → Name of the season
|
||||||
|
- `{audio}` → Audio language of the video
|
||||||
|
- `{width}` → Width of the video
|
||||||
|
- `{height}` → Height of the video
|
||||||
|
- `{season_number}` → Number of the season
|
||||||
|
- `{episode_number}` → Number of the episode
|
||||||
|
- `{relative_episode_number}` → Number of the episode relative to its season
|
||||||
|
- `{sequence_number}` → Like `{episode_number}` but without possible non-number characters
|
||||||
|
- `{relative_sequence_number}` → Like `{relative_episode_number}` but with support for episode 0's and .5's
|
||||||
|
- `{release_year}` → Release year of the video
|
||||||
|
- `{release_month}` → Release month of the video
|
||||||
|
- `{release_day}` → Release day of the video
|
||||||
|
- `{series_id}` → ID of the series
|
||||||
|
- `{season_id}` → ID of the season
|
||||||
|
- `{episode_id}` → ID of the episode
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ crunchy-cli archive -o "[S{season_number}E{episode_number}] {title}.mkv" https://www.crunchyroll.com/series/G8DHV7W21/dragon-ball
|
||||||
|
# Output file: '[S01E01] Secret of the Dragon Ball.mkv'
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Episode filtering
|
||||||
|
|
||||||
Filters patterns can be used to download a specific range of episodes from a single series.
|
Filters patterns can be used to download a specific range of episodes from a single series.
|
||||||
|
|
||||||
|
|
@ -229,6 +707,7 @@ A filter pattern may consist of either a season, an episode, or a combination of
|
||||||
When used in combination, seasons `S` must be defined before episodes `E`.
|
When used in combination, seasons `S` must be defined before episodes `E`.
|
||||||
|
|
||||||
There are many possible patterns, for example:
|
There are many possible patterns, for example:
|
||||||
|
|
||||||
- `...[E5]` - Download the fifth episode.
|
- `...[E5]` - Download the fifth episode.
|
||||||
- `...[S1]` - Download the whole first season.
|
- `...[S1]` - Download the whole first season.
|
||||||
- `...[-S2]` - Download the first two seasons.
|
- `...[-S2]` - Download the first two seasons.
|
||||||
|
|
@ -237,14 +716,19 @@ There are many possible patterns, for example:
|
||||||
- `...[S3,S5]` - Download season three and five.
|
- `...[S3,S5]` - Download season three and five.
|
||||||
- `...[S1-S3,S4E2-S4E6]` - Download season one to three, then episodes two to six from season four.
|
- `...[S1-S3,S4E2-S4E6]` - Download season one to three, then episodes two to six from season four.
|
||||||
|
|
||||||
In practice, it would look like this: `https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx[E1-E5]`
|
In practice, it would look like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx[E1-E5]
|
||||||
|
```
|
||||||
|
|
||||||
# 📜 Disclaimer
|
# 📜 Disclaimer
|
||||||
|
|
||||||
This tool is **ONLY** meant for private use. You need a subscription to [`💳 Crunchyroll Premium 💳`](https://www.crunchyroll.com/welcome#plans) to download premium content.
|
This tool is meant for private use only.
|
||||||
|
You need a [Crunchyroll Premium](https://www.crunchyroll.com/welcome#plans) subscription to access premium content.
|
||||||
|
|
||||||
**You are entirely responsible for what happens to files you downloaded through crunchy-cli.**
|
**You are entirely responsible for what happens when you use crunchy-cli.**
|
||||||
|
|
||||||
# ⚖ License
|
# ⚖ License
|
||||||
|
|
||||||
This project is licensed under the GNU General Public License v3.0 (GPL-3.0) - see the [LICENSE](LICENSE) file for more details.
|
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for more details.
|
||||||
|
|
|
||||||
17
build.rs
17
build.rs
|
|
@ -3,6 +3,22 @@ use clap_complete::shells;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
fn main() -> std::io::Result<()> {
|
fn main() -> std::io::Result<()> {
|
||||||
|
let rustls_tls = cfg!(feature = "rustls-tls");
|
||||||
|
let native_tls = cfg!(feature = "native-tls");
|
||||||
|
let openssl_tls = cfg!(any(feature = "openssl-tls", feature = "openssl-tls-static"));
|
||||||
|
|
||||||
|
if rustls_tls as u8 + native_tls as u8 + openssl_tls as u8 > 1 {
|
||||||
|
let active_tls_backend = if openssl_tls {
|
||||||
|
"openssl"
|
||||||
|
} else if native_tls {
|
||||||
|
"native tls"
|
||||||
|
} else {
|
||||||
|
"rustls"
|
||||||
|
};
|
||||||
|
|
||||||
|
println!("cargo:warning=Multiple tls backends are activated (through the '*-tls' features). Consider to activate only one as it is not possible to change the backend during runtime. The active backend for this build will be '{}'.", active_tls_backend)
|
||||||
|
}
|
||||||
|
|
||||||
// note that we're using an anti-pattern here / violate the rust conventions. build script are
|
// note that we're using an anti-pattern here / violate the rust conventions. build script are
|
||||||
// not supposed to write outside of 'OUT_DIR'. to have the generated files in the build "root"
|
// not supposed to write outside of 'OUT_DIR'. to have the generated files in the build "root"
|
||||||
// (the same directory where the output binary lives) is much simpler than in 'OUT_DIR' since
|
// (the same directory where the output binary lives) is much simpler than in 'OUT_DIR' since
|
||||||
|
|
@ -100,6 +116,7 @@ fn generate_manpages(out_dir: PathBuf) -> std::io::Result<()> {
|
||||||
generate_command_manpage(crunchy_cli_core::Archive::command(), &out_dir, "archive")?;
|
generate_command_manpage(crunchy_cli_core::Archive::command(), &out_dir, "archive")?;
|
||||||
generate_command_manpage(crunchy_cli_core::Download::command(), &out_dir, "download")?;
|
generate_command_manpage(crunchy_cli_core::Download::command(), &out_dir, "download")?;
|
||||||
generate_command_manpage(crunchy_cli_core::Login::command(), &out_dir, "login")?;
|
generate_command_manpage(crunchy_cli_core::Login::command(), &out_dir, "login")?;
|
||||||
|
generate_command_manpage(crunchy_cli_core::Search::command(), &out_dir, "search")?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
||||||
2284
crunchy-cli-core/Cargo.lock
generated
2284
crunchy-cli-core/Cargo.lock
generated
File diff suppressed because it is too large
Load diff
|
|
@ -1,33 +1,51 @@
|
||||||
[package]
|
[package]
|
||||||
name = "crunchy-cli-core"
|
name = "crunchy-cli-core"
|
||||||
authors = ["Crunchy Labs Maintainers"]
|
authors = ["Crunchy Labs Maintainers"]
|
||||||
version = "3.0.0-dev.13"
|
version = "3.6.7"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
[features]
|
||||||
|
rustls-tls = ["reqwest/rustls-tls"]
|
||||||
|
native-tls = ["reqwest/native-tls", "reqwest/native-tls-alpn"]
|
||||||
|
openssl-tls = ["reqwest/native-tls", "reqwest/native-tls-alpn", "dep:rustls-native-certs"]
|
||||||
|
openssl-tls-static = ["reqwest/native-tls", "reqwest/native-tls-alpn", "reqwest/native-tls-vendored", "dep:rustls-native-certs"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow = "1.0"
|
anyhow = "1.0"
|
||||||
async-trait = "0.1"
|
async-speed-limit = "0.4"
|
||||||
clap = { version = "4.2", features = ["derive", "string"] }
|
clap = { version = "4.5", features = ["derive", "string"] }
|
||||||
chrono = "0.4"
|
chrono = "0.4"
|
||||||
crunchyroll-rs = { version = "0.3.4", features = ["dash-stream"] }
|
crunchyroll-rs = { version = "0.11.4", features = ["experimental-stabilizations", "tower"] }
|
||||||
ctrlc = "3.2"
|
ctrlc = "3.4"
|
||||||
|
dialoguer = { version = "0.11", default-features = false }
|
||||||
dirs = "5.0"
|
dirs = "5.0"
|
||||||
derive_setters = "0.1"
|
derive_setters = "0.1"
|
||||||
|
futures-util = { version = "0.3", features = ["io"] }
|
||||||
fs2 = "0.4"
|
fs2 = "0.4"
|
||||||
|
http = "1.1"
|
||||||
indicatif = "0.17"
|
indicatif = "0.17"
|
||||||
lazy_static = "1.4"
|
lazy_static = "1.4"
|
||||||
log = { version = "0.4", features = ["std"] }
|
log = { version = "0.4", features = ["std"] }
|
||||||
num_cpus = "1.15"
|
num_cpus = "1.16"
|
||||||
regex = "1.8"
|
regex = "1.10"
|
||||||
reqwest = { version = "0.11", default-features = false, features = ["socks"] }
|
reqwest = { version = "0.12", features = ["socks", "stream"] }
|
||||||
sanitize-filename = "0.4"
|
rsubs-lib = "~0.3.2"
|
||||||
|
rusty-chromaprint = "0.2"
|
||||||
serde = "1.0"
|
serde = "1.0"
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
shlex = "1.1"
|
serde_plain = "1.0"
|
||||||
tempfile = "3.5"
|
shlex = "1.3"
|
||||||
terminal_size = "0.2"
|
|
||||||
tokio = { version = "1.27", features = ["macros", "rt-multi-thread", "time"] }
|
|
||||||
sys-locale = "0.3"
|
sys-locale = "0.3"
|
||||||
|
tempfile = "3.10"
|
||||||
|
time = "0.3"
|
||||||
|
tokio = { version = "1.38", features = ["io-util", "macros", "net", "rt-multi-thread", "time"] }
|
||||||
|
tokio-util = "0.7"
|
||||||
|
tower-service = "0.3"
|
||||||
|
rustls-native-certs = { version = "0.7", optional = true }
|
||||||
|
|
||||||
|
[target.'cfg(not(target_os = "windows"))'.dependencies]
|
||||||
|
nix = { version = "0.28", features = ["fs"] }
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
chrono = "0.4"
|
chrono = "0.4"
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,8 @@
|
||||||
fn main() -> std::io::Result<()> {
|
fn main() -> std::io::Result<()> {
|
||||||
println!(
|
println!(
|
||||||
"cargo:rustc-env=GIT_HASH={}",
|
"cargo:rustc-env=GIT_HASH={}",
|
||||||
get_short_commit_hash()?.unwrap_or_default()
|
std::env::var("CRUNCHY_CLI_GIT_HASH")
|
||||||
|
.or::<std::io::Error>(Ok(get_short_commit_hash()?.unwrap_or_default()))?
|
||||||
);
|
);
|
||||||
println!(
|
println!(
|
||||||
"cargo:rustc-env=BUILD_DATE={}",
|
"cargo:rustc-env=BUILD_DATE={}",
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,15 @@
|
||||||
use crate::archive::filter::ArchiveFilter;
|
|
||||||
use crate::utils::context::Context;
|
use crate::utils::context::Context;
|
||||||
use crate::utils::download::{DownloadBuilder, DownloadFormat, MergeBehavior};
|
use crate::utils::download::{
|
||||||
|
DownloadBuilder, DownloadFormat, DownloadFormatMetadata, MergeBehavior,
|
||||||
|
};
|
||||||
use crate::utils::ffmpeg::FFmpegPreset;
|
use crate::utils::ffmpeg::FFmpegPreset;
|
||||||
use crate::utils::filter::Filter;
|
use crate::utils::filter::{Filter, FilterMediaScope};
|
||||||
use crate::utils::format::{Format, SingleFormat};
|
use crate::utils::format::{Format, SingleFormat};
|
||||||
use crate::utils::locale::all_locale_in_locales;
|
use crate::utils::locale::{all_locale_in_locales, resolve_locales, LanguageTagging};
|
||||||
use crate::utils::log::progress;
|
use crate::utils::log::progress;
|
||||||
use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
|
use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
|
||||||
use crate::utils::parse::parse_url;
|
use crate::utils::parse::parse_url;
|
||||||
use crate::utils::video::variant_data_from_stream;
|
use crate::utils::video::stream_data_from_stream;
|
||||||
use crate::Execute;
|
use crate::Execute;
|
||||||
use anyhow::bail;
|
use anyhow::bail;
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
|
|
@ -16,49 +17,69 @@ use chrono::Duration;
|
||||||
use crunchyroll_rs::media::{Resolution, Subtitle};
|
use crunchyroll_rs::media::{Resolution, Subtitle};
|
||||||
use crunchyroll_rs::Locale;
|
use crunchyroll_rs::Locale;
|
||||||
use log::{debug, warn};
|
use log::{debug, warn};
|
||||||
use std::collections::HashMap;
|
use regex::Regex;
|
||||||
use std::path::PathBuf;
|
use std::fmt::{Display, Formatter};
|
||||||
|
use std::iter::zip;
|
||||||
|
use std::ops::Sub;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::process::{Command, Stdio};
|
||||||
|
|
||||||
#[derive(Clone, Debug, clap::Parser)]
|
#[derive(Clone, Debug, clap::Parser)]
|
||||||
#[clap(about = "Archive a video")]
|
#[clap(about = "Archive a video")]
|
||||||
#[command(arg_required_else_help(true))]
|
#[command(arg_required_else_help(true))]
|
||||||
#[command()]
|
|
||||||
pub struct Archive {
|
pub struct Archive {
|
||||||
#[arg(help = format!("Audio languages. Can be used multiple times. \
|
#[arg(help = format!("Audio languages. Can be used multiple times. \
|
||||||
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
#[arg(long_help = format!("Audio languages. Can be used multiple times. \
|
#[arg(long_help = format!("Audio languages. Can be used multiple times. \
|
||||||
Available languages are:\n{}", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
Available languages are:\n {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
||||||
#[arg(short, long, default_values_t = vec![Locale::ja_JP, crate::utils::locale::system_locale()])]
|
#[arg(short, long, default_values_t = vec![Locale::ja_JP, crate::utils::locale::system_locale()])]
|
||||||
pub(crate) audio: Vec<Locale>,
|
pub(crate) audio: Vec<Locale>,
|
||||||
#[arg(help = "Deprecated. Use '-a' / '--audio' instead")]
|
#[arg(skip)]
|
||||||
#[arg(short, long)]
|
output_audio_locales: Vec<String>,
|
||||||
locale: Vec<Locale>,
|
|
||||||
#[arg(help = format!("Subtitle languages. Can be used multiple times. \
|
#[arg(help = format!("Subtitle languages. Can be used multiple times. \
|
||||||
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
#[arg(long_help = format!("Subtitle languages. Can be used multiple times. \
|
#[arg(long_help = format!("Subtitle languages. Can be used multiple times. \
|
||||||
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
Available languages are: {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
#[arg(short, long, default_values_t = Locale::all())]
|
#[arg(short, long, default_values_t = Locale::all())]
|
||||||
pub(crate) subtitle: Vec<Locale>,
|
pub(crate) subtitle: Vec<Locale>,
|
||||||
|
#[arg(skip)]
|
||||||
|
output_subtitle_locales: Vec<String>,
|
||||||
|
|
||||||
#[arg(help = "Name of the output file")]
|
#[arg(help = "Name of the output file")]
|
||||||
#[arg(long_help = "Name of the output file.\
|
#[arg(long_help = "Name of the output file. \
|
||||||
If you use one of the following pattern they will get replaced:\n \
|
If you use one of the following pattern they will get replaced:\n \
|
||||||
{title} → Title of the video\n \
|
{title} → Title of the video\n \
|
||||||
{series_name} → Name of the series\n \
|
{series_name} → Name of the series\n \
|
||||||
{season_name} → Name of the season\n \
|
{season_name} → Name of the season\n \
|
||||||
{audio} → Audio language of the video\n \
|
{audio} → Audio language of the video\n \
|
||||||
{resolution} → Resolution of the video\n \
|
{width} → Width of the video\n \
|
||||||
{season_number} → Number of the season\n \
|
{height} → Height of the video\n \
|
||||||
{episode_number} → Number of the episode\n \
|
{season_number} → Number of the season\n \
|
||||||
{relative_episode_number} → Number of the episode relative to its season\n \
|
{episode_number} → Number of the episode\n \
|
||||||
{series_id} → ID of the series\n \
|
{relative_episode_number} → Number of the episode relative to its season\n \
|
||||||
{season_id} → ID of the season\n \
|
{sequence_number} → Like '{episode_number}' but without possible non-number characters\n \
|
||||||
{episode_id} → ID of the episode")]
|
{relative_sequence_number} → Like '{relative_episode_number}' but with support for episode 0's and .5's\n \
|
||||||
|
{release_year} → Release year of the video\n \
|
||||||
|
{release_month} → Release month of the video\n \
|
||||||
|
{release_day} → Release day of the video\n \
|
||||||
|
{series_id} → ID of the series\n \
|
||||||
|
{season_id} → ID of the season\n \
|
||||||
|
{episode_id} → ID of the episode")]
|
||||||
#[arg(short, long, default_value = "{title}.mkv")]
|
#[arg(short, long, default_value = "{title}.mkv")]
|
||||||
pub(crate) output: String,
|
pub(crate) output: String,
|
||||||
|
#[arg(help = "Name of the output file if the episode is a special")]
|
||||||
|
#[arg(long_help = "Name of the output file if the episode is a special. \
|
||||||
|
If not set, the '-o'/'--output' flag will be used as name template")]
|
||||||
|
#[arg(long)]
|
||||||
|
pub(crate) output_specials: Option<String>,
|
||||||
|
|
||||||
|
#[arg(help = "Sanitize the output file for use with all operating systems. \
|
||||||
|
This option only affects template options and not static characters.")]
|
||||||
|
#[arg(long, default_value_t = false)]
|
||||||
|
pub(crate) universal_output: bool,
|
||||||
|
|
||||||
#[arg(help = "Video resolution")]
|
#[arg(help = "Video resolution")]
|
||||||
#[arg(long_help = "The video resolution.\
|
#[arg(long_help = "The video resolution. \
|
||||||
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
|
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
|
||||||
Specifying the exact pixels is not recommended, use one of the other options instead. \
|
Specifying the exact pixels is not recommended, use one of the other options instead. \
|
||||||
Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \
|
Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \
|
||||||
|
|
@ -68,42 +89,117 @@ pub struct Archive {
|
||||||
pub(crate) resolution: Resolution,
|
pub(crate) resolution: Resolution,
|
||||||
|
|
||||||
#[arg(
|
#[arg(
|
||||||
help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio' and 'video'"
|
help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'sync', 'audio' and 'video'"
|
||||||
)]
|
)]
|
||||||
#[arg(
|
#[arg(
|
||||||
long_help = "Because of local restrictions (or other reasons) some episodes with different languages does not have the same length (e.g. when some scenes were cut out). \
|
long_help = "Because of local restrictions (or other reasons) some episodes with different languages does not have the same length (e.g. when some scenes were cut out). \
|
||||||
With this flag you can set the behavior when handling multiple language.
|
With this flag you can set the behavior when handling multiple language.
|
||||||
Valid options are 'audio' (stores one video and all other languages as audio only), 'video' (stores the video + audio for every language) and 'auto' (detects if videos differ in length: if so, behave like 'video' else like 'audio')"
|
Valid options are 'audio' (stores one video and all other languages as audio only), 'video' (stores the video + audio for every language), 'auto' (detects if videos differ in length: if so, behave like 'video' else like 'audio') and 'sync' (detects if videos differ in length: if so, tries to find the offset of matching audio parts and removes it from the beginning, otherwise it behaves like 'audio')"
|
||||||
)]
|
)]
|
||||||
#[arg(short, long, default_value = "auto")]
|
#[arg(short, long, default_value = "auto")]
|
||||||
#[arg(value_parser = MergeBehavior::parse)]
|
#[arg(value_parser = MergeBehavior::parse)]
|
||||||
pub(crate) merge: MergeBehavior,
|
pub(crate) merge: MergeBehavior,
|
||||||
|
#[arg(
|
||||||
|
help = "If the merge behavior is 'auto' or 'sync', consider videos to be of equal lengths if the difference in length is smaller than the specified milliseconds"
|
||||||
|
)]
|
||||||
|
#[arg(long, default_value_t = 200)]
|
||||||
|
pub(crate) merge_time_tolerance: u32,
|
||||||
|
#[arg(
|
||||||
|
help = "If the merge behavior is 'sync', specify the difference by which two fingerprints are considered equal, higher values can help when the algorithm fails"
|
||||||
|
)]
|
||||||
|
#[arg(long, default_value_t = 6)]
|
||||||
|
pub(crate) merge_sync_tolerance: u32,
|
||||||
|
#[arg(
|
||||||
|
help = "If the merge behavior is 'sync', specify the amount of offset determination runs from which the final offset is calculated, higher values will increase the time required but lead to more precise offsets"
|
||||||
|
)]
|
||||||
|
#[arg(long, default_value_t = 4)]
|
||||||
|
pub(crate) merge_sync_precision: u32,
|
||||||
|
|
||||||
#[arg(help = format!("Presets for video converting. Can be used multiple times. \
|
#[arg(
|
||||||
|
help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
|
||||||
|
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard)"
|
||||||
|
)]
|
||||||
|
#[arg(
|
||||||
|
long_help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
|
||||||
|
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard; you might run in issues as there are multiple locales which resolve to the same IETF language code, e.g. 'es-LA' and 'es-ES' are both resolving to 'es')"
|
||||||
|
)]
|
||||||
|
#[arg(long)]
|
||||||
|
#[arg(value_parser = LanguageTagging::parse)]
|
||||||
|
pub(crate) language_tagging: Option<LanguageTagging>,
|
||||||
|
|
||||||
|
#[arg(help = format!("Presets for converting the video to a specific coding format. \
|
||||||
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
||||||
#[arg(long_help = format!("Presets for video converting. Can be used multiple times. \
|
#[arg(long_help = format!("Presets for converting the video to a specific coding format. \
|
||||||
Generally used to minify the file size with keeping (nearly) the same quality. \
|
If you need more specific ffmpeg customizations you can pass ffmpeg output arguments instead of a preset as value. \
|
||||||
It is recommended to only use this if you archive videos with high resolutions since low resolution videos tend to result in a larger file with any of the provided presets. \
|
|
||||||
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
#[arg(value_parser = FFmpegPreset::parse)]
|
#[arg(value_parser = FFmpegPreset::parse)]
|
||||||
pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
|
pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
|
||||||
|
#[arg(
|
||||||
|
help = "The number of threads used by ffmpeg to generate the output file. Does not work with every codec/preset"
|
||||||
|
)]
|
||||||
|
#[arg(
|
||||||
|
long_help = "The number of threads used by ffmpeg to generate the output file. \
|
||||||
|
Does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`. \
|
||||||
|
By default, ffmpeg chooses the thread count which works best for the output codec"
|
||||||
|
)]
|
||||||
|
#[arg(long)]
|
||||||
|
pub(crate) ffmpeg_threads: Option<usize>,
|
||||||
|
|
||||||
#[arg(
|
#[arg(
|
||||||
help = "Set which subtitle language should be set as default / auto shown when starting a video"
|
help = "Set which subtitle language should be set as default / auto shown when starting a video"
|
||||||
)]
|
)]
|
||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
pub(crate) default_subtitle: Option<Locale>,
|
pub(crate) default_subtitle: Option<Locale>,
|
||||||
|
#[arg(help = "Include fonts in the downloaded file")]
|
||||||
|
#[arg(long)]
|
||||||
|
pub(crate) include_fonts: bool,
|
||||||
|
#[arg(
|
||||||
|
help = "Includes chapters (e.g. intro, credits, ...). Only works if `--merge` is set to 'audio'"
|
||||||
|
)]
|
||||||
|
#[arg(
|
||||||
|
long_help = "Includes chapters (e.g. intro, credits, ...). . Only works if `--merge` is set to 'audio'. \
|
||||||
|
Because chapters are essentially only special timeframes in episodes like the intro, most of the video timeline isn't covered by a chapter.
|
||||||
|
These \"gaps\" are filled with an 'Episode' chapter because many video players are ignore those gaps and just assume that a chapter ends when the next chapter start is reached, even if a specific end-time is set.
|
||||||
|
Also chapters aren't always available, so in this case, just a big 'Episode' chapter from start to end will be created"
|
||||||
|
)]
|
||||||
|
#[arg(long, default_value_t = false)]
|
||||||
|
pub(crate) include_chapters: bool,
|
||||||
|
|
||||||
#[arg(help = "Skip files which are already existing")]
|
#[arg(help = "Omit closed caption subtitles in the downloaded file")]
|
||||||
|
#[arg(long, default_value_t = false)]
|
||||||
|
pub(crate) no_closed_caption: bool,
|
||||||
|
|
||||||
|
#[arg(help = "Skip files which are already existing by their name")]
|
||||||
#[arg(long, default_value_t = false)]
|
#[arg(long, default_value_t = false)]
|
||||||
pub(crate) skip_existing: bool,
|
pub(crate) skip_existing: bool,
|
||||||
|
#[arg(
|
||||||
|
help = "Only works in combination with `--skip-existing`. Sets the method how already existing files should be skipped. Valid methods are 'audio' and 'subtitle'"
|
||||||
|
)]
|
||||||
|
#[arg(long_help = "Only works in combination with `--skip-existing`. \
|
||||||
|
By default, already existing files are determined by their name and the download of the corresponding episode is skipped. \
|
||||||
|
With this flag you can modify this behavior. \
|
||||||
|
Valid options are 'audio' and 'subtitle' (if the file already exists but the audio/subtitle are less from what should be downloaded, the episode gets downloaded and the file overwritten).")]
|
||||||
|
#[arg(long, default_values_t = SkipExistingMethod::default())]
|
||||||
|
#[arg(value_parser = SkipExistingMethod::parse)]
|
||||||
|
pub(crate) skip_existing_method: Vec<SkipExistingMethod>,
|
||||||
|
#[arg(help = "Skip special episodes")]
|
||||||
|
#[arg(long, default_value_t = false)]
|
||||||
|
pub(crate) skip_specials: bool,
|
||||||
|
|
||||||
|
#[arg(help = "Skip any interactive input")]
|
||||||
|
#[arg(short, long, default_value_t = false)]
|
||||||
|
pub(crate) yes: bool,
|
||||||
|
|
||||||
|
#[arg(help = "The number of threads used to download")]
|
||||||
|
#[arg(short, long, default_value_t = num_cpus::get())]
|
||||||
|
pub(crate) threads: usize,
|
||||||
|
|
||||||
#[arg(help = "Crunchyroll series url(s)")]
|
#[arg(help = "Crunchyroll series url(s)")]
|
||||||
|
#[arg(required = true)]
|
||||||
pub(crate) urls: Vec<String>,
|
pub(crate) urls: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
|
||||||
impl Execute for Archive {
|
impl Execute for Archive {
|
||||||
fn pre_check(&mut self) -> Result<()> {
|
fn pre_check(&mut self) -> Result<()> {
|
||||||
if !has_ffmpeg() {
|
if !has_ffmpeg() {
|
||||||
|
|
@ -117,24 +213,61 @@ impl Execute for Archive {
|
||||||
&& self.output != "-"
|
&& self.output != "-"
|
||||||
{
|
{
|
||||||
bail!("File extension is not '.mkv'. Currently only matroska / '.mkv' files are supported")
|
bail!("File extension is not '.mkv'. Currently only matroska / '.mkv' files are supported")
|
||||||
|
} else if let Some(special_output) = &self.output_specials {
|
||||||
|
if PathBuf::from(special_output)
|
||||||
|
.extension()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_string_lossy()
|
||||||
|
!= "mkv"
|
||||||
|
&& !is_special_file(special_output)
|
||||||
|
&& special_output != "-"
|
||||||
|
{
|
||||||
|
bail!("File extension for special episodes is not '.mkv'. Currently only matroska / '.mkv' files are supported")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if !self.locale.is_empty() {
|
if self.include_chapters
|
||||||
warn!("The '-l' / '--locale' flag is deprecated, use '-a' / '--audio' instead");
|
&& !matches!(self.merge, MergeBehavior::Sync)
|
||||||
for locale in &self.locale {
|
&& !matches!(self.merge, MergeBehavior::Audio)
|
||||||
if !self.audio.contains(locale) {
|
{
|
||||||
self.audio.push(locale.clone())
|
bail!("`--include-chapters` can only be used if `--merge` is set to 'audio' or 'sync'")
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
if !self.skip_existing_method.is_empty() && !self.skip_existing {
|
||||||
|
warn!("`--skip-existing-method` has no effect if `--skip-existing` is not set")
|
||||||
}
|
}
|
||||||
|
|
||||||
self.audio = all_locale_in_locales(self.audio.clone());
|
self.audio = all_locale_in_locales(self.audio.clone());
|
||||||
self.subtitle = all_locale_in_locales(self.subtitle.clone());
|
self.subtitle = all_locale_in_locales(self.subtitle.clone());
|
||||||
|
|
||||||
|
if let Some(language_tagging) = &self.language_tagging {
|
||||||
|
self.audio = resolve_locales(&self.audio);
|
||||||
|
self.subtitle = resolve_locales(&self.subtitle);
|
||||||
|
self.output_audio_locales = language_tagging.convert_locales(&self.audio);
|
||||||
|
self.output_subtitle_locales = language_tagging.convert_locales(&self.subtitle);
|
||||||
|
} else {
|
||||||
|
self.output_audio_locales = self
|
||||||
|
.audio
|
||||||
|
.clone()
|
||||||
|
.into_iter()
|
||||||
|
.map(|l| l.to_string())
|
||||||
|
.collect();
|
||||||
|
self.output_subtitle_locales = self
|
||||||
|
.subtitle
|
||||||
|
.clone()
|
||||||
|
.into_iter()
|
||||||
|
.map(|l| l.to_string())
|
||||||
|
.collect();
|
||||||
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn execute(self, ctx: Context) -> Result<()> {
|
async fn execute(self, ctx: Context) -> Result<()> {
|
||||||
|
if !ctx.crunchy.premium().await {
|
||||||
|
warn!("You may not be able to download all requested videos when logging in anonymously or using a non-premium account")
|
||||||
|
}
|
||||||
|
|
||||||
let mut parsed_urls = vec![];
|
let mut parsed_urls = vec![];
|
||||||
|
|
||||||
for (i, url) in self.urls.clone().into_iter().enumerate() {
|
for (i, url) in self.urls.clone().into_iter().enumerate() {
|
||||||
|
|
@ -150,9 +283,55 @@ impl Execute for Archive {
|
||||||
|
|
||||||
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
|
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
|
||||||
let progress_handler = progress!("Fetching series details");
|
let progress_handler = progress!("Fetching series details");
|
||||||
let single_format_collection = ArchiveFilter::new(url_filter, self.clone())
|
let single_format_collection = Filter::new(
|
||||||
.visit(media_collection)
|
url_filter,
|
||||||
.await?;
|
self.audio.clone(),
|
||||||
|
self.subtitle.clone(),
|
||||||
|
|scope, locales| {
|
||||||
|
let audios = locales.into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ");
|
||||||
|
match scope {
|
||||||
|
FilterMediaScope::Series(series) => warn!("Series {} is not available with {} audio", series.title, audios),
|
||||||
|
FilterMediaScope::Season(season) => warn!("Season {} is not available with {} audio", season.season_number, audios),
|
||||||
|
FilterMediaScope::Episode(episodes) => {
|
||||||
|
if episodes.len() == 1 {
|
||||||
|
warn!("Episode {} is not available with {} audio", episodes[0].sequence_number, audios)
|
||||||
|
} else if episodes.len() == 2 {
|
||||||
|
warn!("Season {} is only available with {} audio from episode {} to {}", episodes[0].season_number, audios, episodes[0].sequence_number, episodes[1].sequence_number)
|
||||||
|
} else {
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(true)
|
||||||
|
},
|
||||||
|
|scope, locales| {
|
||||||
|
let subtitles = locales.into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ");
|
||||||
|
match scope {
|
||||||
|
FilterMediaScope::Series(series) => warn!("Series {} is not available with {} subtitles", series.title, subtitles),
|
||||||
|
FilterMediaScope::Season(season) => warn!("Season {} is not available with {} subtitles", season.season_number, subtitles),
|
||||||
|
FilterMediaScope::Episode(episodes) => {
|
||||||
|
if episodes.len() == 1 {
|
||||||
|
warn!("Episode {} of season {} is not available with {} subtitles", episodes[0].sequence_number, episodes[0].season_title, subtitles)
|
||||||
|
} else if episodes.len() == 2 {
|
||||||
|
warn!("Season {} of season {} is only available with {} subtitles from episode {} to {}", episodes[0].season_number, episodes[0].season_title, subtitles, episodes[0].sequence_number, episodes[1].sequence_number)
|
||||||
|
} else {
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(true)
|
||||||
|
},
|
||||||
|
|season| {
|
||||||
|
warn!("Skipping premium episodes in season {season}");
|
||||||
|
Ok(())
|
||||||
|
},
|
||||||
|
Format::has_relative_fmt(&self.output),
|
||||||
|
!self.yes,
|
||||||
|
self.skip_specials,
|
||||||
|
ctx.crunchy.premium().await,
|
||||||
|
)
|
||||||
|
.visit(media_collection)
|
||||||
|
.await?;
|
||||||
|
|
||||||
if single_format_collection.is_empty() {
|
if single_format_collection.is_empty() {
|
||||||
progress_handler.stop(format!("Skipping url {} (no matching videos found)", i + 1));
|
progress_handler.stop(format!("Skipping url {} (no matching videos found)", i + 1));
|
||||||
|
|
@ -162,12 +341,31 @@ impl Execute for Archive {
|
||||||
|
|
||||||
single_format_collection.full_visual_output();
|
single_format_collection.full_visual_output();
|
||||||
|
|
||||||
let download_builder = DownloadBuilder::new()
|
let download_builder =
|
||||||
.default_subtitle(self.default_subtitle.clone())
|
DownloadBuilder::new(ctx.client.clone(), ctx.rate_limiter.clone())
|
||||||
.ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default())
|
.default_subtitle(self.default_subtitle.clone())
|
||||||
.output_format(Some("matroska".to_string()))
|
.download_fonts(self.include_fonts)
|
||||||
.audio_sort(Some(self.audio.clone()))
|
.ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default())
|
||||||
.subtitle_sort(Some(self.subtitle.clone()));
|
.ffmpeg_threads(self.ffmpeg_threads)
|
||||||
|
.output_format(Some("matroska".to_string()))
|
||||||
|
.audio_sort(Some(self.audio.clone()))
|
||||||
|
.subtitle_sort(Some(self.subtitle.clone()))
|
||||||
|
.no_closed_caption(self.no_closed_caption)
|
||||||
|
.merge_sync_tolerance(match self.merge {
|
||||||
|
MergeBehavior::Sync => Some(self.merge_sync_tolerance),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
.merge_sync_precision(match self.merge {
|
||||||
|
MergeBehavior::Sync => Some(self.merge_sync_precision),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
.threads(self.threads)
|
||||||
|
.audio_locale_output_map(
|
||||||
|
zip(self.audio.clone(), self.output_audio_locales.clone()).collect(),
|
||||||
|
)
|
||||||
|
.subtitle_locale_output_map(
|
||||||
|
zip(self.subtitle.clone(), self.output_subtitle_locales.clone()).collect(),
|
||||||
|
);
|
||||||
|
|
||||||
for single_formats in single_format_collection.into_iter() {
|
for single_formats in single_format_collection.into_iter() {
|
||||||
let (download_formats, mut format) = get_format(&self, &single_formats).await?;
|
let (download_formats, mut format) = get_format(&self, &single_formats).await?;
|
||||||
|
|
@ -177,15 +375,79 @@ impl Execute for Archive {
|
||||||
downloader.add_format(download_format)
|
downloader.add_format(download_format)
|
||||||
}
|
}
|
||||||
|
|
||||||
let formatted_path = format.format_path((&self.output).into(), true);
|
let formatted_path = if format.is_special() {
|
||||||
let (path, changed) = free_file(formatted_path.clone());
|
format.format_path(
|
||||||
|
self.output_specials
|
||||||
|
.as_ref()
|
||||||
|
.map_or((&self.output).into(), |so| so.into()),
|
||||||
|
self.universal_output,
|
||||||
|
self.language_tagging.as_ref(),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
format.format_path(
|
||||||
|
(&self.output).into(),
|
||||||
|
self.universal_output,
|
||||||
|
self.language_tagging.as_ref(),
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let (mut path, changed) = free_file(formatted_path.clone());
|
||||||
|
|
||||||
if changed && self.skip_existing {
|
if changed && self.skip_existing {
|
||||||
debug!(
|
let mut skip = true;
|
||||||
"Skipping already existing file '{}'",
|
|
||||||
formatted_path.to_string_lossy()
|
if !self.skip_existing_method.is_empty() {
|
||||||
);
|
if let Some((audio_locales, subtitle_locales)) =
|
||||||
continue;
|
get_video_streams(&formatted_path)?
|
||||||
|
{
|
||||||
|
let method_audio = self
|
||||||
|
.skip_existing_method
|
||||||
|
.contains(&SkipExistingMethod::Audio);
|
||||||
|
let method_subtitle = self
|
||||||
|
.skip_existing_method
|
||||||
|
.contains(&SkipExistingMethod::Subtitle);
|
||||||
|
|
||||||
|
let audio_differ = if method_audio {
|
||||||
|
format
|
||||||
|
.locales
|
||||||
|
.iter()
|
||||||
|
.any(|(a, _)| !audio_locales.contains(a))
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
};
|
||||||
|
let subtitle_differ = if method_subtitle {
|
||||||
|
format
|
||||||
|
.locales
|
||||||
|
.clone()
|
||||||
|
.into_iter()
|
||||||
|
.flat_map(|(a, mut s)| {
|
||||||
|
// remove the closed caption if the flag is given to omit
|
||||||
|
// closed captions
|
||||||
|
if self.no_closed_caption && a != Locale::ja_JP {
|
||||||
|
s.retain(|l| l != &a)
|
||||||
|
}
|
||||||
|
s
|
||||||
|
})
|
||||||
|
.any(|l| !subtitle_locales.contains(&l))
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
};
|
||||||
|
|
||||||
|
if (method_audio && audio_differ)
|
||||||
|
|| (method_subtitle && subtitle_differ)
|
||||||
|
{
|
||||||
|
skip = false;
|
||||||
|
path.clone_from(&formatted_path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if skip {
|
||||||
|
debug!(
|
||||||
|
"Skipping already existing file '{}'",
|
||||||
|
formatted_path.to_string_lossy()
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
format.locales.sort_by(|(a, _), (b, _)| {
|
format.locales.sort_by(|(a, _), (b, _)| {
|
||||||
|
|
@ -205,7 +467,7 @@ impl Execute for Archive {
|
||||||
|
|
||||||
format.visual_output(&path);
|
format.visual_output(&path);
|
||||||
|
|
||||||
downloader.download(&ctx, &path).await?
|
downloader.download(&path).await?
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -213,6 +475,36 @@ impl Execute for Archive {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||||
|
pub(crate) enum SkipExistingMethod {
|
||||||
|
Audio,
|
||||||
|
Subtitle,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for SkipExistingMethod {
|
||||||
|
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||||
|
let value = match self {
|
||||||
|
SkipExistingMethod::Audio => "audio",
|
||||||
|
SkipExistingMethod::Subtitle => "subtitle",
|
||||||
|
};
|
||||||
|
write!(f, "{}", value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SkipExistingMethod {
|
||||||
|
fn parse(s: &str) -> Result<Self, String> {
|
||||||
|
match s.to_lowercase().as_str() {
|
||||||
|
"audio" => Ok(Self::Audio),
|
||||||
|
"subtitle" => Ok(Self::Subtitle),
|
||||||
|
_ => Err(format!("invalid skip existing method '{}'", s)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default<'a>() -> &'a [Self] {
|
||||||
|
&[]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async fn get_format(
|
async fn get_format(
|
||||||
archive: &Archive,
|
archive: &Archive,
|
||||||
single_formats: &Vec<SingleFormat>,
|
single_formats: &Vec<SingleFormat>,
|
||||||
|
|
@ -222,7 +514,9 @@ async fn get_format(
|
||||||
|
|
||||||
for single_format in single_formats {
|
for single_format in single_formats {
|
||||||
let stream = single_format.stream().await?;
|
let stream = single_format.stream().await?;
|
||||||
let Some((video, audio)) = variant_data_from_stream(&stream, &archive.resolution).await? else {
|
let Some((video, audio, _)) =
|
||||||
|
stream_data_from_stream(&stream, &archive.resolution, None).await?
|
||||||
|
else {
|
||||||
if single_format.is_episode() {
|
if single_format.is_episode() {
|
||||||
bail!(
|
bail!(
|
||||||
"Resolution ({}) is not available for episode {} ({}) of {} season {}",
|
"Resolution ({}) is not available for episode {} ({}) of {} season {}",
|
||||||
|
|
@ -245,24 +539,29 @@ async fn get_format(
|
||||||
let subtitles: Vec<(Subtitle, bool)> = archive
|
let subtitles: Vec<(Subtitle, bool)> = archive
|
||||||
.subtitle
|
.subtitle
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|s| {
|
.flat_map(|s| {
|
||||||
stream
|
let mut subtitles = vec![];
|
||||||
.subtitles
|
if let Some(caption) = stream.captions.get(s) {
|
||||||
.get(s)
|
subtitles.push((caption.clone(), true))
|
||||||
.cloned()
|
}
|
||||||
// the subtitle is probably not cc if the audio is japanese or more than one
|
if let Some(subtitle) = stream.subtitles.get(s) {
|
||||||
// subtitle exists for this stream
|
// the subtitle is probably cc if the audio is not japanese or only one subtitle
|
||||||
.map(|l| {
|
// exists for this stream
|
||||||
(
|
let cc = single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1;
|
||||||
l,
|
// only include the subtitles if no cc subtitle is already present or if it's
|
||||||
single_format.audio == Locale::ja_JP || stream.subtitles.len() > 1,
|
// not cc
|
||||||
)
|
if subtitles.is_empty() || !cc {
|
||||||
})
|
subtitles.push((subtitle.clone(), cc))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
subtitles
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
format_pairs.push((single_format, video.clone(), audio, subtitles.clone()));
|
format_pairs.push((single_format, video.clone(), audio, subtitles.clone()));
|
||||||
single_format_to_format_pairs.push((single_format.clone(), video, subtitles))
|
single_format_to_format_pairs.push((single_format.clone(), video, subtitles));
|
||||||
|
|
||||||
|
stream.invalidate().await?
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut download_formats = vec![];
|
let mut download_formats = vec![];
|
||||||
|
|
@ -274,13 +573,14 @@ async fn get_format(
|
||||||
video: (video, single_format.audio.clone()),
|
video: (video, single_format.audio.clone()),
|
||||||
audios: vec![(audio, single_format.audio.clone())],
|
audios: vec![(audio, single_format.audio.clone())],
|
||||||
subtitles,
|
subtitles,
|
||||||
|
metadata: DownloadFormatMetadata { skip_events: None },
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
MergeBehavior::Audio => download_formats.push(DownloadFormat {
|
MergeBehavior::Audio => download_formats.push(DownloadFormat {
|
||||||
video: (
|
video: (
|
||||||
(*format_pairs.first().unwrap()).1.clone(),
|
format_pairs.first().unwrap().1.clone(),
|
||||||
(*format_pairs.first().unwrap()).0.audio.clone(),
|
format_pairs.first().unwrap().0.audio.clone(),
|
||||||
),
|
),
|
||||||
audios: format_pairs
|
audios: format_pairs
|
||||||
.iter()
|
.iter()
|
||||||
|
|
@ -292,28 +592,62 @@ async fn get_format(
|
||||||
.iter()
|
.iter()
|
||||||
.flat_map(|(_, _, _, subtitles)| subtitles.clone())
|
.flat_map(|(_, _, _, subtitles)| subtitles.clone())
|
||||||
.collect(),
|
.collect(),
|
||||||
|
metadata: DownloadFormatMetadata {
|
||||||
|
skip_events: if archive.include_chapters {
|
||||||
|
format_pairs.first().unwrap().0.skip_events().await?
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
},
|
||||||
}),
|
}),
|
||||||
MergeBehavior::Auto => {
|
MergeBehavior::Auto | MergeBehavior::Sync => {
|
||||||
let mut d_formats: HashMap<Duration, DownloadFormat> = HashMap::new();
|
let mut d_formats: Vec<(Duration, DownloadFormat)> = vec![];
|
||||||
|
|
||||||
for (single_format, video, audio, subtitles) in format_pairs {
|
for (single_format, video, audio, subtitles) in format_pairs {
|
||||||
if let Some(d_format) = d_formats.get_mut(&single_format.duration) {
|
let closest_format = d_formats.iter_mut().min_by(|(x, _), (y, _)| {
|
||||||
d_format.audios.push((audio, single_format.audio.clone()));
|
x.sub(single_format.duration)
|
||||||
d_format.subtitles.extend(subtitles)
|
.abs()
|
||||||
} else {
|
.cmp(&y.sub(single_format.duration).abs())
|
||||||
d_formats.insert(
|
});
|
||||||
single_format.duration,
|
|
||||||
DownloadFormat {
|
match closest_format {
|
||||||
video: (video, single_format.audio.clone()),
|
Some(closest_format)
|
||||||
audios: vec![(audio, single_format.audio.clone())],
|
if closest_format
|
||||||
subtitles,
|
.0
|
||||||
},
|
.sub(single_format.duration)
|
||||||
);
|
.abs()
|
||||||
}
|
.num_milliseconds()
|
||||||
|
< archive.merge_time_tolerance.into() =>
|
||||||
|
{
|
||||||
|
// If less than `audio_error` apart, use same audio.
|
||||||
|
closest_format
|
||||||
|
.1
|
||||||
|
.audios
|
||||||
|
.push((audio, single_format.audio.clone()));
|
||||||
|
closest_format.1.subtitles.extend(subtitles);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
d_formats.push((
|
||||||
|
single_format.duration,
|
||||||
|
DownloadFormat {
|
||||||
|
video: (video, single_format.audio.clone()),
|
||||||
|
audios: vec![(audio, single_format.audio.clone())],
|
||||||
|
subtitles,
|
||||||
|
metadata: DownloadFormatMetadata {
|
||||||
|
skip_events: if archive.include_chapters {
|
||||||
|
single_format.skip_events().await?
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
for d_format in d_formats.into_values() {
|
for (_, d_format) in d_formats.into_iter() {
|
||||||
download_formats.push(d_format)
|
download_formats.push(d_format);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -323,3 +657,36 @@ async fn get_format(
|
||||||
Format::from_single_formats(single_format_to_format_pairs),
|
Format::from_single_formats(single_format_to_format_pairs),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn get_video_streams(path: &Path) -> Result<Option<(Vec<Locale>, Vec<Locale>)>> {
|
||||||
|
let video_streams =
|
||||||
|
Regex::new(r"(?m)Stream\s#\d+:\d+\((?P<language>.+)\):\s(?P<type>(Audio|Subtitle))")
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let ffmpeg = Command::new("ffmpeg")
|
||||||
|
.stdout(Stdio::null())
|
||||||
|
.stderr(Stdio::piped())
|
||||||
|
.arg("-hide_banner")
|
||||||
|
.args(["-i", &path.to_string_lossy()])
|
||||||
|
.output()?;
|
||||||
|
let ffmpeg_output = String::from_utf8(ffmpeg.stderr)?;
|
||||||
|
|
||||||
|
let mut audio = vec![];
|
||||||
|
let mut subtitle = vec![];
|
||||||
|
for cap in video_streams.captures_iter(&ffmpeg_output) {
|
||||||
|
let locale = cap.name("language").unwrap().as_str();
|
||||||
|
let type_ = cap.name("type").unwrap().as_str();
|
||||||
|
|
||||||
|
match type_ {
|
||||||
|
"Audio" => audio.push(Locale::from(locale.to_string())),
|
||||||
|
"Subtitle" => subtitle.push(Locale::from(locale.to_string())),
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if audio.is_empty() && subtitle.is_empty() {
|
||||||
|
Ok(None)
|
||||||
|
} else {
|
||||||
|
Ok(Some((audio, subtitle)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,290 +0,0 @@
|
||||||
use crate::archive::command::Archive;
|
|
||||||
use crate::utils::filter::{real_dedup_vec, Filter};
|
|
||||||
use crate::utils::format::{Format, SingleFormat, SingleFormatCollection};
|
|
||||||
use crate::utils::parse::UrlFilter;
|
|
||||||
use anyhow::Result;
|
|
||||||
use crunchyroll_rs::{Concert, Episode, Locale, Movie, MovieListing, MusicVideo, Season, Series};
|
|
||||||
use log::warn;
|
|
||||||
use std::collections::{BTreeMap, HashMap};
|
|
||||||
|
|
||||||
enum Visited {
|
|
||||||
Series,
|
|
||||||
Season,
|
|
||||||
None,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) struct ArchiveFilter {
|
|
||||||
url_filter: UrlFilter,
|
|
||||||
archive: Archive,
|
|
||||||
season_episode_count: HashMap<u32, Vec<String>>,
|
|
||||||
season_subtitles_missing: Vec<u32>,
|
|
||||||
visited: Visited,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ArchiveFilter {
|
|
||||||
pub(crate) fn new(url_filter: UrlFilter, archive: Archive) -> Self {
|
|
||||||
Self {
|
|
||||||
url_filter,
|
|
||||||
archive,
|
|
||||||
season_episode_count: HashMap::new(),
|
|
||||||
season_subtitles_missing: vec![],
|
|
||||||
visited: Visited::None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait::async_trait]
|
|
||||||
impl Filter for ArchiveFilter {
|
|
||||||
type T = Vec<SingleFormat>;
|
|
||||||
type Output = SingleFormatCollection;
|
|
||||||
|
|
||||||
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>> {
|
|
||||||
// `series.audio_locales` isn't always populated b/c of crunchyrolls api. so check if the
|
|
||||||
// audio is matching only if the field is populated
|
|
||||||
if !series.audio_locales.is_empty() {
|
|
||||||
let missing_audio = missing_locales(&series.audio_locales, &self.archive.audio);
|
|
||||||
if !missing_audio.is_empty() {
|
|
||||||
warn!(
|
|
||||||
"Series {} is not available with {} audio",
|
|
||||||
series.title,
|
|
||||||
missing_audio
|
|
||||||
.into_iter()
|
|
||||||
.map(|l| l.to_string())
|
|
||||||
.collect::<Vec<String>>()
|
|
||||||
.join(", ")
|
|
||||||
)
|
|
||||||
}
|
|
||||||
let missing_subtitle =
|
|
||||||
missing_locales(&series.subtitle_locales, &self.archive.subtitle);
|
|
||||||
if !missing_subtitle.is_empty() {
|
|
||||||
warn!(
|
|
||||||
"Series {} is not available with {} subtitles",
|
|
||||||
series.title,
|
|
||||||
missing_subtitle
|
|
||||||
.into_iter()
|
|
||||||
.map(|l| l.to_string())
|
|
||||||
.collect::<Vec<String>>()
|
|
||||||
.join(", ")
|
|
||||||
)
|
|
||||||
}
|
|
||||||
self.visited = Visited::Series
|
|
||||||
}
|
|
||||||
Ok(series.seasons().await?)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_season(&mut self, mut season: Season) -> Result<Vec<Episode>> {
|
|
||||||
if !self.url_filter.is_season_valid(season.season_number) {
|
|
||||||
return Ok(vec![]);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut seasons = season.version(self.archive.audio.clone()).await?;
|
|
||||||
if self
|
|
||||||
.archive
|
|
||||||
.audio
|
|
||||||
.iter()
|
|
||||||
.any(|l| season.audio_locales.contains(l))
|
|
||||||
{
|
|
||||||
seasons.insert(0, season.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
if !matches!(self.visited, Visited::Series) {
|
|
||||||
let mut audio_locales: Vec<Locale> = seasons
|
|
||||||
.iter()
|
|
||||||
.map(|s| s.audio_locales.clone())
|
|
||||||
.flatten()
|
|
||||||
.collect();
|
|
||||||
real_dedup_vec(&mut audio_locales);
|
|
||||||
let missing_audio = missing_locales(&audio_locales, &self.archive.audio);
|
|
||||||
if !missing_audio.is_empty() {
|
|
||||||
warn!(
|
|
||||||
"Season {} is not available with {} audio",
|
|
||||||
season.season_number,
|
|
||||||
missing_audio
|
|
||||||
.into_iter()
|
|
||||||
.map(|l| l.to_string())
|
|
||||||
.collect::<Vec<String>>()
|
|
||||||
.join(", ")
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let subtitle_locales: Vec<Locale> = seasons
|
|
||||||
.iter()
|
|
||||||
.map(|s| s.subtitle_locales.clone())
|
|
||||||
.flatten()
|
|
||||||
.collect();
|
|
||||||
let missing_subtitle = missing_locales(&subtitle_locales, &self.archive.subtitle);
|
|
||||||
if !missing_subtitle.is_empty() {
|
|
||||||
warn!(
|
|
||||||
"Season {} is not available with {} subtitles",
|
|
||||||
season.season_number,
|
|
||||||
missing_subtitle
|
|
||||||
.into_iter()
|
|
||||||
.map(|l| l.to_string())
|
|
||||||
.collect::<Vec<String>>()
|
|
||||||
.join(", ")
|
|
||||||
)
|
|
||||||
}
|
|
||||||
self.visited = Visited::Season
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut episodes = vec![];
|
|
||||||
for season in seasons {
|
|
||||||
episodes.extend(season.episodes().await?)
|
|
||||||
}
|
|
||||||
|
|
||||||
if Format::has_relative_episodes_fmt(&self.archive.output) {
|
|
||||||
for episode in episodes.iter() {
|
|
||||||
self.season_episode_count
|
|
||||||
.entry(episode.season_number)
|
|
||||||
.or_insert(vec![])
|
|
||||||
.push(episode.id.clone())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(episodes)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_episode(&mut self, mut episode: Episode) -> Result<Option<Self::T>> {
|
|
||||||
if !self
|
|
||||||
.url_filter
|
|
||||||
.is_episode_valid(episode.episode_number, episode.season_number)
|
|
||||||
{
|
|
||||||
return Ok(None);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut episodes = vec![];
|
|
||||||
if !matches!(self.visited, Visited::Series) && !matches!(self.visited, Visited::Season) {
|
|
||||||
if self.archive.audio.contains(&episode.audio_locale) {
|
|
||||||
episodes.push((episode.clone(), episode.subtitle_locales.clone()))
|
|
||||||
}
|
|
||||||
episodes.extend(
|
|
||||||
episode
|
|
||||||
.version(self.archive.audio.clone())
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(|e| (e.clone(), e.subtitle_locales.clone())),
|
|
||||||
);
|
|
||||||
let audio_locales: Vec<Locale> = episodes
|
|
||||||
.iter()
|
|
||||||
.map(|(e, _)| e.audio_locale.clone())
|
|
||||||
.collect();
|
|
||||||
let missing_audio = missing_locales(&audio_locales, &self.archive.audio);
|
|
||||||
if !missing_audio.is_empty() {
|
|
||||||
warn!(
|
|
||||||
"Episode {} is not available with {} audio",
|
|
||||||
episode.episode_number,
|
|
||||||
missing_audio
|
|
||||||
.into_iter()
|
|
||||||
.map(|l| l.to_string())
|
|
||||||
.collect::<Vec<String>>()
|
|
||||||
.join(", ")
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut subtitle_locales: Vec<Locale> =
|
|
||||||
episodes.iter().map(|(_, s)| s.clone()).flatten().collect();
|
|
||||||
real_dedup_vec(&mut subtitle_locales);
|
|
||||||
let missing_subtitles = missing_locales(&subtitle_locales, &self.archive.subtitle);
|
|
||||||
if !missing_subtitles.is_empty()
|
|
||||||
&& !self
|
|
||||||
.season_subtitles_missing
|
|
||||||
.contains(&episode.season_number)
|
|
||||||
{
|
|
||||||
warn!(
|
|
||||||
"Episode {} is not available with {} subtitles",
|
|
||||||
episode.episode_number,
|
|
||||||
missing_subtitles
|
|
||||||
.into_iter()
|
|
||||||
.map(|l| l.to_string())
|
|
||||||
.collect::<Vec<String>>()
|
|
||||||
.join(", ")
|
|
||||||
);
|
|
||||||
self.season_subtitles_missing.push(episode.season_number)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
episodes.push((episode.clone(), episode.subtitle_locales.clone()))
|
|
||||||
}
|
|
||||||
|
|
||||||
let relative_episode_number = if Format::has_relative_episodes_fmt(&self.archive.output) {
|
|
||||||
if self
|
|
||||||
.season_episode_count
|
|
||||||
.get(&episode.season_number)
|
|
||||||
.is_none()
|
|
||||||
{
|
|
||||||
let season_episodes = episode.season().await?.episodes().await?;
|
|
||||||
self.season_episode_count.insert(
|
|
||||||
episode.season_number,
|
|
||||||
season_episodes.into_iter().map(|e| e.id).collect(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
let relative_episode_number = self
|
|
||||||
.season_episode_count
|
|
||||||
.get(&episode.season_number)
|
|
||||||
.unwrap()
|
|
||||||
.iter()
|
|
||||||
.position(|id| id == &episode.id)
|
|
||||||
.map(|index| index + 1);
|
|
||||||
if relative_episode_number.is_none() {
|
|
||||||
warn!(
|
|
||||||
"Failed to get relative episode number for episode {} ({}) of {} season {}",
|
|
||||||
episode.episode_number,
|
|
||||||
episode.title,
|
|
||||||
episode.series_title,
|
|
||||||
episode.season_number,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
relative_episode_number
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Some(
|
|
||||||
episodes
|
|
||||||
.into_iter()
|
|
||||||
.map(|(e, s)| {
|
|
||||||
SingleFormat::new_from_episode(e, s, relative_episode_number.map(|n| n as u32))
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>> {
|
|
||||||
Ok(movie_listing.movies().await?)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_movie(&mut self, movie: Movie) -> Result<Option<Self::T>> {
|
|
||||||
Ok(Some(vec![SingleFormat::new_from_movie(movie, vec![])]))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Option<Self::T>> {
|
|
||||||
Ok(Some(vec![SingleFormat::new_from_music_video(music_video)]))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_concert(&mut self, concert: Concert) -> Result<Option<Self::T>> {
|
|
||||||
Ok(Some(vec![SingleFormat::new_from_concert(concert)]))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn finish(self, input: Vec<Self::T>) -> Result<Self::Output> {
|
|
||||||
let flatten_input: Self::T = input.into_iter().flatten().collect();
|
|
||||||
|
|
||||||
let mut single_format_collection = SingleFormatCollection::new();
|
|
||||||
|
|
||||||
let mut sorted: BTreeMap<(u32, String), Self::T> = BTreeMap::new();
|
|
||||||
for data in flatten_input {
|
|
||||||
sorted
|
|
||||||
.entry((data.season_number, data.sequence_number.to_string()))
|
|
||||||
.or_insert(vec![])
|
|
||||||
.push(data)
|
|
||||||
}
|
|
||||||
|
|
||||||
for data in sorted.into_values() {
|
|
||||||
single_format_collection.add_single_formats(data)
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(single_format_collection)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn missing_locales<'a>(available: &Vec<Locale>, searched: &'a Vec<Locale>) -> Vec<&'a Locale> {
|
|
||||||
searched.iter().filter(|p| !available.contains(p)).collect()
|
|
||||||
}
|
|
||||||
|
|
@ -1,4 +1,3 @@
|
||||||
mod command;
|
mod command;
|
||||||
mod filter;
|
|
||||||
|
|
||||||
pub use command::Archive;
|
pub use command::Archive;
|
||||||
|
|
|
||||||
|
|
@ -1,19 +1,20 @@
|
||||||
use crate::download::filter::DownloadFilter;
|
|
||||||
use crate::utils::context::Context;
|
use crate::utils::context::Context;
|
||||||
use crate::utils::download::{DownloadBuilder, DownloadFormat};
|
use crate::utils::download::{DownloadBuilder, DownloadFormat, DownloadFormatMetadata};
|
||||||
use crate::utils::ffmpeg::FFmpegPreset;
|
use crate::utils::ffmpeg::{FFmpegPreset, SOFTSUB_CONTAINERS};
|
||||||
use crate::utils::filter::Filter;
|
use crate::utils::filter::{Filter, FilterMediaScope};
|
||||||
use crate::utils::format::{Format, SingleFormat};
|
use crate::utils::format::{Format, SingleFormat};
|
||||||
|
use crate::utils::locale::{resolve_locales, LanguageTagging};
|
||||||
use crate::utils::log::progress;
|
use crate::utils::log::progress;
|
||||||
use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
|
use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
|
||||||
use crate::utils::parse::parse_url;
|
use crate::utils::parse::parse_url;
|
||||||
use crate::utils::video::variant_data_from_stream;
|
use crate::utils::video::stream_data_from_stream;
|
||||||
use crate::Execute;
|
use crate::Execute;
|
||||||
use anyhow::bail;
|
use anyhow::bail;
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use crunchyroll_rs::media::Resolution;
|
use crunchyroll_rs::media::Resolution;
|
||||||
use crunchyroll_rs::Locale;
|
use crunchyroll_rs::Locale;
|
||||||
use log::{debug, warn};
|
use log::{debug, error, warn};
|
||||||
|
use std::collections::HashMap;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
#[derive(Clone, Debug, clap::Parser)]
|
#[derive(Clone, Debug, clap::Parser)]
|
||||||
|
|
@ -23,34 +24,54 @@ pub struct Download {
|
||||||
#[arg(help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
|
#[arg(help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
|
||||||
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
#[arg(long_help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
|
#[arg(long_help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
|
||||||
Available languages are:\n{}", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
Available languages are:\n {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
||||||
#[arg(short, long, default_value_t = crate::utils::locale::system_locale())]
|
#[arg(short, long, default_value_t = crate::utils::locale::system_locale())]
|
||||||
pub(crate) audio: Locale,
|
pub(crate) audio: Locale,
|
||||||
|
#[arg(skip)]
|
||||||
|
output_audio_locale: String,
|
||||||
#[arg(help = format!("Subtitle language. Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
#[arg(help = format!("Subtitle language. Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
#[arg(long_help = format!("Subtitle language. If set, the subtitle will be burned into the video and cannot be disabled. \
|
#[arg(long_help = format!("Subtitle language. If set, the subtitle will be burned into the video and cannot be disabled. \
|
||||||
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
Available languages are: {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
#[arg(short, long)]
|
#[arg(short, long)]
|
||||||
pub(crate) subtitle: Option<Locale>,
|
pub(crate) subtitle: Option<Locale>,
|
||||||
|
#[arg(skip)]
|
||||||
|
output_subtitle_locale: String,
|
||||||
|
|
||||||
#[arg(help = "Name of the output file")]
|
#[arg(help = "Name of the output file")]
|
||||||
#[arg(long_help = "Name of the output file.\
|
#[arg(long_help = "Name of the output file. \
|
||||||
If you use one of the following pattern they will get replaced:\n \
|
If you use one of the following pattern they will get replaced:\n \
|
||||||
{title} → Title of the video\n \
|
{title} → Title of the video\n \
|
||||||
{series_name} → Name of the series\n \
|
{series_name} → Name of the series\n \
|
||||||
{season_name} → Name of the season\n \
|
{season_name} → Name of the season\n \
|
||||||
{audio} → Audio language of the video\n \
|
{audio} → Audio language of the video\n \
|
||||||
{resolution} → Resolution of the video\n \
|
{width} → Width of the video\n \
|
||||||
{season_number} → Number of the season\n \
|
{height} → Height of the video\n \
|
||||||
{episode_number} → Number of the episode\n \
|
{season_number} → Number of the season\n \
|
||||||
{relative_episode_number} → Number of the episode relative to its season\n \
|
{episode_number} → Number of the episode\n \
|
||||||
{series_id} → ID of the series\n \
|
{relative_episode_number} → Number of the episode relative to its season\n \
|
||||||
{season_id} → ID of the season\n \
|
{sequence_number} → Like '{episode_number}' but without possible non-number characters\n \
|
||||||
{episode_id} → ID of the episode")]
|
{relative_sequence_number} → Like '{relative_episode_number}' but with support for episode 0's and .5's\n \
|
||||||
|
{release_year} → Release year of the video\n \
|
||||||
|
{release_month} → Release month of the video\n \
|
||||||
|
{release_day} → Release day of the video\n \
|
||||||
|
{series_id} → ID of the series\n \
|
||||||
|
{season_id} → ID of the season\n \
|
||||||
|
{episode_id} → ID of the episode")]
|
||||||
#[arg(short, long, default_value = "{title}.mp4")]
|
#[arg(short, long, default_value = "{title}.mp4")]
|
||||||
pub(crate) output: String,
|
pub(crate) output: String,
|
||||||
|
#[arg(help = "Name of the output file if the episode is a special")]
|
||||||
|
#[arg(long_help = "Name of the output file if the episode is a special. \
|
||||||
|
If not set, the '-o'/'--output' flag will be used as name template")]
|
||||||
|
#[arg(long)]
|
||||||
|
pub(crate) output_specials: Option<String>,
|
||||||
|
|
||||||
|
#[arg(help = "Sanitize the output file for use with all operating systems. \
|
||||||
|
This option only affects template options and not static characters.")]
|
||||||
|
#[arg(long, default_value_t = false)]
|
||||||
|
pub(crate) universal_output: bool,
|
||||||
|
|
||||||
#[arg(help = "Video resolution")]
|
#[arg(help = "Video resolution")]
|
||||||
#[arg(long_help = "The video resolution.\
|
#[arg(long_help = "The video resolution. \
|
||||||
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
|
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
|
||||||
Specifying the exact pixels is not recommended, use one of the other options instead. \
|
Specifying the exact pixels is not recommended, use one of the other options instead. \
|
||||||
Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \
|
Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \
|
||||||
|
|
@ -59,25 +80,69 @@ pub struct Download {
|
||||||
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
|
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
|
||||||
pub(crate) resolution: Resolution,
|
pub(crate) resolution: Resolution,
|
||||||
|
|
||||||
#[arg(help = format!("Presets for video converting. Can be used multiple times. \
|
#[arg(
|
||||||
|
long,
|
||||||
|
help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
|
||||||
|
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard)"
|
||||||
|
)]
|
||||||
|
#[arg(
|
||||||
|
long_help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
|
||||||
|
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard; you might run in issues as there are multiple locales which resolve to the same IETF language code, e.g. 'es-LA' and 'es-ES' are both resolving to 'es')"
|
||||||
|
)]
|
||||||
|
#[arg(value_parser = LanguageTagging::parse)]
|
||||||
|
pub(crate) language_tagging: Option<LanguageTagging>,
|
||||||
|
|
||||||
|
#[arg(help = format!("Presets for converting the video to a specific coding format. \
|
||||||
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
||||||
#[arg(long_help = format!("Presets for video converting. Can be used multiple times. \
|
#[arg(long_help = format!("Presets for converting the video to a specific coding format. \
|
||||||
Generally used to minify the file size with keeping (nearly) the same quality. \
|
If you need more specific ffmpeg customizations you can pass ffmpeg output arguments instead of a preset as value. \
|
||||||
It is recommended to only use this if you download videos with high resolutions since low resolution videos tend to result in a larger file with any of the provided presets. \
|
|
||||||
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
#[arg(value_parser = FFmpegPreset::parse)]
|
#[arg(value_parser = FFmpegPreset::parse)]
|
||||||
pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
|
pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
|
||||||
|
#[arg(
|
||||||
|
help = "The number of threads used by ffmpeg to generate the output file. Does not work with every codec/preset"
|
||||||
|
)]
|
||||||
|
#[arg(
|
||||||
|
long_help = "The number of threads used by ffmpeg to generate the output file. \
|
||||||
|
Does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`. \
|
||||||
|
By default, ffmpeg chooses the thread count which works best for the output codec"
|
||||||
|
)]
|
||||||
|
#[arg(long)]
|
||||||
|
pub(crate) ffmpeg_threads: Option<usize>,
|
||||||
|
|
||||||
#[arg(help = "Skip files which are already existing")]
|
#[arg(help = "Skip files which are already existing by their name")]
|
||||||
#[arg(long, default_value_t = false)]
|
#[arg(long, default_value_t = false)]
|
||||||
pub(crate) skip_existing: bool,
|
pub(crate) skip_existing: bool,
|
||||||
|
#[arg(help = "Skip special episodes")]
|
||||||
|
#[arg(long, default_value_t = false)]
|
||||||
|
pub(crate) skip_specials: bool,
|
||||||
|
|
||||||
|
#[arg(help = "Includes chapters (e.g. intro, credits, ...)")]
|
||||||
|
#[arg(long_help = "Includes chapters (e.g. intro, credits, ...). \
|
||||||
|
Because chapters are essentially only special timeframes in episodes like the intro, most of the video timeline isn't covered by a chapter.
|
||||||
|
These \"gaps\" are filled with an 'Episode' chapter because many video players are ignore those gaps and just assume that a chapter ends when the next chapter start is reached, even if a specific end-time is set.
|
||||||
|
Also chapters aren't always available, so in this case, just a big 'Episode' chapter from start to end will be created")]
|
||||||
|
#[arg(long, default_value_t = false)]
|
||||||
|
pub(crate) include_chapters: bool,
|
||||||
|
|
||||||
|
#[arg(help = "Skip any interactive input")]
|
||||||
|
#[arg(short, long, default_value_t = false)]
|
||||||
|
pub(crate) yes: bool,
|
||||||
|
|
||||||
|
#[arg(help = "Force subtitles to be always burnt-in")]
|
||||||
|
#[arg(long, default_value_t = false)]
|
||||||
|
pub(crate) force_hardsub: bool,
|
||||||
|
|
||||||
|
#[arg(help = "The number of threads used to download")]
|
||||||
|
#[arg(short, long, default_value_t = num_cpus::get())]
|
||||||
|
pub(crate) threads: usize,
|
||||||
|
|
||||||
#[arg(help = "Url(s) to Crunchyroll episodes or series")]
|
#[arg(help = "Url(s) to Crunchyroll episodes or series")]
|
||||||
|
#[arg(required = true)]
|
||||||
pub(crate) urls: Vec<String>,
|
pub(crate) urls: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
|
||||||
impl Execute for Download {
|
impl Execute for Download {
|
||||||
fn pre_check(&mut self) -> Result<()> {
|
fn pre_check(&mut self) -> Result<()> {
|
||||||
if !has_ffmpeg() {
|
if !has_ffmpeg() {
|
||||||
|
|
@ -94,18 +159,83 @@ impl Execute for Download {
|
||||||
|
|
||||||
if self.subtitle.is_some() {
|
if self.subtitle.is_some() {
|
||||||
if let Some(ext) = Path::new(&self.output).extension() {
|
if let Some(ext) = Path::new(&self.output).extension() {
|
||||||
if ext.to_string_lossy() != "mp4" {
|
if self.force_hardsub {
|
||||||
warn!("Detected a non mp4 output container. Adding subtitles may take a while")
|
warn!("Hardsubs are forced. Adding subtitles may take a while")
|
||||||
|
} else if !["mkv", "mov", "mp4"].contains(&ext.to_string_lossy().as_ref()) {
|
||||||
|
warn!("Detected a container which does not support softsubs. Adding subtitles may take a while")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(special_output) = &self.output_specials {
|
||||||
|
if Path::new(special_output)
|
||||||
|
.extension()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.is_empty()
|
||||||
|
&& !is_special_file(special_output)
|
||||||
|
&& special_output != "-"
|
||||||
|
{
|
||||||
|
bail!("No file extension found. Please specify a file extension (via `--output-specials`) for the output file")
|
||||||
|
}
|
||||||
|
if let Some(ext) = Path::new(special_output).extension() {
|
||||||
|
if self.force_hardsub {
|
||||||
|
warn!("Hardsubs are forced for special episodes. Adding subtitles may take a while")
|
||||||
|
} else if !["mkv", "mov", "mp4"].contains(&ext.to_string_lossy().as_ref()) {
|
||||||
|
warn!("Detected a container which does not support softsubs. Adding subtitles for special episodes may take a while")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(language_tagging) = &self.language_tagging {
|
||||||
|
self.audio = resolve_locales(&[self.audio.clone()]).remove(0);
|
||||||
|
self.subtitle = self
|
||||||
|
.subtitle
|
||||||
|
.as_ref()
|
||||||
|
.map(|s| resolve_locales(&[s.clone()]).remove(0));
|
||||||
|
self.output_audio_locale = language_tagging.for_locale(&self.audio);
|
||||||
|
self.output_subtitle_locale = self
|
||||||
|
.subtitle
|
||||||
|
.as_ref()
|
||||||
|
.map(|s| language_tagging.for_locale(s))
|
||||||
|
.unwrap_or_default()
|
||||||
|
} else {
|
||||||
|
self.output_audio_locale = self.audio.to_string();
|
||||||
|
self.output_subtitle_locale = self
|
||||||
|
.subtitle
|
||||||
|
.as_ref()
|
||||||
|
.map(|s| s.to_string())
|
||||||
|
.unwrap_or_default();
|
||||||
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn execute(self, ctx: Context) -> Result<()> {
|
async fn execute(self, ctx: Context) -> Result<()> {
|
||||||
|
if !ctx.crunchy.premium().await {
|
||||||
|
warn!("You may not be able to download all requested videos when logging in anonymously or using a non-premium account")
|
||||||
|
}
|
||||||
|
|
||||||
let mut parsed_urls = vec![];
|
let mut parsed_urls = vec![];
|
||||||
|
|
||||||
|
let output_supports_softsubs = SOFTSUB_CONTAINERS.contains(
|
||||||
|
&Path::new(&self.output)
|
||||||
|
.extension()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_string_lossy()
|
||||||
|
.as_ref(),
|
||||||
|
);
|
||||||
|
let special_output_supports_softsubs = if let Some(so) = &self.output_specials {
|
||||||
|
SOFTSUB_CONTAINERS.contains(
|
||||||
|
&Path::new(so)
|
||||||
|
.extension()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_string_lossy()
|
||||||
|
.as_ref(),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
output_supports_softsubs
|
||||||
|
};
|
||||||
|
|
||||||
for (i, url) in self.urls.clone().into_iter().enumerate() {
|
for (i, url) in self.urls.clone().into_iter().enumerate() {
|
||||||
let progress_handler = progress!("Parsing url {}", i + 1);
|
let progress_handler = progress!("Parsing url {}", i + 1);
|
||||||
match parse_url(&ctx.crunchy, url.clone(), true).await {
|
match parse_url(&ctx.crunchy, url.clone(), true).await {
|
||||||
|
|
@ -119,9 +249,59 @@ impl Execute for Download {
|
||||||
|
|
||||||
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
|
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
|
||||||
let progress_handler = progress!("Fetching series details");
|
let progress_handler = progress!("Fetching series details");
|
||||||
let single_format_collection = DownloadFilter::new(url_filter, self.clone())
|
let single_format_collection = Filter::new(
|
||||||
.visit(media_collection)
|
url_filter,
|
||||||
.await?;
|
vec![self.audio.clone()],
|
||||||
|
self.subtitle.as_ref().map_or(vec![], |s| vec![s.clone()]),
|
||||||
|
|scope, locales| {
|
||||||
|
match scope {
|
||||||
|
FilterMediaScope::Series(series) => bail!("Series {} is not available with {} audio", series.title, locales[0]),
|
||||||
|
FilterMediaScope::Season(season) => {
|
||||||
|
error!("Season {} is not available with {} audio", season.season_number, locales[0]);
|
||||||
|
Ok(false)
|
||||||
|
}
|
||||||
|
FilterMediaScope::Episode(episodes) => {
|
||||||
|
if episodes.len() == 1 {
|
||||||
|
warn!("Episode {} of season {} is not available with {} audio", episodes[0].sequence_number, episodes[0].season_title, locales[0])
|
||||||
|
} else if episodes.len() == 2 {
|
||||||
|
warn!("Season {} is only available with {} audio from episode {} to {}", episodes[0].season_number, locales[0], episodes[0].sequence_number, episodes[1].sequence_number)
|
||||||
|
} else {
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
Ok(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|scope, locales| {
|
||||||
|
match scope {
|
||||||
|
FilterMediaScope::Series(series) => bail!("Series {} is not available with {} subtitles", series.title, locales[0]),
|
||||||
|
FilterMediaScope::Season(season) => {
|
||||||
|
warn!("Season {} is not available with {} subtitles", season.season_number, locales[0]);
|
||||||
|
Ok(false)
|
||||||
|
},
|
||||||
|
FilterMediaScope::Episode(episodes) => {
|
||||||
|
if episodes.len() == 1 {
|
||||||
|
warn!("Episode {} of season {} is not available with {} subtitles", episodes[0].sequence_number, episodes[0].season_title, locales[0])
|
||||||
|
} else if episodes.len() == 2 {
|
||||||
|
warn!("Season {} is only available with {} subtitles from episode {} to {}", episodes[0].season_number, locales[0], episodes[0].sequence_number, episodes[1].sequence_number)
|
||||||
|
} else {
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
Ok(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|season| {
|
||||||
|
warn!("Skipping premium episodes in season {season}");
|
||||||
|
Ok(())
|
||||||
|
},
|
||||||
|
Format::has_relative_fmt(&self.output),
|
||||||
|
!self.yes,
|
||||||
|
self.skip_specials,
|
||||||
|
ctx.crunchy.premium().await,
|
||||||
|
)
|
||||||
|
.visit(media_collection)
|
||||||
|
.await?;
|
||||||
|
|
||||||
if single_format_collection.is_empty() {
|
if single_format_collection.is_empty() {
|
||||||
progress_handler.stop(format!("Skipping url {} (no matching videos found)", i + 1));
|
progress_handler.stop(format!("Skipping url {} (no matching videos found)", i + 1));
|
||||||
|
|
@ -131,24 +311,63 @@ impl Execute for Download {
|
||||||
|
|
||||||
single_format_collection.full_visual_output();
|
single_format_collection.full_visual_output();
|
||||||
|
|
||||||
let download_builder = DownloadBuilder::new()
|
let download_builder =
|
||||||
.default_subtitle(self.subtitle.clone())
|
DownloadBuilder::new(ctx.client.clone(), ctx.rate_limiter.clone())
|
||||||
.output_format(if is_special_file(&self.output) || self.output == "-" {
|
.default_subtitle(self.subtitle.clone())
|
||||||
Some("mpegts".to_string())
|
.force_hardsub(self.force_hardsub)
|
||||||
} else {
|
.output_format(if is_special_file(&self.output) || self.output == "-" {
|
||||||
None
|
Some("mpegts".to_string())
|
||||||
});
|
} else {
|
||||||
|
None
|
||||||
|
})
|
||||||
|
.ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default())
|
||||||
|
.ffmpeg_threads(self.ffmpeg_threads)
|
||||||
|
.threads(self.threads)
|
||||||
|
.audio_locale_output_map(HashMap::from([(
|
||||||
|
self.audio.clone(),
|
||||||
|
self.output_audio_locale.clone(),
|
||||||
|
)]))
|
||||||
|
.subtitle_locale_output_map(
|
||||||
|
self.subtitle.as_ref().map_or(HashMap::new(), |s| {
|
||||||
|
HashMap::from([(s.clone(), self.output_subtitle_locale.clone())])
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
for mut single_formats in single_format_collection.into_iter() {
|
for mut single_formats in single_format_collection.into_iter() {
|
||||||
// the vec contains always only one item
|
// the vec contains always only one item
|
||||||
let single_format = single_formats.remove(0);
|
let single_format = single_formats.remove(0);
|
||||||
|
|
||||||
let (download_format, format) = get_format(&self, &single_format).await?;
|
let (download_format, format) = get_format(
|
||||||
|
&self,
|
||||||
|
&single_format,
|
||||||
|
if self.force_hardsub {
|
||||||
|
true
|
||||||
|
} else if single_format.is_special() {
|
||||||
|
!special_output_supports_softsubs
|
||||||
|
} else {
|
||||||
|
!output_supports_softsubs
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let mut downloader = download_builder.clone().build();
|
let mut downloader = download_builder.clone().build();
|
||||||
downloader.add_format(download_format);
|
downloader.add_format(download_format);
|
||||||
|
|
||||||
let formatted_path = format.format_path((&self.output).into(), true);
|
let formatted_path = if format.is_special() {
|
||||||
|
format.format_path(
|
||||||
|
self.output_specials
|
||||||
|
.as_ref()
|
||||||
|
.map_or((&self.output).into(), |so| so.into()),
|
||||||
|
self.universal_output,
|
||||||
|
self.language_tagging.as_ref(),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
format.format_path(
|
||||||
|
(&self.output).into(),
|
||||||
|
self.universal_output,
|
||||||
|
self.language_tagging.as_ref(),
|
||||||
|
)
|
||||||
|
};
|
||||||
let (path, changed) = free_file(formatted_path.clone());
|
let (path, changed) = free_file(formatted_path.clone());
|
||||||
|
|
||||||
if changed && self.skip_existing {
|
if changed && self.skip_existing {
|
||||||
|
|
@ -161,7 +380,7 @@ impl Execute for Download {
|
||||||
|
|
||||||
format.visual_output(&path);
|
format.visual_output(&path);
|
||||||
|
|
||||||
downloader.download(&ctx, &path).await?
|
downloader.download(&path).await?
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -172,9 +391,20 @@ impl Execute for Download {
|
||||||
async fn get_format(
|
async fn get_format(
|
||||||
download: &Download,
|
download: &Download,
|
||||||
single_format: &SingleFormat,
|
single_format: &SingleFormat,
|
||||||
|
try_peer_hardsubs: bool,
|
||||||
) -> Result<(DownloadFormat, Format)> {
|
) -> Result<(DownloadFormat, Format)> {
|
||||||
let stream = single_format.stream().await?;
|
let stream = single_format.stream().await?;
|
||||||
let Some((video, audio)) = variant_data_from_stream(&stream, &download.resolution).await? else {
|
let Some((video, audio, contains_hardsub)) = stream_data_from_stream(
|
||||||
|
&stream,
|
||||||
|
&download.resolution,
|
||||||
|
if try_peer_hardsubs {
|
||||||
|
download.subtitle.clone()
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
else {
|
||||||
if single_format.is_episode() {
|
if single_format.is_episode() {
|
||||||
bail!(
|
bail!(
|
||||||
"Resolution ({}) is not available for episode {} ({}) of {} season {}",
|
"Resolution ({}) is not available for episode {} ({}) of {} season {}",
|
||||||
|
|
@ -194,8 +424,23 @@ async fn get_format(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let subtitle = if let Some(subtitle_locale) = &download.subtitle {
|
let subtitle = if contains_hardsub {
|
||||||
stream.subtitles.get(subtitle_locale).map(|s| s.clone())
|
None
|
||||||
|
} else if let Some(subtitle_locale) = &download.subtitle {
|
||||||
|
if download.audio == Locale::ja_JP {
|
||||||
|
stream
|
||||||
|
.subtitles
|
||||||
|
.get(subtitle_locale)
|
||||||
|
// use closed captions as fallback if no actual subtitles are found
|
||||||
|
.or_else(|| stream.captions.get(subtitle_locale))
|
||||||
|
.cloned()
|
||||||
|
} else {
|
||||||
|
stream
|
||||||
|
.captions
|
||||||
|
.get(subtitle_locale)
|
||||||
|
.or_else(|| stream.subtitles.get(subtitle_locale))
|
||||||
|
.cloned()
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
@ -203,15 +448,36 @@ async fn get_format(
|
||||||
let download_format = DownloadFormat {
|
let download_format = DownloadFormat {
|
||||||
video: (video.clone(), single_format.audio.clone()),
|
video: (video.clone(), single_format.audio.clone()),
|
||||||
audios: vec![(audio, single_format.audio.clone())],
|
audios: vec![(audio, single_format.audio.clone())],
|
||||||
subtitles: subtitle
|
subtitles: subtitle.clone().map_or(vec![], |s| {
|
||||||
.clone()
|
vec![(
|
||||||
.map_or(vec![], |s| vec![(s, single_format.audio == Locale::ja_JP || stream.subtitles.len() > 1)]),
|
s,
|
||||||
|
single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1,
|
||||||
|
)]
|
||||||
|
}),
|
||||||
|
metadata: DownloadFormatMetadata {
|
||||||
|
skip_events: if download.include_chapters {
|
||||||
|
single_format.skip_events().await?
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
},
|
||||||
};
|
};
|
||||||
let format = Format::from_single_formats(vec![(
|
let mut format = Format::from_single_formats(vec![(
|
||||||
single_format.clone(),
|
single_format.clone(),
|
||||||
video,
|
video,
|
||||||
subtitle.map_or(vec![], |s| vec![(s, single_format.audio == Locale::ja_JP || stream.subtitles.len() > 1)]),
|
subtitle.map_or(vec![], |s| {
|
||||||
|
vec![(
|
||||||
|
s,
|
||||||
|
single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1,
|
||||||
|
)]
|
||||||
|
}),
|
||||||
)]);
|
)]);
|
||||||
|
if contains_hardsub {
|
||||||
|
let (_, subs) = format.locales.get_mut(0).unwrap();
|
||||||
|
subs.push(download.subtitle.clone().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
stream.invalidate().await?;
|
||||||
|
|
||||||
Ok((download_format, format))
|
Ok((download_format, format))
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,231 +0,0 @@
|
||||||
use crate::download::Download;
|
|
||||||
use crate::utils::filter::Filter;
|
|
||||||
use crate::utils::format::{Format, SingleFormat, SingleFormatCollection};
|
|
||||||
use crate::utils::parse::UrlFilter;
|
|
||||||
use anyhow::{bail, Result};
|
|
||||||
use crunchyroll_rs::{Concert, Episode, Movie, MovieListing, MusicVideo, Season, Series};
|
|
||||||
use log::{error, warn};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
pub(crate) struct DownloadFilter {
|
|
||||||
url_filter: UrlFilter,
|
|
||||||
download: Download,
|
|
||||||
season_episode_count: HashMap<u32, Vec<String>>,
|
|
||||||
season_subtitles_missing: Vec<u32>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DownloadFilter {
|
|
||||||
pub(crate) fn new(url_filter: UrlFilter, download: Download) -> Self {
|
|
||||||
Self {
|
|
||||||
url_filter,
|
|
||||||
download,
|
|
||||||
season_episode_count: HashMap::new(),
|
|
||||||
season_subtitles_missing: vec![],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait::async_trait]
|
|
||||||
impl Filter for DownloadFilter {
|
|
||||||
type T = SingleFormat;
|
|
||||||
type Output = SingleFormatCollection;
|
|
||||||
|
|
||||||
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>> {
|
|
||||||
// `series.audio_locales` isn't always populated b/c of crunchyrolls api. so check if the
|
|
||||||
// audio is matching only if the field is populated
|
|
||||||
if !series.audio_locales.is_empty() {
|
|
||||||
if !series.audio_locales.contains(&self.download.audio) {
|
|
||||||
error!(
|
|
||||||
"Series {} is not available with {} audio",
|
|
||||||
series.title, self.download.audio
|
|
||||||
);
|
|
||||||
return Ok(vec![]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let seasons = series.seasons().await?;
|
|
||||||
|
|
||||||
Ok(seasons)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_season(&mut self, mut season: Season) -> Result<Vec<Episode>> {
|
|
||||||
if !self.url_filter.is_season_valid(season.season_number) {
|
|
||||||
return Ok(vec![]);
|
|
||||||
}
|
|
||||||
|
|
||||||
if !season
|
|
||||||
.audio_locales
|
|
||||||
.iter()
|
|
||||||
.any(|l| l == &self.download.audio)
|
|
||||||
{
|
|
||||||
if season
|
|
||||||
.available_versions()
|
|
||||||
.await?
|
|
||||||
.iter()
|
|
||||||
.any(|l| l == &self.download.audio)
|
|
||||||
{
|
|
||||||
season = season
|
|
||||||
.version(vec![self.download.audio.clone()])
|
|
||||||
.await?
|
|
||||||
.remove(0)
|
|
||||||
} else {
|
|
||||||
error!(
|
|
||||||
"Season {} - '{}' is not available with {} audio",
|
|
||||||
season.season_number,
|
|
||||||
season.title,
|
|
||||||
self.download.audio.clone(),
|
|
||||||
);
|
|
||||||
return Ok(vec![]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut episodes = season.episodes().await?;
|
|
||||||
|
|
||||||
if Format::has_relative_episodes_fmt(&self.download.output) {
|
|
||||||
for episode in episodes.iter() {
|
|
||||||
self.season_episode_count
|
|
||||||
.entry(episode.season_number)
|
|
||||||
.or_insert(vec![])
|
|
||||||
.push(episode.id.clone())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
episodes.retain(|e| {
|
|
||||||
self.url_filter
|
|
||||||
.is_episode_valid(e.episode_number, season.season_number)
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(episodes)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_episode(&mut self, mut episode: Episode) -> Result<Option<Self::T>> {
|
|
||||||
if !self
|
|
||||||
.url_filter
|
|
||||||
.is_episode_valid(episode.episode_number, episode.season_number)
|
|
||||||
{
|
|
||||||
return Ok(None);
|
|
||||||
}
|
|
||||||
|
|
||||||
// check if the audio locale is correct.
|
|
||||||
// should only be incorrect if the console input was a episode url. otherwise
|
|
||||||
// `DownloadFilter::visit_season` returns the correct episodes with matching audio
|
|
||||||
if episode.audio_locale != self.download.audio {
|
|
||||||
// check if any other version (same episode, other language) of this episode is available
|
|
||||||
// with the requested audio. if not, return an error
|
|
||||||
if !episode
|
|
||||||
.available_versions()
|
|
||||||
.await?
|
|
||||||
.contains(&self.download.audio)
|
|
||||||
{
|
|
||||||
bail!(
|
|
||||||
"Episode {} ({}) of {} season {} is not available with {} audio",
|
|
||||||
episode.episode_number,
|
|
||||||
episode.title,
|
|
||||||
episode.series_title,
|
|
||||||
episode.season_number,
|
|
||||||
self.download.audio
|
|
||||||
)
|
|
||||||
}
|
|
||||||
// overwrite the current episode with the other version episode
|
|
||||||
episode = episode
|
|
||||||
.version(vec![self.download.audio.clone()])
|
|
||||||
.await?
|
|
||||||
.remove(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
// check if the subtitles are supported
|
|
||||||
if let Some(subtitle_locale) = &self.download.subtitle {
|
|
||||||
if !episode.subtitle_locales.contains(subtitle_locale) {
|
|
||||||
// if the episode doesn't have the requested subtitles, print a error. to print this
|
|
||||||
// error only once per season, it's checked if an error got printed before by looking
|
|
||||||
// up if the season id is present in `self.season_subtitles_missing`. if not, print
|
|
||||||
// the error and add the season id to `self.season_subtitles_missing`. if it is
|
|
||||||
// present, skip the error printing
|
|
||||||
if !self
|
|
||||||
.season_subtitles_missing
|
|
||||||
.contains(&episode.season_number)
|
|
||||||
{
|
|
||||||
self.season_subtitles_missing.push(episode.season_number);
|
|
||||||
error!(
|
|
||||||
"{} season {} is not available with {} subtitles",
|
|
||||||
episode.series_title, episode.season_number, subtitle_locale
|
|
||||||
);
|
|
||||||
}
|
|
||||||
return Ok(None);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// get the relative episode number. only done if the output string has the pattern to include
|
|
||||||
// the relative episode number as this requires some extra fetching
|
|
||||||
let relative_episode_number = if Format::has_relative_episodes_fmt(&self.download.output) {
|
|
||||||
if self
|
|
||||||
.season_episode_count
|
|
||||||
.get(&episode.season_number)
|
|
||||||
.is_none()
|
|
||||||
{
|
|
||||||
let season_episodes = episode.season().await?.episodes().await?;
|
|
||||||
self.season_episode_count.insert(
|
|
||||||
episode.season_number,
|
|
||||||
season_episodes.into_iter().map(|e| e.id).collect(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
let relative_episode_number = self
|
|
||||||
.season_episode_count
|
|
||||||
.get(&episode.season_number)
|
|
||||||
.unwrap()
|
|
||||||
.iter()
|
|
||||||
.position(|id| id == &episode.id)
|
|
||||||
.map(|index| index + 1);
|
|
||||||
if relative_episode_number.is_none() {
|
|
||||||
warn!(
|
|
||||||
"Failed to get relative episode number for episode {} ({}) of {} season {}",
|
|
||||||
episode.episode_number,
|
|
||||||
episode.title,
|
|
||||||
episode.series_title,
|
|
||||||
episode.season_number,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
relative_episode_number
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Some(SingleFormat::new_from_episode(
|
|
||||||
episode.clone(),
|
|
||||||
self.download.subtitle.clone().map_or(vec![], |s| {
|
|
||||||
if episode.subtitle_locales.contains(&s) {
|
|
||||||
vec![s]
|
|
||||||
} else {
|
|
||||||
vec![]
|
|
||||||
}
|
|
||||||
}),
|
|
||||||
relative_episode_number.map(|n| n as u32),
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>> {
|
|
||||||
Ok(movie_listing.movies().await?)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_movie(&mut self, movie: Movie) -> Result<Option<Self::T>> {
|
|
||||||
Ok(Some(SingleFormat::new_from_movie(movie, vec![])))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Option<Self::T>> {
|
|
||||||
Ok(Some(SingleFormat::new_from_music_video(music_video)))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn visit_concert(&mut self, concert: Concert) -> Result<Option<Self::T>> {
|
|
||||||
Ok(Some(SingleFormat::new_from_concert(concert)))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn finish(self, input: Vec<Self::T>) -> Result<Self::Output> {
|
|
||||||
let mut single_format_collection = SingleFormatCollection::new();
|
|
||||||
|
|
||||||
for data in input {
|
|
||||||
single_format_collection.add_single_formats(vec![data])
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(single_format_collection)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,4 +1,3 @@
|
||||||
mod command;
|
mod command;
|
||||||
mod filter;
|
|
||||||
|
|
||||||
pub use command::Download;
|
pub use command::Download;
|
||||||
|
|
|
||||||
|
|
@ -5,27 +5,30 @@ use anyhow::bail;
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use clap::{Parser, Subcommand};
|
use clap::{Parser, Subcommand};
|
||||||
use crunchyroll_rs::crunchyroll::CrunchyrollBuilder;
|
use crunchyroll_rs::crunchyroll::CrunchyrollBuilder;
|
||||||
use crunchyroll_rs::error::CrunchyrollError;
|
use crunchyroll_rs::error::Error;
|
||||||
use crunchyroll_rs::{Crunchyroll, Locale};
|
use crunchyroll_rs::{Crunchyroll, Locale};
|
||||||
use log::{debug, error, warn, LevelFilter};
|
use log::{debug, error, warn, LevelFilter};
|
||||||
use reqwest::Proxy;
|
use reqwest::{Client, Proxy};
|
||||||
use std::{env, fs};
|
use std::{env, fs};
|
||||||
|
|
||||||
mod archive;
|
mod archive;
|
||||||
mod download;
|
mod download;
|
||||||
mod login;
|
mod login;
|
||||||
|
mod search;
|
||||||
mod utils;
|
mod utils;
|
||||||
|
|
||||||
|
use crate::utils::rate_limit::RateLimiterService;
|
||||||
pub use archive::Archive;
|
pub use archive::Archive;
|
||||||
|
use dialoguer::console::Term;
|
||||||
pub use download::Download;
|
pub use download::Download;
|
||||||
pub use login::Login;
|
pub use login::Login;
|
||||||
|
pub use search::Search;
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
|
||||||
trait Execute {
|
trait Execute {
|
||||||
fn pre_check(&mut self) -> Result<()> {
|
fn pre_check(&mut self) -> Result<()> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
async fn execute(mut self, ctx: Context) -> Result<()>;
|
async fn execute(self, ctx: Context) -> Result<()>;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Parser)]
|
#[derive(Debug, Parser)]
|
||||||
|
|
@ -33,32 +36,48 @@ trait Execute {
|
||||||
#[clap(name = "crunchy-cli")]
|
#[clap(name = "crunchy-cli")]
|
||||||
pub struct Cli {
|
pub struct Cli {
|
||||||
#[clap(flatten)]
|
#[clap(flatten)]
|
||||||
verbosity: Option<Verbosity>,
|
verbosity: Verbosity,
|
||||||
|
|
||||||
#[arg(
|
#[arg(
|
||||||
help = "Overwrite the language in which results are returned. Default is your system language"
|
help = "Overwrite the language in which results are returned. Default is your system language"
|
||||||
)]
|
)]
|
||||||
#[arg(long)]
|
#[arg(global = true, long)]
|
||||||
lang: Option<Locale>,
|
lang: Option<Locale>,
|
||||||
|
|
||||||
#[arg(help = "Enable experimental fixes which may resolve some unexpected errors")]
|
#[arg(
|
||||||
|
help = "Enable experimental fixes which may resolve some unexpected errors. Generally not recommended as this flag may crash the program completely"
|
||||||
|
)]
|
||||||
#[arg(
|
#[arg(
|
||||||
long_help = "Enable experimental fixes which may resolve some unexpected errors. \
|
long_help = "Enable experimental fixes which may resolve some unexpected errors. \
|
||||||
|
It is not recommended to use this this flag regularly, it might cause unexpected errors which may crash the program completely. \
|
||||||
If everything works as intended this option isn't needed, but sometimes Crunchyroll mislabels \
|
If everything works as intended this option isn't needed, but sometimes Crunchyroll mislabels \
|
||||||
the audio of a series/season or episode or returns a wrong season number. This is when using this option might help to solve the issue"
|
the audio of a series/season or episode or returns a wrong season number. This is when using this option might help to solve the issue"
|
||||||
)]
|
)]
|
||||||
#[arg(long, default_value_t = false)]
|
#[arg(global = true, long, default_value_t = false)]
|
||||||
experimental_fixes: bool,
|
experimental_fixes: bool,
|
||||||
|
|
||||||
#[clap(flatten)]
|
#[clap(flatten)]
|
||||||
login_method: LoginMethod,
|
login_method: login::LoginMethod,
|
||||||
|
|
||||||
#[arg(help = "Use a proxy to route all traffic through")]
|
#[arg(help = "Use a proxy to route all traffic through")]
|
||||||
#[arg(long_help = "Use a proxy to route all traffic through. \
|
#[arg(long_help = "Use a proxy to route all traffic through. \
|
||||||
Make sure that the proxy can either forward TLS requests, which is needed to bypass the (cloudflare) bot protection, or that it is configured so that the proxy can bypass the protection itself")]
|
Make sure that the proxy can either forward TLS requests, which is needed to bypass the (cloudflare) bot protection, or that it is configured so that the proxy can bypass the protection itself. \
|
||||||
#[clap(long)]
|
Besides specifying a simple url, you also can partially control where a proxy should be used: '<url>:' only proxies api requests, ':<url>' only proxies download traffic, '<url>:<url>' proxies api requests through the first url and download traffic through the second url")]
|
||||||
#[arg(value_parser = crate::utils::clap::clap_parse_proxy)]
|
#[arg(global = true, long, value_parser = crate::utils::clap::clap_parse_proxies)]
|
||||||
proxy: Option<Proxy>,
|
proxy: Option<(Option<Proxy>, Option<Proxy>)>,
|
||||||
|
|
||||||
|
#[arg(help = "Use custom user agent")]
|
||||||
|
#[arg(global = true, long)]
|
||||||
|
user_agent: Option<String>,
|
||||||
|
|
||||||
|
#[arg(
|
||||||
|
help = "Maximal speed to download/request (may be a bit off here and there). Must be in format of <number>[B|KB|MB]"
|
||||||
|
)]
|
||||||
|
#[arg(
|
||||||
|
long_help = "Maximal speed to download/request (may be a bit off here and there). Must be in format of <number>[B|KB|MB] (e.g. 500KB or 10MB)"
|
||||||
|
)]
|
||||||
|
#[arg(global = true, long, value_parser = crate::utils::clap::clap_parse_speed_limit)]
|
||||||
|
speed_limit: Option<u32>,
|
||||||
|
|
||||||
#[clap(subcommand)]
|
#[clap(subcommand)]
|
||||||
command: Command,
|
command: Command,
|
||||||
|
|
@ -70,7 +89,7 @@ fn version() -> String {
|
||||||
let build_date = env!("BUILD_DATE");
|
let build_date = env!("BUILD_DATE");
|
||||||
|
|
||||||
if git_commit_hash.is_empty() {
|
if git_commit_hash.is_empty() {
|
||||||
format!("{}", package_version)
|
package_version.to_string()
|
||||||
} else {
|
} else {
|
||||||
format!("{} ({} {})", package_version, git_commit_hash, build_date)
|
format!("{} ({} {})", package_version, git_commit_hash, build_date)
|
||||||
}
|
}
|
||||||
|
|
@ -81,50 +100,33 @@ enum Command {
|
||||||
Archive(Archive),
|
Archive(Archive),
|
||||||
Download(Download),
|
Download(Download),
|
||||||
Login(Login),
|
Login(Login),
|
||||||
|
Search(Search),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Parser)]
|
#[derive(Debug, Parser)]
|
||||||
struct Verbosity {
|
struct Verbosity {
|
||||||
#[arg(help = "Verbose output")]
|
#[arg(help = "Verbose output")]
|
||||||
#[arg(short)]
|
#[arg(global = true, short, long)]
|
||||||
v: bool,
|
verbose: bool,
|
||||||
|
|
||||||
#[arg(help = "Quiet output. Does not print anything unless it's a error")]
|
#[arg(help = "Quiet output. Does not print anything unless it's a error")]
|
||||||
#[arg(
|
#[arg(
|
||||||
long_help = "Quiet output. Does not print anything unless it's a error. Can be helpful if you pipe the output to stdout"
|
long_help = "Quiet output. Does not print anything unless it's a error. Can be helpful if you pipe the output to stdout"
|
||||||
)]
|
)]
|
||||||
#[arg(short)]
|
#[arg(global = true, short, long)]
|
||||||
q: bool,
|
quiet: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Parser)]
|
pub async fn main(args: &[String]) {
|
||||||
struct LoginMethod {
|
let mut cli: Cli = Cli::parse_from(args);
|
||||||
#[arg(
|
|
||||||
help = "Login with credentials (username or email and password). Must be provided as user:password"
|
|
||||||
)]
|
|
||||||
#[arg(long)]
|
|
||||||
credentials: Option<String>,
|
|
||||||
#[arg(help = "Login with the etp-rt cookie")]
|
|
||||||
#[arg(
|
|
||||||
long_help = "Login with the etp-rt cookie. This can be obtained when you login on crunchyroll.com and extract it from there"
|
|
||||||
)]
|
|
||||||
#[arg(long)]
|
|
||||||
etp_rt: Option<String>,
|
|
||||||
#[arg(help = "Login anonymously / without an account")]
|
|
||||||
#[arg(long, default_value_t = false)]
|
|
||||||
anonymous: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn cli_entrypoint() {
|
if cli.verbosity.verbose || cli.verbosity.quiet {
|
||||||
let mut cli: Cli = Cli::parse();
|
if cli.verbosity.verbose && cli.verbosity.quiet {
|
||||||
|
|
||||||
if let Some(verbosity) = &cli.verbosity {
|
|
||||||
if verbosity.v as u8 + verbosity.q as u8 > 1 {
|
|
||||||
eprintln!("Output cannot be verbose ('-v') and quiet ('-q') at the same time");
|
eprintln!("Output cannot be verbose ('-v') and quiet ('-q') at the same time");
|
||||||
std::process::exit(1)
|
std::process::exit(1)
|
||||||
} else if verbosity.v {
|
} else if cli.verbosity.verbose {
|
||||||
CliLogger::init(LevelFilter::Debug).unwrap()
|
CliLogger::init(LevelFilter::Debug).unwrap()
|
||||||
} else if verbosity.q {
|
} else if cli.verbosity.quiet {
|
||||||
CliLogger::init(LevelFilter::Error).unwrap()
|
CliLogger::init(LevelFilter::Error).unwrap()
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -134,8 +136,20 @@ pub async fn cli_entrypoint() {
|
||||||
debug!("cli input: {:?}", cli);
|
debug!("cli input: {:?}", cli);
|
||||||
|
|
||||||
match &mut cli.command {
|
match &mut cli.command {
|
||||||
Command::Archive(archive) => pre_check_executor(archive).await,
|
Command::Archive(archive) => {
|
||||||
Command::Download(download) => pre_check_executor(download).await,
|
// prevent interactive select to be shown when output should be quiet
|
||||||
|
if cli.verbosity.quiet {
|
||||||
|
archive.yes = true;
|
||||||
|
}
|
||||||
|
pre_check_executor(archive).await
|
||||||
|
}
|
||||||
|
Command::Download(download) => {
|
||||||
|
// prevent interactive select to be shown when output should be quiet
|
||||||
|
if cli.verbosity.quiet {
|
||||||
|
download.yes = true;
|
||||||
|
}
|
||||||
|
pre_check_executor(download).await
|
||||||
|
}
|
||||||
Command::Login(login) => {
|
Command::Login(login) => {
|
||||||
if login.remove {
|
if login.remove {
|
||||||
if let Some(session_file) = login::session_file_path() {
|
if let Some(session_file) = login::session_file_path() {
|
||||||
|
|
@ -146,9 +160,10 @@ pub async fn cli_entrypoint() {
|
||||||
pre_check_executor(login).await
|
pre_check_executor(login).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Command::Search(search) => pre_check_executor(search).await,
|
||||||
};
|
};
|
||||||
|
|
||||||
let ctx = match create_ctx(&cli).await {
|
let ctx = match create_ctx(&mut cli).await {
|
||||||
Ok(ctx) => ctx,
|
Ok(ctx) => ctx,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
error!("{}", e);
|
error!("{}", e);
|
||||||
|
|
@ -159,7 +174,7 @@ pub async fn cli_entrypoint() {
|
||||||
|
|
||||||
ctrlc::set_handler(move || {
|
ctrlc::set_handler(move || {
|
||||||
debug!("Ctrl-c detected");
|
debug!("Ctrl-c detected");
|
||||||
if let Ok(dir) = fs::read_dir(&env::temp_dir()) {
|
if let Ok(dir) = fs::read_dir(env::temp_dir()) {
|
||||||
for file in dir.flatten() {
|
for file in dir.flatten() {
|
||||||
if file
|
if file
|
||||||
.path()
|
.path()
|
||||||
|
|
@ -169,19 +184,35 @@ pub async fn cli_entrypoint() {
|
||||||
.unwrap_or_default()
|
.unwrap_or_default()
|
||||||
.starts_with(".crunchy-cli_")
|
.starts_with(".crunchy-cli_")
|
||||||
{
|
{
|
||||||
let result = fs::remove_file(file.path());
|
if file.file_type().map_or(true, |ft| ft.is_file()) {
|
||||||
debug!(
|
let result = fs::remove_file(file.path());
|
||||||
"Ctrl-c removed temporary file {} {}",
|
debug!(
|
||||||
file.path().to_string_lossy(),
|
"Ctrl-c removed temporary file {} {}",
|
||||||
if result.is_ok() {
|
file.path().to_string_lossy(),
|
||||||
"successfully"
|
if result.is_ok() {
|
||||||
} else {
|
"successfully"
|
||||||
"not successfully"
|
} else {
|
||||||
}
|
"not successfully"
|
||||||
)
|
}
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
let result = fs::remove_dir_all(file.path());
|
||||||
|
debug!(
|
||||||
|
"Ctrl-c removed temporary directory {} {}",
|
||||||
|
file.path().to_string_lossy(),
|
||||||
|
if result.is_ok() {
|
||||||
|
"successfully"
|
||||||
|
} else {
|
||||||
|
"not successfully"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// when pressing ctrl-c while interactively choosing seasons the cursor stays hidden, this
|
||||||
|
// line shows it again
|
||||||
|
let _ = Term::stdout().show_cursor();
|
||||||
std::process::exit(1)
|
std::process::exit(1)
|
||||||
})
|
})
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
@ -191,6 +222,7 @@ pub async fn cli_entrypoint() {
|
||||||
Command::Archive(archive) => execute_executor(archive, ctx).await,
|
Command::Archive(archive) => execute_executor(archive, ctx).await,
|
||||||
Command::Download(download) => execute_executor(download, ctx).await,
|
Command::Download(download) => execute_executor(download, ctx).await,
|
||||||
Command::Login(login) => execute_executor(login, ctx).await,
|
Command::Login(login) => execute_executor(login, ctx).await,
|
||||||
|
Command::Search(search) => execute_executor(search, ctx).await,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -202,32 +234,53 @@ async fn pre_check_executor(executor: &mut impl Execute) {
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn execute_executor(executor: impl Execute, ctx: Context) {
|
async fn execute_executor(executor: impl Execute, ctx: Context) {
|
||||||
if let Err(err) = executor.execute(ctx).await {
|
if let Err(mut err) = executor.execute(ctx).await {
|
||||||
error!("a unexpected error occurred: {}", err);
|
if let Some(crunchy_error) = err.downcast_mut::<Error>() {
|
||||||
|
if let Error::Block { message, .. } = crunchy_error {
|
||||||
if let Some(crunchy_error) = err.downcast_ref::<CrunchyrollError>() {
|
*message = "Triggered Cloudflare bot protection. Try again later or use a VPN or proxy to spoof your location".to_string()
|
||||||
let message = match crunchy_error {
|
|
||||||
CrunchyrollError::Internal(i) => &i.message,
|
|
||||||
CrunchyrollError::Request(r) => &r.message,
|
|
||||||
CrunchyrollError::Decode(d) => &d.message,
|
|
||||||
CrunchyrollError::Authentication(a) => &a.message,
|
|
||||||
CrunchyrollError::Input(i) => &i.message,
|
|
||||||
};
|
|
||||||
if message.contains("content.get_video_streams_v2.cms_service_error") {
|
|
||||||
error!("You've probably hit a rate limit. Try again later, generally after 10-20 minutes the rate limit is over and you can continue to use the cli")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
error!("An error occurred: {}", crunchy_error)
|
||||||
|
} else {
|
||||||
|
error!("An error occurred: {}", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
std::process::exit(1)
|
std::process::exit(1)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn create_ctx(cli: &Cli) -> Result<Context> {
|
async fn create_ctx(cli: &mut Cli) -> Result<Context> {
|
||||||
let crunchy = crunchyroll_session(cli).await?;
|
let crunchy_client = reqwest_client(
|
||||||
Ok(Context { crunchy })
|
cli.proxy.as_ref().and_then(|p| p.0.clone()),
|
||||||
|
cli.user_agent.clone(),
|
||||||
|
);
|
||||||
|
let internal_client = reqwest_client(
|
||||||
|
cli.proxy.as_ref().and_then(|p| p.1.clone()),
|
||||||
|
cli.user_agent.clone(),
|
||||||
|
);
|
||||||
|
|
||||||
|
let crunchy = crunchyroll_session(
|
||||||
|
cli,
|
||||||
|
crunchy_client.clone(),
|
||||||
|
cli.speed_limit
|
||||||
|
.map(|l| RateLimiterService::new(l, crunchy_client)),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(Context {
|
||||||
|
crunchy,
|
||||||
|
client: internal_client.clone(),
|
||||||
|
rate_limiter: cli
|
||||||
|
.speed_limit
|
||||||
|
.map(|l| RateLimiterService::new(l, internal_client)),
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn crunchyroll_session(cli: &Cli) -> Result<Crunchyroll> {
|
async fn crunchyroll_session(
|
||||||
|
cli: &mut Cli,
|
||||||
|
client: Client,
|
||||||
|
rate_limiter: Option<RateLimiterService>,
|
||||||
|
) -> Result<Crunchyroll> {
|
||||||
let supported_langs = vec![
|
let supported_langs = vec![
|
||||||
Locale::ar_ME,
|
Locale::ar_ME,
|
||||||
Locale::de_DE,
|
Locale::de_DE,
|
||||||
|
|
@ -246,7 +299,7 @@ async fn crunchyroll_session(cli: &Cli) -> Result<Crunchyroll> {
|
||||||
"Via `--lang` specified language is not supported. Supported languages: {}",
|
"Via `--lang` specified language is not supported. Supported languages: {}",
|
||||||
supported_langs
|
supported_langs
|
||||||
.iter()
|
.iter()
|
||||||
.map(|l| format!("`{}` ({})", l.to_string(), l.to_human_readable()))
|
.map(|l| format!("`{}` ({})", l, l.to_human_readable()))
|
||||||
.collect::<Vec<String>>()
|
.collect::<Vec<String>>()
|
||||||
.join(", ")
|
.join(", ")
|
||||||
)
|
)
|
||||||
|
|
@ -261,55 +314,59 @@ async fn crunchyroll_session(cli: &Cli) -> Result<Crunchyroll> {
|
||||||
lang
|
lang
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut client_builder = CrunchyrollBuilder::predefined_client_builder();
|
|
||||||
if let Some(proxy) = &cli.proxy {
|
|
||||||
client_builder = client_builder.proxy(proxy.clone())
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut builder = Crunchyroll::builder()
|
let mut builder = Crunchyroll::builder()
|
||||||
.client(client_builder.build()?)
|
|
||||||
.locale(locale)
|
.locale(locale)
|
||||||
|
.client(client.clone())
|
||||||
.stabilization_locales(cli.experimental_fixes)
|
.stabilization_locales(cli.experimental_fixes)
|
||||||
.stabilization_season_number(cli.experimental_fixes);
|
.stabilization_season_number(cli.experimental_fixes);
|
||||||
|
|
||||||
if let Command::Download(download) = &cli.command {
|
if let Command::Download(download) = &cli.command {
|
||||||
builder = builder.preferred_audio_locale(download.audio.clone())
|
builder = builder.preferred_audio_locale(download.audio.clone())
|
||||||
}
|
}
|
||||||
|
if let Some(rate_limiter) = rate_limiter {
|
||||||
|
builder = builder.middleware(rate_limiter)
|
||||||
|
}
|
||||||
|
|
||||||
let login_methods_count = cli.login_method.credentials.is_some() as u8
|
let root_login_methods_count =
|
||||||
+ cli.login_method.etp_rt.is_some() as u8
|
cli.login_method.credentials.is_some() as u8 + cli.login_method.anonymous as u8;
|
||||||
+ cli.login_method.anonymous as u8;
|
|
||||||
|
|
||||||
let progress_handler = progress!("Logging in");
|
let progress_handler = progress!("Logging in");
|
||||||
if login_methods_count == 0 {
|
if root_login_methods_count == 0 {
|
||||||
if let Some(login_file_path) = login::session_file_path() {
|
if let Some(login_file_path) = login::session_file_path() {
|
||||||
if login_file_path.exists() {
|
if login_file_path.exists() {
|
||||||
let session = fs::read_to_string(login_file_path)?;
|
let session = fs::read_to_string(login_file_path)?;
|
||||||
if let Some((token_type, token)) = session.split_once(':') {
|
if let Some((token_type, token)) = session.split_once(':') {
|
||||||
match token_type {
|
match token_type {
|
||||||
"refresh_token" => {
|
"refresh_token" => {
|
||||||
return Ok(builder.login_with_refresh_token(token).await?)
|
return match builder.login_with_refresh_token(token).await {
|
||||||
|
Ok(crunchy) => Ok(crunchy),
|
||||||
|
Err(e) => {
|
||||||
|
if let Error::Request { message, .. } = &e {
|
||||||
|
if message.starts_with("invalid_grant") {
|
||||||
|
bail!("The stored login is expired, please login again")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
"etp_rt" => return Ok(builder.login_with_etp_rt(token).await?),
|
"etp_rt" => bail!("The stored login method (etp-rt) isn't supported anymore. Please login again using your credentials"),
|
||||||
_ => (),
|
_ => (),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
bail!("Could not read stored session ('{}')", session)
|
bail!("Could not read stored session ('{}')", session)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
bail!("Please use a login method ('--credentials', '--etp-rt' or '--anonymous')")
|
bail!("Please use a login method ('--credentials' or '--anonymous')")
|
||||||
} else if login_methods_count > 1 {
|
} else if root_login_methods_count > 1 {
|
||||||
bail!("Please use only one login method ('--credentials', '--etp-rt' or '--anonymous')")
|
bail!("Please use only one login method ('--credentials' or '--anonymous')")
|
||||||
}
|
}
|
||||||
|
|
||||||
let crunchy = if let Some(credentials) = &cli.login_method.credentials {
|
let crunchy = if let Some(credentials) = &cli.login_method.credentials {
|
||||||
if let Some((user, password)) = credentials.split_once(':') {
|
if let Some((email, password)) = credentials.split_once(':') {
|
||||||
builder.login_with_credentials(user, password).await?
|
builder.login_with_credentials(email, password).await?
|
||||||
} else {
|
} else {
|
||||||
bail!("Invalid credentials format. Please provide your credentials as user:password")
|
bail!("Invalid credentials format. Please provide your credentials as email:password")
|
||||||
}
|
}
|
||||||
} else if let Some(etp_rt) = &cli.login_method.etp_rt {
|
|
||||||
builder.login_with_etp_rt(etp_rt).await?
|
|
||||||
} else if cli.login_method.anonymous {
|
} else if cli.login_method.anonymous {
|
||||||
builder.login_anonymously().await?
|
builder.login_anonymously().await?
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -320,3 +377,29 @@ async fn crunchyroll_session(cli: &Cli) -> Result<Crunchyroll> {
|
||||||
|
|
||||||
Ok(crunchy)
|
Ok(crunchy)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn reqwest_client(proxy: Option<Proxy>, user_agent: Option<String>) -> Client {
|
||||||
|
let mut builder = CrunchyrollBuilder::predefined_client_builder();
|
||||||
|
if let Some(p) = proxy {
|
||||||
|
builder = builder.proxy(p)
|
||||||
|
}
|
||||||
|
if let Some(ua) = user_agent {
|
||||||
|
builder = builder.user_agent(ua)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(feature = "openssl-tls", feature = "openssl-tls-static"))]
|
||||||
|
let client = {
|
||||||
|
let mut builder = builder.use_native_tls().tls_built_in_root_certs(false);
|
||||||
|
|
||||||
|
for certificate in rustls_native_certs::load_native_certs().unwrap() {
|
||||||
|
builder =
|
||||||
|
builder.add_root_certificate(reqwest::Certificate::from_der(&certificate).unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
builder.build().unwrap()
|
||||||
|
};
|
||||||
|
#[cfg(not(any(feature = "openssl-tls", feature = "openssl-tls-static")))]
|
||||||
|
let client = builder.build().unwrap();
|
||||||
|
|
||||||
|
client
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -2,40 +2,54 @@ use crate::utils::context::Context;
|
||||||
use crate::Execute;
|
use crate::Execute;
|
||||||
use anyhow::bail;
|
use anyhow::bail;
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
|
use clap::Parser;
|
||||||
use crunchyroll_rs::crunchyroll::SessionToken;
|
use crunchyroll_rs::crunchyroll::SessionToken;
|
||||||
|
use log::info;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
#[derive(Debug, clap::Parser)]
|
#[derive(Debug, clap::Parser)]
|
||||||
#[clap(about = "Save your login credentials persistent on disk")]
|
#[clap(about = "Save your login credentials persistent on disk")]
|
||||||
pub struct Login {
|
pub struct Login {
|
||||||
#[arg(help = "Remove your stored credentials (instead of save them)")]
|
#[arg(help = "Remove your stored credentials (instead of saving them)")]
|
||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
pub remove: bool,
|
pub remove: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
|
||||||
impl Execute for Login {
|
impl Execute for Login {
|
||||||
async fn execute(self, ctx: Context) -> Result<()> {
|
async fn execute(self, ctx: Context) -> Result<()> {
|
||||||
if let Some(login_file_path) = session_file_path() {
|
if let Some(login_file_path) = session_file_path() {
|
||||||
fs::create_dir_all(login_file_path.parent().unwrap())?;
|
fs::create_dir_all(login_file_path.parent().unwrap())?;
|
||||||
|
|
||||||
match ctx.crunchy.session_token().await {
|
match ctx.crunchy.session_token().await {
|
||||||
SessionToken::RefreshToken(refresh_token) => Ok(fs::write(
|
SessionToken::RefreshToken(refresh_token) => {
|
||||||
login_file_path,
|
fs::write(login_file_path, format!("refresh_token:{}", refresh_token))?
|
||||||
format!("refresh_token:{}", refresh_token),
|
|
||||||
)?),
|
|
||||||
SessionToken::EtpRt(etp_rt) => {
|
|
||||||
Ok(fs::write(login_file_path, format!("etp_rt:{}", etp_rt))?)
|
|
||||||
}
|
}
|
||||||
|
SessionToken::EtpRt(_) => bail!("Login with etp_rt isn't supported anymore. Please use your credentials to login"),
|
||||||
SessionToken::Anonymous => bail!("Anonymous login cannot be saved"),
|
SessionToken::Anonymous => bail!("Anonymous login cannot be saved"),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
info!("Saved login");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
} else {
|
} else {
|
||||||
bail!("Cannot find config path")
|
bail!("Cannot find config path")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Parser)]
|
||||||
|
pub struct LoginMethod {
|
||||||
|
#[arg(
|
||||||
|
help = "Login with credentials (email and password). Must be provided as email:password"
|
||||||
|
)]
|
||||||
|
#[arg(global = true, long)]
|
||||||
|
pub credentials: Option<String>,
|
||||||
|
#[arg(help = "Login anonymously / without an account")]
|
||||||
|
#[arg(global = true, long, default_value_t = false)]
|
||||||
|
pub anonymous: bool,
|
||||||
|
}
|
||||||
|
|
||||||
pub fn session_file_path() -> Option<PathBuf> {
|
pub fn session_file_path() -> Option<PathBuf> {
|
||||||
dirs::config_dir().map(|config_dir| config_dir.join("crunchy-cli").join("session"))
|
dirs::config_dir().map(|config_dir| config_dir.join("crunchy-cli").join("session"))
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,3 @@
|
||||||
mod command;
|
mod command;
|
||||||
|
|
||||||
pub use command::session_file_path;
|
pub use command::{session_file_path, Login, LoginMethod};
|
||||||
pub use command::Login;
|
|
||||||
|
|
|
||||||
222
crunchy-cli-core/src/search/command.rs
Normal file
222
crunchy-cli-core/src/search/command.rs
Normal file
|
|
@ -0,0 +1,222 @@
|
||||||
|
use crate::search::filter::FilterOptions;
|
||||||
|
use crate::search::format::Format;
|
||||||
|
use crate::utils::context::Context;
|
||||||
|
use crate::utils::parse::{parse_url, UrlFilter};
|
||||||
|
use crate::Execute;
|
||||||
|
use anyhow::{bail, Result};
|
||||||
|
use crunchyroll_rs::common::StreamExt;
|
||||||
|
use crunchyroll_rs::search::QueryResults;
|
||||||
|
use crunchyroll_rs::{Episode, Locale, MediaCollection, MovieListing, MusicVideo, Series};
|
||||||
|
use log::warn;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
#[derive(Debug, clap::Parser)]
|
||||||
|
#[clap(about = "Search in videos")]
|
||||||
|
#[command(arg_required_else_help(true))]
|
||||||
|
pub struct Search {
|
||||||
|
#[arg(help = format!("Audio languages to include. \
|
||||||
|
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
|
#[arg(long_help = format!("Audio languages to include. \
|
||||||
|
Available languages are:\n {}", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
||||||
|
#[arg(long, default_values_t = vec![crate::utils::locale::system_locale()])]
|
||||||
|
audio: Vec<Locale>,
|
||||||
|
|
||||||
|
#[arg(help = "Limit of search top search results")]
|
||||||
|
#[arg(long, default_value_t = 5)]
|
||||||
|
search_top_results_limit: u32,
|
||||||
|
#[arg(help = "Limit of search series results")]
|
||||||
|
#[arg(long, default_value_t = 0)]
|
||||||
|
search_series_limit: u32,
|
||||||
|
#[arg(help = "Limit of search movie listing results")]
|
||||||
|
#[arg(long, default_value_t = 0)]
|
||||||
|
search_movie_listing_limit: u32,
|
||||||
|
#[arg(help = "Limit of search episode results")]
|
||||||
|
#[arg(long, default_value_t = 0)]
|
||||||
|
search_episode_limit: u32,
|
||||||
|
#[arg(help = "Limit of search music results")]
|
||||||
|
#[arg(long, default_value_t = 0)]
|
||||||
|
search_music_limit: u32,
|
||||||
|
|
||||||
|
/// Format of the output text.
|
||||||
|
///
|
||||||
|
/// You can specify keywords in a specific pattern and they will get replaced in the output text.
|
||||||
|
/// The required pattern for this begins with `{{`, then the keyword, and closes with `}}` (e.g. `{{episode.title}}`).
|
||||||
|
/// For example, if you want to get the title of an episode, you can use `Title {{episode.title}}` and `{{episode.title}}` will be replaced with the episode title
|
||||||
|
///
|
||||||
|
/// See the following list for all keywords and their meaning:
|
||||||
|
/// series.id → Series id
|
||||||
|
/// series.title → Series title
|
||||||
|
/// series.description → Series description
|
||||||
|
/// series.release_year → Series release year
|
||||||
|
///
|
||||||
|
/// season.id → Season id
|
||||||
|
/// season.title → Season title
|
||||||
|
/// season.description → Season description
|
||||||
|
/// season.number → Season number
|
||||||
|
/// season.episodes → Number of episodes the season has
|
||||||
|
///
|
||||||
|
/// episode.id → Episode id
|
||||||
|
/// episode.title → Episode title
|
||||||
|
/// episode.description → Episode description
|
||||||
|
/// episode.locale → Episode locale/language
|
||||||
|
/// episode.number → Episode number
|
||||||
|
/// episode.sequence_number → Episode number. This number is unique unlike `episode.number` which sometimes can be duplicated
|
||||||
|
/// episode.duration → Episode duration in milliseconds
|
||||||
|
/// episode.air_date → Episode air date as unix timestamp
|
||||||
|
/// episode.premium_only → If the episode is only available with Crunchyroll premium
|
||||||
|
///
|
||||||
|
/// movie_listing.id → Movie listing id
|
||||||
|
/// movie_listing.title → Movie listing title
|
||||||
|
/// movie_listing.description → Movie listing description
|
||||||
|
///
|
||||||
|
/// movie.id → Movie id
|
||||||
|
/// movie.title → Movie title
|
||||||
|
/// movie.description → Movie description
|
||||||
|
/// movie.duration → Movie duration in milliseconds
|
||||||
|
/// movie.premium_only → If the movie is only available with Crunchyroll premium
|
||||||
|
///
|
||||||
|
/// music_video.id → Music video id
|
||||||
|
/// music_video.title → Music video title
|
||||||
|
/// music_video.description → Music video description
|
||||||
|
/// music_video.duration → Music video duration in milliseconds
|
||||||
|
/// music_video.premium_only → If the music video is only available with Crunchyroll premium
|
||||||
|
///
|
||||||
|
/// concert.id → Concert id
|
||||||
|
/// concert.title → Concert title
|
||||||
|
/// concert.description → Concert description
|
||||||
|
/// concert.duration → Concert duration in milliseconds
|
||||||
|
/// concert.premium_only → If the concert is only available with Crunchyroll premium
|
||||||
|
///
|
||||||
|
/// stream.locale → Stream locale/language
|
||||||
|
/// stream.dash_url → Stream url in DASH format. You need to set the `Authorization` header to `Bearer <account.token>` when requesting this url
|
||||||
|
/// stream.is_drm → If `stream.dash_url` is DRM encrypted
|
||||||
|
///
|
||||||
|
/// subtitle.locale → Subtitle locale/language
|
||||||
|
/// subtitle.url → Url to the subtitle
|
||||||
|
///
|
||||||
|
/// account.token → Access token to make request to restricted endpoints. This token is only valid for a max. of 5 minutes
|
||||||
|
/// account.id → Internal ID of the user account
|
||||||
|
/// account.profile_name → Profile name of the account
|
||||||
|
/// account.email → Email address of the account
|
||||||
|
#[arg(short, long, verbatim_doc_comment)]
|
||||||
|
#[arg(default_value = "S{{season.number}}E{{episode.number}} - {{episode.title}}")]
|
||||||
|
output: String,
|
||||||
|
|
||||||
|
input: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Execute for Search {
|
||||||
|
async fn execute(self, ctx: Context) -> Result<()> {
|
||||||
|
if !ctx.crunchy.premium().await {
|
||||||
|
warn!("Using `search` anonymously or with a non-premium account may return incomplete results")
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.output.contains("{{stream.is_drm}}") {
|
||||||
|
warn!("The `{{{{stream.is_drm}}}}` option is deprecated as it isn't reliable anymore and will be removed soon")
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = if crunchyroll_rs::parse::parse_url(&self.input).is_some() {
|
||||||
|
match parse_url(&ctx.crunchy, self.input.clone(), true).await {
|
||||||
|
Ok(ok) => vec![ok],
|
||||||
|
Err(e) => bail!("url {} could not be parsed: {}", self.input, e),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let mut output = vec![];
|
||||||
|
|
||||||
|
let query = resolve_query(&self, ctx.crunchy.query(&self.input)).await?;
|
||||||
|
output.extend(query.0.into_iter().map(|m| (m, UrlFilter::default())));
|
||||||
|
output.extend(
|
||||||
|
query
|
||||||
|
.1
|
||||||
|
.into_iter()
|
||||||
|
.map(|s| (s.into(), UrlFilter::default())),
|
||||||
|
);
|
||||||
|
output.extend(
|
||||||
|
query
|
||||||
|
.2
|
||||||
|
.into_iter()
|
||||||
|
.map(|m| (m.into(), UrlFilter::default())),
|
||||||
|
);
|
||||||
|
output.extend(
|
||||||
|
query
|
||||||
|
.3
|
||||||
|
.into_iter()
|
||||||
|
.map(|e| (e.into(), UrlFilter::default())),
|
||||||
|
);
|
||||||
|
output.extend(
|
||||||
|
query
|
||||||
|
.4
|
||||||
|
.into_iter()
|
||||||
|
.map(|m| (m.into(), UrlFilter::default())),
|
||||||
|
);
|
||||||
|
|
||||||
|
output
|
||||||
|
};
|
||||||
|
|
||||||
|
let crunchy_arc = Arc::new(ctx.crunchy);
|
||||||
|
for (media_collection, url_filter) in input {
|
||||||
|
let filter_options = FilterOptions {
|
||||||
|
audio: self.audio.clone(),
|
||||||
|
url_filter,
|
||||||
|
};
|
||||||
|
|
||||||
|
let format = Format::new(self.output.clone(), filter_options, crunchy_arc.clone())?;
|
||||||
|
println!("{}", format.parse(media_collection).await?);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! resolve_query {
|
||||||
|
($limit:expr, $vec:expr, $item:expr) => {
|
||||||
|
if $limit > 0 {
|
||||||
|
let mut item_results = $item;
|
||||||
|
while let Some(item) = item_results.next().await {
|
||||||
|
$vec.push(item?);
|
||||||
|
if $vec.len() >= $limit as usize {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn resolve_query(
|
||||||
|
search: &Search,
|
||||||
|
query_results: QueryResults,
|
||||||
|
) -> Result<(
|
||||||
|
Vec<MediaCollection>,
|
||||||
|
Vec<Series>,
|
||||||
|
Vec<MovieListing>,
|
||||||
|
Vec<Episode>,
|
||||||
|
Vec<MusicVideo>,
|
||||||
|
)> {
|
||||||
|
let mut media_collection = vec![];
|
||||||
|
let mut series = vec![];
|
||||||
|
let mut movie_listing = vec![];
|
||||||
|
let mut episode = vec![];
|
||||||
|
let mut music_video = vec![];
|
||||||
|
|
||||||
|
resolve_query!(
|
||||||
|
search.search_top_results_limit,
|
||||||
|
media_collection,
|
||||||
|
query_results.top_results
|
||||||
|
);
|
||||||
|
resolve_query!(search.search_series_limit, series, query_results.series);
|
||||||
|
resolve_query!(
|
||||||
|
search.search_movie_listing_limit,
|
||||||
|
movie_listing,
|
||||||
|
query_results.movie_listing
|
||||||
|
);
|
||||||
|
resolve_query!(search.search_episode_limit, episode, query_results.episode);
|
||||||
|
resolve_query!(search.search_music_limit, music_video, query_results.music);
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
media_collection,
|
||||||
|
series,
|
||||||
|
movie_listing,
|
||||||
|
episode,
|
||||||
|
music_video,
|
||||||
|
))
|
||||||
|
}
|
||||||
47
crunchy-cli-core/src/search/filter.rs
Normal file
47
crunchy-cli-core/src/search/filter.rs
Normal file
|
|
@ -0,0 +1,47 @@
|
||||||
|
use crate::utils::parse::UrlFilter;
|
||||||
|
use crunchyroll_rs::{Episode, Locale, MovieListing, Season, Series};
|
||||||
|
|
||||||
|
pub struct FilterOptions {
|
||||||
|
pub audio: Vec<Locale>,
|
||||||
|
pub url_filter: UrlFilter,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FilterOptions {
|
||||||
|
pub fn check_series(&self, series: &Series) -> bool {
|
||||||
|
self.check_audio_language(&series.audio_locales)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn filter_seasons(&self, mut seasons: Vec<Season>) -> Vec<Season> {
|
||||||
|
seasons.retain(|s| {
|
||||||
|
self.check_audio_language(&s.audio_locales)
|
||||||
|
&& self.url_filter.is_season_valid(s.season_number)
|
||||||
|
});
|
||||||
|
seasons
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn filter_episodes(&self, mut episodes: Vec<Episode>) -> Vec<Episode> {
|
||||||
|
episodes.retain(|e| {
|
||||||
|
self.check_audio_language(&[e.audio_locale.clone()])
|
||||||
|
&& self
|
||||||
|
.url_filter
|
||||||
|
.is_episode_valid(e.sequence_number, e.season_number)
|
||||||
|
});
|
||||||
|
episodes
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn check_movie_listing(&self, movie_listing: &MovieListing) -> bool {
|
||||||
|
self.check_audio_language(
|
||||||
|
&movie_listing
|
||||||
|
.audio_locale
|
||||||
|
.clone()
|
||||||
|
.map_or(vec![], |a| vec![a.clone()]),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn check_audio_language(&self, audio: &[Locale]) -> bool {
|
||||||
|
if !self.audio.is_empty() {
|
||||||
|
return self.audio.iter().any(|a| audio.contains(a));
|
||||||
|
}
|
||||||
|
true
|
||||||
|
}
|
||||||
|
}
|
||||||
687
crunchy-cli-core/src/search/format.rs
Normal file
687
crunchy-cli-core/src/search/format.rs
Normal file
|
|
@ -0,0 +1,687 @@
|
||||||
|
use crate::search::filter::FilterOptions;
|
||||||
|
use anyhow::{bail, Result};
|
||||||
|
use crunchyroll_rs::media::{Stream, Subtitle};
|
||||||
|
use crunchyroll_rs::{
|
||||||
|
Concert, Crunchyroll, Episode, Locale, MediaCollection, Movie, MovieListing, MusicVideo,
|
||||||
|
Season, Series,
|
||||||
|
};
|
||||||
|
use regex::Regex;
|
||||||
|
use serde::Serialize;
|
||||||
|
use serde_json::{Map, Value};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::ops::Range;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatSeries {
|
||||||
|
pub id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub description: String,
|
||||||
|
pub release_year: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Series> for FormatSeries {
|
||||||
|
fn from(value: &Series) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id.clone(),
|
||||||
|
title: value.title.clone(),
|
||||||
|
description: value.description.clone(),
|
||||||
|
release_year: value.series_launch_year.unwrap_or_default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatSeason {
|
||||||
|
pub id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub description: String,
|
||||||
|
pub number: u32,
|
||||||
|
pub episodes: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Season> for FormatSeason {
|
||||||
|
fn from(value: &Season) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id.clone(),
|
||||||
|
title: value.title.clone(),
|
||||||
|
description: value.description.clone(),
|
||||||
|
number: value.season_number,
|
||||||
|
episodes: value.number_of_episodes,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatEpisode {
|
||||||
|
pub id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub description: String,
|
||||||
|
pub locale: Locale,
|
||||||
|
pub number: u32,
|
||||||
|
pub sequence_number: f32,
|
||||||
|
pub duration: i64,
|
||||||
|
pub air_date: i64,
|
||||||
|
pub premium_only: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Episode> for FormatEpisode {
|
||||||
|
fn from(value: &Episode) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id.clone(),
|
||||||
|
title: value.title.clone(),
|
||||||
|
description: value.description.clone(),
|
||||||
|
locale: value.audio_locale.clone(),
|
||||||
|
number: value.episode_number.unwrap_or_default(),
|
||||||
|
sequence_number: value.sequence_number,
|
||||||
|
duration: value.duration.num_milliseconds(),
|
||||||
|
air_date: value.episode_air_date.timestamp(),
|
||||||
|
premium_only: value.is_premium_only,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatMovieListing {
|
||||||
|
pub id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub description: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&MovieListing> for FormatMovieListing {
|
||||||
|
fn from(value: &MovieListing) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id.clone(),
|
||||||
|
title: value.title.clone(),
|
||||||
|
description: value.description.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatMovie {
|
||||||
|
pub id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub description: String,
|
||||||
|
pub duration: i64,
|
||||||
|
pub premium_only: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Movie> for FormatMovie {
|
||||||
|
fn from(value: &Movie) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id.clone(),
|
||||||
|
title: value.title.clone(),
|
||||||
|
description: value.description.clone(),
|
||||||
|
duration: value.duration.num_milliseconds(),
|
||||||
|
premium_only: value.is_premium_only,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatMusicVideo {
|
||||||
|
pub id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub description: String,
|
||||||
|
pub duration: i64,
|
||||||
|
pub premium_only: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&MusicVideo> for FormatMusicVideo {
|
||||||
|
fn from(value: &MusicVideo) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id.clone(),
|
||||||
|
title: value.title.clone(),
|
||||||
|
description: value.description.clone(),
|
||||||
|
duration: value.duration.num_milliseconds(),
|
||||||
|
premium_only: value.is_premium_only,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatConcert {
|
||||||
|
pub id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub description: String,
|
||||||
|
pub duration: i64,
|
||||||
|
pub premium_only: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Concert> for FormatConcert {
|
||||||
|
fn from(value: &Concert) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id.clone(),
|
||||||
|
title: value.title.clone(),
|
||||||
|
description: value.description.clone(),
|
||||||
|
duration: value.duration.num_milliseconds(),
|
||||||
|
premium_only: value.is_premium_only,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatStream {
|
||||||
|
pub locale: Locale,
|
||||||
|
pub dash_url: String,
|
||||||
|
pub is_drm: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Stream> for FormatStream {
|
||||||
|
fn from(value: &Stream) -> Self {
|
||||||
|
Self {
|
||||||
|
locale: value.audio_locale.clone(),
|
||||||
|
dash_url: value.url.clone(),
|
||||||
|
is_drm: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatSubtitle {
|
||||||
|
pub locale: Locale,
|
||||||
|
pub url: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Subtitle> for FormatSubtitle {
|
||||||
|
fn from(value: &Subtitle) -> Self {
|
||||||
|
Self {
|
||||||
|
locale: value.locale.clone(),
|
||||||
|
url: value.url.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Serialize)]
|
||||||
|
struct FormatAccount {
|
||||||
|
pub token: String,
|
||||||
|
pub id: String,
|
||||||
|
pub profile_name: String,
|
||||||
|
pub email: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FormatAccount {
|
||||||
|
pub async fn async_from(value: &Crunchyroll) -> Result<Self> {
|
||||||
|
let account = value.account().await?;
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
token: value.access_token().await,
|
||||||
|
id: account.account_id,
|
||||||
|
profile_name: account.profile_name,
|
||||||
|
email: account.email,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
|
||||||
|
enum Scope {
|
||||||
|
Series,
|
||||||
|
Season,
|
||||||
|
Episode,
|
||||||
|
MovieListing,
|
||||||
|
Movie,
|
||||||
|
MusicVideo,
|
||||||
|
Concert,
|
||||||
|
Stream,
|
||||||
|
Subtitle,
|
||||||
|
Account,
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! must_match_if_true {
|
||||||
|
($condition:expr => $media_collection:ident | $field:pat => $expr:expr) => {
|
||||||
|
if $condition {
|
||||||
|
match &$media_collection {
|
||||||
|
$field => Some($expr),
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Format {
|
||||||
|
pattern: Vec<(Range<usize>, Scope, String)>,
|
||||||
|
pattern_count: HashMap<Scope, u32>,
|
||||||
|
input: String,
|
||||||
|
filter_options: FilterOptions,
|
||||||
|
crunchyroll: Arc<Crunchyroll>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Format {
|
||||||
|
pub fn new(
|
||||||
|
input: String,
|
||||||
|
filter_options: FilterOptions,
|
||||||
|
crunchyroll: Arc<Crunchyroll>,
|
||||||
|
) -> Result<Self> {
|
||||||
|
let scope_regex = Regex::new(r"(?m)\{\{\s*(?P<scope>\w+)\.(?P<field>\w+)\s*}}").unwrap();
|
||||||
|
let mut pattern = vec![];
|
||||||
|
let mut pattern_count = HashMap::new();
|
||||||
|
|
||||||
|
macro_rules! generate_field_check {
|
||||||
|
($($scope:expr => $struct_:ident)+) => {
|
||||||
|
HashMap::from([
|
||||||
|
$(
|
||||||
|
(
|
||||||
|
$scope,
|
||||||
|
serde_json::from_value::<Map<String, Value>>(serde_json::to_value($struct_::default()).unwrap()).unwrap()
|
||||||
|
)
|
||||||
|
),+
|
||||||
|
])
|
||||||
|
};
|
||||||
|
}
|
||||||
|
let field_check = generate_field_check!(
|
||||||
|
Scope::Series => FormatSeries
|
||||||
|
Scope::Season => FormatSeason
|
||||||
|
Scope::Episode => FormatEpisode
|
||||||
|
Scope::MovieListing => FormatMovieListing
|
||||||
|
Scope::Movie => FormatMovie
|
||||||
|
Scope::MusicVideo => FormatMusicVideo
|
||||||
|
Scope::Concert => FormatConcert
|
||||||
|
Scope::Stream => FormatStream
|
||||||
|
Scope::Subtitle => FormatSubtitle
|
||||||
|
Scope::Account => FormatAccount
|
||||||
|
);
|
||||||
|
|
||||||
|
for capture in scope_regex.captures_iter(&input) {
|
||||||
|
let full = capture.get(0).unwrap();
|
||||||
|
let scope = capture.name("scope").unwrap().as_str();
|
||||||
|
let field = capture.name("field").unwrap().as_str();
|
||||||
|
|
||||||
|
let format_pattern_scope = match scope {
|
||||||
|
"series" => Scope::Series,
|
||||||
|
"season" => Scope::Season,
|
||||||
|
"episode" => Scope::Episode,
|
||||||
|
"movie_listing" => Scope::MovieListing,
|
||||||
|
"movie" => Scope::Movie,
|
||||||
|
"music_video" => Scope::MusicVideo,
|
||||||
|
"concert" => Scope::Concert,
|
||||||
|
"stream" => Scope::Stream,
|
||||||
|
"subtitle" => Scope::Subtitle,
|
||||||
|
"account" => Scope::Account,
|
||||||
|
_ => bail!("'{}.{}' is not a valid keyword", scope, field),
|
||||||
|
};
|
||||||
|
|
||||||
|
if field_check
|
||||||
|
.get(&format_pattern_scope)
|
||||||
|
.unwrap()
|
||||||
|
.get(field)
|
||||||
|
.is_none()
|
||||||
|
{
|
||||||
|
bail!("'{}.{}' is not a valid keyword", scope, field)
|
||||||
|
}
|
||||||
|
|
||||||
|
pattern.push((
|
||||||
|
full.start()..full.end(),
|
||||||
|
format_pattern_scope.clone(),
|
||||||
|
field.to_string(),
|
||||||
|
));
|
||||||
|
*pattern_count.entry(format_pattern_scope).or_default() += 1
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
pattern,
|
||||||
|
pattern_count,
|
||||||
|
input,
|
||||||
|
filter_options,
|
||||||
|
crunchyroll,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn parse(&self, media_collection: MediaCollection) -> Result<String> {
|
||||||
|
match &media_collection {
|
||||||
|
MediaCollection::Series(_)
|
||||||
|
| MediaCollection::Season(_)
|
||||||
|
| MediaCollection::Episode(_) => {
|
||||||
|
self.check_scopes(vec![
|
||||||
|
Scope::Series,
|
||||||
|
Scope::Season,
|
||||||
|
Scope::Episode,
|
||||||
|
Scope::Stream,
|
||||||
|
Scope::Subtitle,
|
||||||
|
Scope::Account,
|
||||||
|
])?;
|
||||||
|
|
||||||
|
self.parse_series(media_collection).await
|
||||||
|
}
|
||||||
|
MediaCollection::MovieListing(_) | MediaCollection::Movie(_) => {
|
||||||
|
self.check_scopes(vec![
|
||||||
|
Scope::MovieListing,
|
||||||
|
Scope::Movie,
|
||||||
|
Scope::Stream,
|
||||||
|
Scope::Subtitle,
|
||||||
|
Scope::Account,
|
||||||
|
])?;
|
||||||
|
|
||||||
|
self.parse_movie_listing(media_collection).await
|
||||||
|
}
|
||||||
|
MediaCollection::MusicVideo(_) => {
|
||||||
|
self.check_scopes(vec![
|
||||||
|
Scope::MusicVideo,
|
||||||
|
Scope::Stream,
|
||||||
|
Scope::Subtitle,
|
||||||
|
Scope::Account,
|
||||||
|
])?;
|
||||||
|
|
||||||
|
self.parse_music_video(media_collection).await
|
||||||
|
}
|
||||||
|
MediaCollection::Concert(_) => {
|
||||||
|
self.check_scopes(vec![
|
||||||
|
Scope::Concert,
|
||||||
|
Scope::Stream,
|
||||||
|
Scope::Subtitle,
|
||||||
|
Scope::Account,
|
||||||
|
])?;
|
||||||
|
|
||||||
|
self.parse_concert(media_collection).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn parse_series(&self, media_collection: MediaCollection) -> Result<String> {
|
||||||
|
let series_empty = self.check_pattern_count_empty(Scope::Series);
|
||||||
|
let season_empty = self.check_pattern_count_empty(Scope::Season);
|
||||||
|
let episode_empty = self.check_pattern_count_empty(Scope::Episode);
|
||||||
|
let stream_empty = self.check_pattern_count_empty(Scope::Stream)
|
||||||
|
&& self.check_pattern_count_empty(Scope::Subtitle);
|
||||||
|
let account_empty = self.check_pattern_count_empty(Scope::Account);
|
||||||
|
|
||||||
|
#[allow(clippy::type_complexity)]
|
||||||
|
let mut tree: Vec<(Season, Vec<(Episode, Vec<Stream>)>)> = vec![];
|
||||||
|
|
||||||
|
let series = if !series_empty {
|
||||||
|
let series = match &media_collection {
|
||||||
|
MediaCollection::Series(series) => series.clone(),
|
||||||
|
MediaCollection::Season(season) => season.series().await?,
|
||||||
|
MediaCollection::Episode(episode) => episode.series().await?,
|
||||||
|
_ => panic!(),
|
||||||
|
};
|
||||||
|
if !self.filter_options.check_series(&series) {
|
||||||
|
return Ok("".to_string());
|
||||||
|
}
|
||||||
|
series
|
||||||
|
} else {
|
||||||
|
Series::default()
|
||||||
|
};
|
||||||
|
if !season_empty || !episode_empty || !stream_empty {
|
||||||
|
let tmp_seasons = match &media_collection {
|
||||||
|
MediaCollection::Series(series) => series.seasons().await?,
|
||||||
|
MediaCollection::Season(season) => vec![season.clone()],
|
||||||
|
MediaCollection::Episode(_) => vec![],
|
||||||
|
_ => panic!(),
|
||||||
|
};
|
||||||
|
let mut seasons = vec![];
|
||||||
|
for season in tmp_seasons {
|
||||||
|
seasons.push(season.clone());
|
||||||
|
for version in season.versions {
|
||||||
|
if season.id == version.id {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if self.filter_options.audio.contains(&version.audio_locale) {
|
||||||
|
seasons.push(version.season().await?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tree.extend(
|
||||||
|
self.filter_options
|
||||||
|
.filter_seasons(seasons)
|
||||||
|
.into_iter()
|
||||||
|
.map(|s| (s, vec![])),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
tree.push((Season::default(), vec![]))
|
||||||
|
}
|
||||||
|
if !episode_empty || !stream_empty {
|
||||||
|
match &media_collection {
|
||||||
|
MediaCollection::Episode(episode) => {
|
||||||
|
let mut episodes = vec![episode.clone()];
|
||||||
|
for version in &episode.versions {
|
||||||
|
if episode.id == version.id {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if self.filter_options.audio.contains(&version.audio_locale) {
|
||||||
|
episodes.push(version.episode().await?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tree.push((
|
||||||
|
Season::default(),
|
||||||
|
episodes
|
||||||
|
.into_iter()
|
||||||
|
.filter(|e| self.filter_options.audio.contains(&e.audio_locale))
|
||||||
|
.map(|e| (e, vec![]))
|
||||||
|
.collect(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
for (season, episodes) in tree.iter_mut() {
|
||||||
|
episodes.extend(
|
||||||
|
self.filter_options
|
||||||
|
.filter_episodes(season.episodes().await?)
|
||||||
|
.into_iter()
|
||||||
|
.map(|e| (e, vec![])),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
for (_, episodes) in tree.iter_mut() {
|
||||||
|
episodes.push((Episode::default(), vec![]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !stream_empty {
|
||||||
|
for (_, episodes) in tree.iter_mut() {
|
||||||
|
for (episode, streams) in episodes {
|
||||||
|
let stream = episode.stream_maybe_without_drm().await?;
|
||||||
|
stream.clone().invalidate().await?;
|
||||||
|
streams.push(stream)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for (_, episodes) in tree.iter_mut() {
|
||||||
|
for (_, streams) in episodes {
|
||||||
|
streams.push(Stream::default())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut output = vec![];
|
||||||
|
let account_map = if !account_empty {
|
||||||
|
self.serializable_to_json_map(FormatAccount::async_from(&self.crunchyroll).await?)
|
||||||
|
} else {
|
||||||
|
Map::default()
|
||||||
|
};
|
||||||
|
let series_map = self.serializable_to_json_map(FormatSeries::from(&series));
|
||||||
|
for (season, episodes) in tree {
|
||||||
|
let season_map = self.serializable_to_json_map(FormatSeason::from(&season));
|
||||||
|
for (episode, streams) in episodes {
|
||||||
|
let episode_map = self.serializable_to_json_map(FormatEpisode::from(&episode));
|
||||||
|
for stream in streams {
|
||||||
|
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));
|
||||||
|
|
||||||
|
output.push(
|
||||||
|
self.replace_all(
|
||||||
|
HashMap::from([
|
||||||
|
(Scope::Account, &account_map),
|
||||||
|
(Scope::Series, &series_map),
|
||||||
|
(Scope::Season, &season_map),
|
||||||
|
(Scope::Episode, &episode_map),
|
||||||
|
(Scope::Stream, &stream_map),
|
||||||
|
]),
|
||||||
|
stream,
|
||||||
|
)
|
||||||
|
.unwrap_or_default(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(output.join("\n"))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn parse_movie_listing(&self, media_collection: MediaCollection) -> Result<String> {
|
||||||
|
let movie_listing_empty = self.check_pattern_count_empty(Scope::MovieListing);
|
||||||
|
let movie_empty = self.check_pattern_count_empty(Scope::Movie);
|
||||||
|
let stream_empty = self.check_pattern_count_empty(Scope::Stream);
|
||||||
|
|
||||||
|
let mut tree: Vec<(Movie, Vec<Stream>)> = vec![];
|
||||||
|
|
||||||
|
let movie_listing = if !movie_listing_empty {
|
||||||
|
let movie_listing = match &media_collection {
|
||||||
|
MediaCollection::MovieListing(movie_listing) => movie_listing.clone(),
|
||||||
|
MediaCollection::Movie(movie) => movie.movie_listing().await?,
|
||||||
|
_ => panic!(),
|
||||||
|
};
|
||||||
|
if !self.filter_options.check_movie_listing(&movie_listing) {
|
||||||
|
return Ok("".to_string());
|
||||||
|
}
|
||||||
|
movie_listing
|
||||||
|
} else {
|
||||||
|
MovieListing::default()
|
||||||
|
};
|
||||||
|
if !movie_empty || !stream_empty {
|
||||||
|
let movies = match &media_collection {
|
||||||
|
MediaCollection::MovieListing(movie_listing) => movie_listing.movies().await?,
|
||||||
|
MediaCollection::Movie(movie) => vec![movie.clone()],
|
||||||
|
_ => panic!(),
|
||||||
|
};
|
||||||
|
tree.extend(movies.into_iter().map(|m| (m, vec![])))
|
||||||
|
}
|
||||||
|
if !stream_empty {
|
||||||
|
for (movie, streams) in tree.iter_mut() {
|
||||||
|
streams.push(movie.stream_maybe_without_drm().await?)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for (_, streams) in tree.iter_mut() {
|
||||||
|
streams.push(Stream::default())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut output = vec![];
|
||||||
|
let movie_listing_map =
|
||||||
|
self.serializable_to_json_map(FormatMovieListing::from(&movie_listing));
|
||||||
|
for (movie, streams) in tree {
|
||||||
|
let movie_map = self.serializable_to_json_map(FormatMovie::from(&movie));
|
||||||
|
for stream in streams {
|
||||||
|
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));
|
||||||
|
|
||||||
|
output.push(
|
||||||
|
self.replace_all(
|
||||||
|
HashMap::from([
|
||||||
|
(Scope::MovieListing, &movie_listing_map),
|
||||||
|
(Scope::Movie, &movie_map),
|
||||||
|
(Scope::Stream, &stream_map),
|
||||||
|
]),
|
||||||
|
stream,
|
||||||
|
)
|
||||||
|
.unwrap_or_default(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(output.join("\n"))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn parse_music_video(&self, media_collection: MediaCollection) -> Result<String> {
|
||||||
|
let music_video_empty = self.check_pattern_count_empty(Scope::MusicVideo);
|
||||||
|
let stream_empty = self.check_pattern_count_empty(Scope::Stream);
|
||||||
|
|
||||||
|
let music_video = must_match_if_true!(!music_video_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.clone()).unwrap_or_default();
|
||||||
|
let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.stream_maybe_without_drm().await?).unwrap_or_default();
|
||||||
|
|
||||||
|
let music_video_map = self.serializable_to_json_map(FormatMusicVideo::from(&music_video));
|
||||||
|
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));
|
||||||
|
|
||||||
|
let output = self
|
||||||
|
.replace_all(
|
||||||
|
HashMap::from([
|
||||||
|
(Scope::MusicVideo, &music_video_map),
|
||||||
|
(Scope::Stream, &stream_map),
|
||||||
|
]),
|
||||||
|
stream,
|
||||||
|
)
|
||||||
|
.unwrap_or_default();
|
||||||
|
Ok(output)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn parse_concert(&self, media_collection: MediaCollection) -> Result<String> {
|
||||||
|
let concert_empty = self.check_pattern_count_empty(Scope::Concert);
|
||||||
|
let stream_empty = self.check_pattern_count_empty(Scope::Stream);
|
||||||
|
|
||||||
|
let concert = must_match_if_true!(!concert_empty => media_collection|MediaCollection::Concert(concert) => concert.clone()).unwrap_or_default();
|
||||||
|
let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::Concert(concert) => concert.stream_maybe_without_drm().await?).unwrap_or_default();
|
||||||
|
|
||||||
|
let concert_map = self.serializable_to_json_map(FormatConcert::from(&concert));
|
||||||
|
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));
|
||||||
|
|
||||||
|
let output = self
|
||||||
|
.replace_all(
|
||||||
|
HashMap::from([(Scope::Concert, &concert_map), (Scope::Stream, &stream_map)]),
|
||||||
|
stream,
|
||||||
|
)
|
||||||
|
.unwrap_or_default();
|
||||||
|
Ok(output)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn serializable_to_json_map<S: Serialize>(&self, s: S) -> Map<String, Value> {
|
||||||
|
serde_json::from_value(serde_json::to_value(s).unwrap()).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn check_pattern_count_empty(&self, scope: Scope) -> bool {
|
||||||
|
self.pattern_count.get(&scope).cloned().unwrap_or_default() == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
fn check_scopes(&self, available_scopes: Vec<Scope>) -> Result<()> {
|
||||||
|
for (_, scope, field) in self.pattern.iter() {
|
||||||
|
if !available_scopes.contains(scope) {
|
||||||
|
bail!(
|
||||||
|
"'{}.{}' is not a valid keyword",
|
||||||
|
format!("{:?}", scope).to_lowercase(),
|
||||||
|
field
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn replace_all(
|
||||||
|
&self,
|
||||||
|
values: HashMap<Scope, &Map<String, Value>>,
|
||||||
|
mut stream: Stream,
|
||||||
|
) -> Option<String> {
|
||||||
|
if stream.subtitles.is_empty() {
|
||||||
|
if !self.check_pattern_count_empty(Scope::Subtitle) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
stream
|
||||||
|
.subtitles
|
||||||
|
.insert(Locale::Custom("".to_string()), Subtitle::default());
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut output = vec![];
|
||||||
|
for (_, subtitle) in stream.subtitles {
|
||||||
|
let subtitle_map = self.serializable_to_json_map(FormatSubtitle::from(&subtitle));
|
||||||
|
let mut tmp_values = values.clone();
|
||||||
|
tmp_values.insert(Scope::Subtitle, &subtitle_map);
|
||||||
|
output.push(self.replace(tmp_values))
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(output.join("\n"))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn replace(&self, values: HashMap<Scope, &Map<String, Value>>) -> String {
|
||||||
|
let mut output = self.input.clone();
|
||||||
|
let mut offset = 0;
|
||||||
|
for (range, scope, field) in &self.pattern {
|
||||||
|
let item =
|
||||||
|
serde_plain::to_string(values.get(scope).unwrap().get(field.as_str()).unwrap())
|
||||||
|
.unwrap();
|
||||||
|
let start = (range.start as i32 + offset) as usize;
|
||||||
|
let end = (range.end as i32 + offset) as usize;
|
||||||
|
output.replace_range(start..end, &item);
|
||||||
|
offset += item.len() as i32 - range.len() as i32;
|
||||||
|
}
|
||||||
|
|
||||||
|
output
|
||||||
|
}
|
||||||
|
}
|
||||||
5
crunchy-cli-core/src/search/mod.rs
Normal file
5
crunchy-cli-core/src/search/mod.rs
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
mod command;
|
||||||
|
mod filter;
|
||||||
|
mod format;
|
||||||
|
|
||||||
|
pub use command::Search;
|
||||||
|
|
@ -1,11 +1,61 @@
|
||||||
use crate::utils::parse::parse_resolution;
|
use crate::utils::parse::parse_resolution;
|
||||||
use crunchyroll_rs::media::Resolution;
|
use crunchyroll_rs::media::Resolution;
|
||||||
|
use regex::Regex;
|
||||||
use reqwest::Proxy;
|
use reqwest::Proxy;
|
||||||
|
|
||||||
pub fn clap_parse_resolution(s: &str) -> Result<Resolution, String> {
|
pub fn clap_parse_resolution(s: &str) -> Result<Resolution, String> {
|
||||||
parse_resolution(s.to_string()).map_err(|e| e.to_string())
|
parse_resolution(s.to_string()).map_err(|e| e.to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn clap_parse_proxy(s: &str) -> Result<Proxy, String> {
|
pub fn clap_parse_proxies(s: &str) -> Result<(Option<Proxy>, Option<Proxy>), String> {
|
||||||
Proxy::all(s).map_err(|e| e.to_string())
|
let double_proxy_regex =
|
||||||
|
Regex::new(r"^(?P<first>(https?|socks5h?)://.+):(?P<second>(https?|socks5h?)://.+)$")
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
if let Some(capture) = double_proxy_regex.captures(s) {
|
||||||
|
// checks if the input is formatted like 'https://example.com:socks5://examples.com' and
|
||||||
|
// splits the string into 2 separate proxies at the middle colon
|
||||||
|
|
||||||
|
let first = capture.name("first").unwrap().as_str();
|
||||||
|
let second = capture.name("second").unwrap().as_str();
|
||||||
|
Ok((
|
||||||
|
Some(Proxy::all(first).map_err(|e| format!("first proxy: {e}"))?),
|
||||||
|
Some(Proxy::all(second).map_err(|e| format!("second proxy: {e}"))?),
|
||||||
|
))
|
||||||
|
} else if s.starts_with(':') {
|
||||||
|
// checks if the input is formatted like ':https://example.com' and returns a proxy on the
|
||||||
|
// second tuple position
|
||||||
|
Ok((
|
||||||
|
None,
|
||||||
|
Some(Proxy::all(s.trim_start_matches(':')).map_err(|e| e.to_string())?),
|
||||||
|
))
|
||||||
|
} else if s.ends_with(':') {
|
||||||
|
// checks if the input is formatted like 'https://example.com:' and returns a proxy on the
|
||||||
|
// first tuple position
|
||||||
|
Ok((
|
||||||
|
Some(Proxy::all(s.trim_end_matches(':')).map_err(|e| e.to_string())?),
|
||||||
|
None,
|
||||||
|
))
|
||||||
|
} else {
|
||||||
|
// returns the same proxy for both tuple positions
|
||||||
|
let proxy = Proxy::all(s).map_err(|e| e.to_string())?;
|
||||||
|
Ok((Some(proxy.clone()), Some(proxy)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn clap_parse_speed_limit(s: &str) -> Result<u32, String> {
|
||||||
|
let quota = s.to_lowercase();
|
||||||
|
|
||||||
|
let bytes = if let Ok(b) = quota.parse() {
|
||||||
|
b
|
||||||
|
} else if let Ok(b) = quota.trim_end_matches('b').parse::<u32>() {
|
||||||
|
b
|
||||||
|
} else if let Ok(kb) = quota.trim_end_matches("kb").parse::<u32>() {
|
||||||
|
kb * 1024
|
||||||
|
} else if let Ok(mb) = quota.trim_end_matches("mb").parse::<u32>() {
|
||||||
|
mb * 1024 * 1024
|
||||||
|
} else {
|
||||||
|
return Err("Invalid speed limit".to_string());
|
||||||
|
};
|
||||||
|
Ok(bytes)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,9 @@
|
||||||
|
use crate::utils::rate_limit::RateLimiterService;
|
||||||
use crunchyroll_rs::Crunchyroll;
|
use crunchyroll_rs::Crunchyroll;
|
||||||
|
use reqwest::Client;
|
||||||
|
|
||||||
pub struct Context {
|
pub struct Context {
|
||||||
pub crunchy: Crunchyroll,
|
pub crunchy: Crunchyroll,
|
||||||
|
pub client: Client,
|
||||||
|
pub rate_limiter: Option<RateLimiterService>,
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load diff
|
|
@ -1,12 +1,15 @@
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use std::env;
|
use std::fmt;
|
||||||
|
use std::fmt::Formatter;
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
pub const SOFTSUB_CONTAINERS: [&str; 3] = ["mkv", "mov", "mp4"];
|
||||||
|
|
||||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||||
pub enum FFmpegPreset {
|
pub enum FFmpegPreset {
|
||||||
Predefined(FFmpegCodec, Option<FFmpegHwAccel>, FFmpegQuality),
|
Predefined(FFmpegCodec, Option<FFmpegHwAccel>, FFmpegQuality),
|
||||||
Custom(Option<String>, Option<String>),
|
Custom(Option<String>),
|
||||||
}
|
}
|
||||||
|
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
|
|
@ -32,11 +35,11 @@ macro_rules! ffmpeg_enum {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToString for $name {
|
impl fmt::Display for $name {
|
||||||
fn to_string(&self) -> String {
|
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
$(
|
$(
|
||||||
&$name::$field => stringify!($field).to_string().to_lowercase()
|
&$name::$field => write!(f, "{}", stringify!($field).to_string().to_lowercase())
|
||||||
),*
|
),*
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -67,7 +70,9 @@ ffmpeg_enum! {
|
||||||
|
|
||||||
ffmpeg_enum! {
|
ffmpeg_enum! {
|
||||||
enum FFmpegHwAccel {
|
enum FFmpegHwAccel {
|
||||||
Nvidia
|
Nvidia,
|
||||||
|
Amd,
|
||||||
|
Apple
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -81,7 +86,7 @@ ffmpeg_enum! {
|
||||||
|
|
||||||
impl Default for FFmpegPreset {
|
impl Default for FFmpegPreset {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self::Custom(None, Some("-c:v copy -c:a copy".to_string()))
|
Self::Custom(Some("-c:v copy -c:a copy".to_string()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -99,7 +104,11 @@ impl FFmpegPreset {
|
||||||
FFmpegHwAccel::all(),
|
FFmpegHwAccel::all(),
|
||||||
FFmpegQuality::all(),
|
FFmpegQuality::all(),
|
||||||
),
|
),
|
||||||
(FFmpegCodec::Av1, vec![], FFmpegQuality::all()),
|
(
|
||||||
|
FFmpegCodec::Av1,
|
||||||
|
vec![FFmpegHwAccel::Amd],
|
||||||
|
FFmpegQuality::all(),
|
||||||
|
),
|
||||||
];
|
];
|
||||||
|
|
||||||
let mut return_values = vec![];
|
let mut return_values = vec![];
|
||||||
|
|
@ -128,23 +137,16 @@ impl FFmpegPreset {
|
||||||
for (codec, hwaccel, quality) in FFmpegPreset::available_matches() {
|
for (codec, hwaccel, quality) in FFmpegPreset::available_matches() {
|
||||||
let mut description_details = vec![];
|
let mut description_details = vec![];
|
||||||
if let Some(h) = &hwaccel {
|
if let Some(h) = &hwaccel {
|
||||||
description_details.push(format!("{} hardware acceleration", h.to_string()))
|
description_details.push(format!("{h} hardware acceleration"))
|
||||||
}
|
}
|
||||||
if let Some(q) = &quality {
|
if let Some(q) = &quality {
|
||||||
description_details.push(format!("{} video quality/compression", q.to_string()))
|
description_details.push(format!("{q} video quality/compression"))
|
||||||
}
|
}
|
||||||
|
|
||||||
let description = if description_details.len() == 0 {
|
let description = if description_details.is_empty() {
|
||||||
format!(
|
format!("{codec} encoded with default video quality/compression",)
|
||||||
"{} encoded with default video quality/compression",
|
|
||||||
codec.to_string()
|
|
||||||
)
|
|
||||||
} else if description_details.len() == 1 {
|
} else if description_details.len() == 1 {
|
||||||
format!(
|
format!("{} encoded with {}", codec, description_details[0])
|
||||||
"{} encoded with {}",
|
|
||||||
codec.to_string(),
|
|
||||||
description_details[0]
|
|
||||||
)
|
|
||||||
} else {
|
} else {
|
||||||
let first = description_details.remove(0);
|
let first = description_details.remove(0);
|
||||||
let last = description_details.remove(description_details.len() - 1);
|
let last = description_details.remove(description_details.len() - 1);
|
||||||
|
|
@ -154,13 +156,7 @@ impl FFmpegPreset {
|
||||||
"".to_string()
|
"".to_string()
|
||||||
};
|
};
|
||||||
|
|
||||||
format!(
|
format!("{codec} encoded with {first}{mid} and {last}",)
|
||||||
"{} encoded with {}{} and {}",
|
|
||||||
codec.to_string(),
|
|
||||||
first,
|
|
||||||
mid,
|
|
||||||
last
|
|
||||||
)
|
|
||||||
};
|
};
|
||||||
|
|
||||||
return_values.push(format!(
|
return_values.push(format!(
|
||||||
|
|
@ -181,27 +177,8 @@ impl FFmpegPreset {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn parse(s: &str) -> Result<FFmpegPreset, String> {
|
pub(crate) fn parse(s: &str) -> Result<FFmpegPreset, String> {
|
||||||
let env_ffmpeg_input_args = env::var("FFMPEG_INPUT_ARGS").ok();
|
if !PREDEFINED_PRESET.is_match(s) {
|
||||||
let env_ffmpeg_output_args = env::var("FFMPEG_OUTPUT_ARGS").ok();
|
return Ok(FFmpegPreset::Custom(Some(s.to_string())));
|
||||||
|
|
||||||
if env_ffmpeg_input_args.is_some() || env_ffmpeg_output_args.is_some() {
|
|
||||||
if let Some(input) = &env_ffmpeg_input_args {
|
|
||||||
if shlex::split(input).is_none() {
|
|
||||||
return Err(format!("Failed to find custom ffmpeg input '{}' (`FFMPEG_INPUT_ARGS` env variable)", input));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(output) = &env_ffmpeg_output_args {
|
|
||||||
if shlex::split(output).is_none() {
|
|
||||||
return Err(format!("Failed to find custom ffmpeg output '{}' (`FFMPEG_INPUT_ARGS` env variable)", output));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return Ok(FFmpegPreset::Custom(
|
|
||||||
env_ffmpeg_input_args,
|
|
||||||
env_ffmpeg_output_args,
|
|
||||||
));
|
|
||||||
} else if !PREDEFINED_PRESET.is_match(s) {
|
|
||||||
return Ok(FFmpegPreset::Custom(None, Some(s.to_string())));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut codec: Option<FFmpegCodec> = None;
|
let mut codec: Option<FFmpegCodec> = None;
|
||||||
|
|
@ -213,11 +190,7 @@ impl FFmpegPreset {
|
||||||
.find(|p| p.to_string() == token.to_lowercase())
|
.find(|p| p.to_string() == token.to_lowercase())
|
||||||
{
|
{
|
||||||
if let Some(cc) = codec {
|
if let Some(cc) = codec {
|
||||||
return Err(format!(
|
return Err(format!("cannot use multiple codecs (found {cc} and {c})",));
|
||||||
"cannot use multiple codecs (found {} and {})",
|
|
||||||
cc.to_string(),
|
|
||||||
c.to_string()
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
codec = Some(c)
|
codec = Some(c)
|
||||||
} else if let Some(h) = FFmpegHwAccel::all()
|
} else if let Some(h) = FFmpegHwAccel::all()
|
||||||
|
|
@ -226,9 +199,7 @@ impl FFmpegPreset {
|
||||||
{
|
{
|
||||||
if let Some(hh) = hwaccel {
|
if let Some(hh) = hwaccel {
|
||||||
return Err(format!(
|
return Err(format!(
|
||||||
"cannot use multiple hardware accelerations (found {} and {})",
|
"cannot use multiple hardware accelerations (found {hh} and {h})",
|
||||||
hh.to_string(),
|
|
||||||
h.to_string()
|
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
hwaccel = Some(h)
|
hwaccel = Some(h)
|
||||||
|
|
@ -238,15 +209,13 @@ impl FFmpegPreset {
|
||||||
{
|
{
|
||||||
if let Some(qq) = quality {
|
if let Some(qq) = quality {
|
||||||
return Err(format!(
|
return Err(format!(
|
||||||
"cannot use multiple ffmpeg preset qualities (found {} and {})",
|
"cannot use multiple ffmpeg preset qualities (found {qq} and {q})",
|
||||||
qq.to_string(),
|
|
||||||
q.to_string()
|
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
quality = Some(q)
|
quality = Some(q)
|
||||||
} else {
|
} else {
|
||||||
return Err(format!(
|
return Err(format!(
|
||||||
"'{}' is not a valid ffmpeg preset (unknown token '{}'",
|
"'{}' is not a valid ffmpeg preset (unknown token '{}')",
|
||||||
s, token
|
s, token
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
@ -258,7 +227,7 @@ impl FFmpegPreset {
|
||||||
hwaccel.clone(),
|
hwaccel.clone(),
|
||||||
quality.clone(),
|
quality.clone(),
|
||||||
)) {
|
)) {
|
||||||
return Err(format!("ffmpeg preset is not supported"));
|
return Err("ffmpeg preset is not supported".to_string());
|
||||||
}
|
}
|
||||||
Ok(FFmpegPreset::Predefined(
|
Ok(FFmpegPreset::Predefined(
|
||||||
c,
|
c,
|
||||||
|
|
@ -266,14 +235,14 @@ impl FFmpegPreset {
|
||||||
quality.unwrap_or(FFmpegQuality::Normal),
|
quality.unwrap_or(FFmpegQuality::Normal),
|
||||||
))
|
))
|
||||||
} else {
|
} else {
|
||||||
Err(format!("cannot use ffmpeg preset with without a codec"))
|
Err("cannot use ffmpeg preset with without a codec".to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn into_input_output_args(self) -> (Vec<String>, Vec<String>) {
|
pub(crate) fn into_input_output_args(self) -> (Vec<String>, Vec<String>) {
|
||||||
match self {
|
match self {
|
||||||
FFmpegPreset::Custom(input, output) => (
|
FFmpegPreset::Custom(output) => (
|
||||||
input.map_or(vec![], |i| shlex::split(&i).unwrap_or_default()),
|
vec![],
|
||||||
output.map_or(vec![], |o| shlex::split(&o).unwrap_or_default()),
|
output.map_or(vec![], |o| shlex::split(&o).unwrap_or_default()),
|
||||||
),
|
),
|
||||||
FFmpegPreset::Predefined(codec, hwaccel_opt, quality) => {
|
FFmpegPreset::Predefined(codec, hwaccel_opt, quality) => {
|
||||||
|
|
@ -282,31 +251,12 @@ impl FFmpegPreset {
|
||||||
|
|
||||||
match codec {
|
match codec {
|
||||||
FFmpegCodec::H264 => {
|
FFmpegCodec::H264 => {
|
||||||
if let Some(hwaccel) = hwaccel_opt {
|
let mut crf_quality = || match quality {
|
||||||
match hwaccel {
|
|
||||||
FFmpegHwAccel::Nvidia => {
|
|
||||||
input.extend([
|
|
||||||
"-hwaccel",
|
|
||||||
"cuda",
|
|
||||||
"-hwaccel_output_format",
|
|
||||||
"cuda",
|
|
||||||
"-c:v",
|
|
||||||
"h264_cuvid",
|
|
||||||
]);
|
|
||||||
output.extend(["-c:v", "h264_nvenc", "-c:a", "copy"])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
output.extend(["-c:v", "libx264", "-c:a", "copy"])
|
|
||||||
}
|
|
||||||
|
|
||||||
match quality {
|
|
||||||
FFmpegQuality::Lossless => output.extend(["-crf", "18"]),
|
FFmpegQuality::Lossless => output.extend(["-crf", "18"]),
|
||||||
FFmpegQuality::Normal => (),
|
FFmpegQuality::Normal => (),
|
||||||
FFmpegQuality::Low => output.extend(["-crf", "35"]),
|
FFmpegQuality::Low => output.extend(["-crf", "35"]),
|
||||||
}
|
};
|
||||||
}
|
|
||||||
FFmpegCodec::H265 => {
|
|
||||||
if let Some(hwaccel) = hwaccel_opt {
|
if let Some(hwaccel) = hwaccel_opt {
|
||||||
match hwaccel {
|
match hwaccel {
|
||||||
FFmpegHwAccel::Nvidia => {
|
FFmpegHwAccel::Nvidia => {
|
||||||
|
|
@ -318,26 +268,99 @@ impl FFmpegPreset {
|
||||||
"-c:v",
|
"-c:v",
|
||||||
"h264_cuvid",
|
"h264_cuvid",
|
||||||
]);
|
]);
|
||||||
output.extend(["-c:v", "hevc_nvenc", "-c:a", "copy"])
|
crf_quality();
|
||||||
|
output.extend(["-c:v", "h264_nvenc", "-c:a", "copy"])
|
||||||
|
}
|
||||||
|
FFmpegHwAccel::Amd => {
|
||||||
|
crf_quality();
|
||||||
|
output.extend(["-c:v", "h264_amf", "-c:a", "copy"])
|
||||||
|
}
|
||||||
|
FFmpegHwAccel::Apple => {
|
||||||
|
// Apple's Video Toolbox encoders ignore `-crf`, use `-q:v`
|
||||||
|
// instead. It's on a scale of 1-100, 100 being lossless. Just
|
||||||
|
// did some math ((-a/51+1)*99+1 where `a` is the old crf value)
|
||||||
|
// so these settings very likely need some more tweaking
|
||||||
|
match quality {
|
||||||
|
FFmpegQuality::Lossless => output.extend(["-q:v", "65"]),
|
||||||
|
FFmpegQuality::Normal => (),
|
||||||
|
FFmpegQuality::Low => output.extend(["-q:v", "32"]),
|
||||||
|
}
|
||||||
|
|
||||||
|
output.extend(["-c:v", "h264_videotoolbox", "-c:a", "copy"])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
output.extend(["-c:v", "libx265", "-c:a", "copy"])
|
crf_quality();
|
||||||
|
output.extend(["-c:v", "libx264", "-c:a", "copy"])
|
||||||
}
|
}
|
||||||
|
}
|
||||||
match quality {
|
FFmpegCodec::H265 => {
|
||||||
|
let mut crf_quality = || match quality {
|
||||||
FFmpegQuality::Lossless => output.extend(["-crf", "20"]),
|
FFmpegQuality::Lossless => output.extend(["-crf", "20"]),
|
||||||
FFmpegQuality::Normal => (),
|
FFmpegQuality::Normal => (),
|
||||||
FFmpegQuality::Low => output.extend(["-crf", "35"]),
|
FFmpegQuality::Low => output.extend(["-crf", "35"]),
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(hwaccel) = hwaccel_opt {
|
||||||
|
match hwaccel {
|
||||||
|
FFmpegHwAccel::Nvidia => {
|
||||||
|
input.extend([
|
||||||
|
"-hwaccel",
|
||||||
|
"cuda",
|
||||||
|
"-hwaccel_output_format",
|
||||||
|
"cuda",
|
||||||
|
"-c:v",
|
||||||
|
"h264_cuvid",
|
||||||
|
]);
|
||||||
|
crf_quality();
|
||||||
|
output.extend([
|
||||||
|
"-c:v",
|
||||||
|
"hevc_nvenc",
|
||||||
|
"-c:a",
|
||||||
|
"copy",
|
||||||
|
"-tag:v",
|
||||||
|
"hvc1",
|
||||||
|
])
|
||||||
|
}
|
||||||
|
FFmpegHwAccel::Amd => {
|
||||||
|
crf_quality();
|
||||||
|
output.extend(["-c:v", "hevc_amf", "-c:a", "copy"])
|
||||||
|
}
|
||||||
|
FFmpegHwAccel::Apple => {
|
||||||
|
// See the comment for apple h264 hwaccel
|
||||||
|
match quality {
|
||||||
|
FFmpegQuality::Lossless => output.extend(["-q:v", "61"]),
|
||||||
|
FFmpegQuality::Normal => (),
|
||||||
|
FFmpegQuality::Low => output.extend(["-q:v", "32"]),
|
||||||
|
}
|
||||||
|
|
||||||
|
output.extend([
|
||||||
|
"-c:v",
|
||||||
|
"hevc_videotoolbox",
|
||||||
|
"-c:a",
|
||||||
|
"copy",
|
||||||
|
"-tag:v",
|
||||||
|
"hvc1",
|
||||||
|
])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
crf_quality();
|
||||||
|
output.extend(["-c:v", "libx265", "-c:a", "copy", "-tag:v", "hvc1"])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
FFmpegCodec::Av1 => {
|
FFmpegCodec::Av1 => {
|
||||||
output.extend(["-c:v", "libsvtav1", "-c:a", "copy"]);
|
let mut crf_quality = || match quality {
|
||||||
|
|
||||||
match quality {
|
|
||||||
FFmpegQuality::Lossless => output.extend(["-crf", "22"]),
|
FFmpegQuality::Lossless => output.extend(["-crf", "22"]),
|
||||||
FFmpegQuality::Normal => (),
|
FFmpegQuality::Normal => (),
|
||||||
FFmpegQuality::Low => output.extend(["-crf", "35"]),
|
FFmpegQuality::Low => output.extend(["-crf", "35"]),
|
||||||
|
};
|
||||||
|
|
||||||
|
crf_quality();
|
||||||
|
if let Some(FFmpegHwAccel::Amd) = hwaccel_opt {
|
||||||
|
output.extend(["-c:v", "av1_amf", "-c:a", "copy"]);
|
||||||
|
} else {
|
||||||
|
output.extend(["-c:v", "libsvtav1", "-c:a", "copy"]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,27 +1,407 @@
|
||||||
|
use crate::utils::format::{SingleFormat, SingleFormatCollection};
|
||||||
|
use crate::utils::interactive_select::{check_for_duplicated_seasons, get_duplicated_seasons};
|
||||||
|
use crate::utils::parse::{fract, UrlFilter};
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use crunchyroll_rs::{
|
use crunchyroll_rs::{
|
||||||
Concert, Episode, MediaCollection, Movie, MovieListing, MusicVideo, Season, Series,
|
Concert, Episode, Locale, MediaCollection, Movie, MovieListing, MusicVideo, Season, Series,
|
||||||
};
|
};
|
||||||
|
use log::{info, warn};
|
||||||
|
use std::collections::{BTreeMap, HashMap};
|
||||||
|
use std::ops::Not;
|
||||||
|
|
||||||
// Check when https://github.com/dtolnay/async-trait/issues/224 is resolved and update async-trait
|
pub(crate) enum FilterMediaScope<'a> {
|
||||||
// to the new fixed version (as this causes some issues)
|
Series(&'a Series),
|
||||||
#[async_trait::async_trait]
|
Season(&'a Season),
|
||||||
pub trait Filter {
|
/// Always contains 1 or 2 episodes.
|
||||||
type T: Send + Sized;
|
/// - 1: The episode's audio is completely missing
|
||||||
type Output: Send + Sized;
|
/// - 2: The requested audio is only available from first entry to last entry
|
||||||
|
Episode(Vec<&'a Episode>),
|
||||||
|
}
|
||||||
|
|
||||||
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>>;
|
pub(crate) struct Filter {
|
||||||
async fn visit_season(&mut self, season: Season) -> Result<Vec<Episode>>;
|
url_filter: UrlFilter,
|
||||||
async fn visit_episode(&mut self, episode: Episode) -> Result<Option<Self::T>>;
|
|
||||||
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>>;
|
|
||||||
async fn visit_movie(&mut self, movie: Movie) -> Result<Option<Self::T>>;
|
|
||||||
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Option<Self::T>>;
|
|
||||||
async fn visit_concert(&mut self, concert: Concert) -> Result<Option<Self::T>>;
|
|
||||||
|
|
||||||
async fn visit(mut self, media_collection: MediaCollection) -> Result<Self::Output>
|
skip_specials: bool,
|
||||||
where
|
interactive_input: bool,
|
||||||
Self: Send + Sized,
|
|
||||||
{
|
relative_episode_number: bool,
|
||||||
|
|
||||||
|
audio_locales: Vec<Locale>,
|
||||||
|
subtitle_locales: Vec<Locale>,
|
||||||
|
|
||||||
|
audios_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
|
||||||
|
subtitles_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
|
||||||
|
no_premium: fn(u32) -> Result<()>,
|
||||||
|
|
||||||
|
is_premium: bool,
|
||||||
|
|
||||||
|
series_visited: bool,
|
||||||
|
season_episodes: HashMap<String, Vec<Episode>>,
|
||||||
|
season_with_premium: Option<Vec<u32>>,
|
||||||
|
season_sorting: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Filter {
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
pub(crate) fn new(
|
||||||
|
url_filter: UrlFilter,
|
||||||
|
audio_locales: Vec<Locale>,
|
||||||
|
subtitle_locales: Vec<Locale>,
|
||||||
|
audios_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
|
||||||
|
subtitles_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
|
||||||
|
no_premium: fn(u32) -> Result<()>,
|
||||||
|
relative_episode_number: bool,
|
||||||
|
interactive_input: bool,
|
||||||
|
skip_specials: bool,
|
||||||
|
is_premium: bool,
|
||||||
|
) -> Self {
|
||||||
|
Self {
|
||||||
|
url_filter,
|
||||||
|
audio_locales,
|
||||||
|
subtitle_locales,
|
||||||
|
relative_episode_number,
|
||||||
|
interactive_input,
|
||||||
|
audios_missing,
|
||||||
|
subtitles_missing,
|
||||||
|
no_premium,
|
||||||
|
is_premium,
|
||||||
|
series_visited: false,
|
||||||
|
season_episodes: HashMap::new(),
|
||||||
|
skip_specials,
|
||||||
|
season_with_premium: is_premium.not().then_some(vec![]),
|
||||||
|
season_sorting: vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>> {
|
||||||
|
// the audio locales field isn't always populated
|
||||||
|
if !series.audio_locales.is_empty() {
|
||||||
|
let missing_audios = missing_locales(&series.audio_locales, &self.audio_locales);
|
||||||
|
if !missing_audios.is_empty()
|
||||||
|
&& !(self.audios_missing)(FilterMediaScope::Series(&series), missing_audios)?
|
||||||
|
{
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
let missing_subtitles =
|
||||||
|
missing_locales(&series.subtitle_locales, &self.subtitle_locales);
|
||||||
|
if !missing_subtitles.is_empty()
|
||||||
|
&& !(self.subtitles_missing)(FilterMediaScope::Series(&series), missing_subtitles)?
|
||||||
|
{
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut seasons = vec![];
|
||||||
|
for season in series.seasons().await? {
|
||||||
|
if !self.url_filter.is_season_valid(season.season_number) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let missing_audios = missing_locales(
|
||||||
|
&season
|
||||||
|
.versions
|
||||||
|
.iter()
|
||||||
|
.map(|l| l.audio_locale.clone())
|
||||||
|
.collect::<Vec<Locale>>(),
|
||||||
|
&self.audio_locales,
|
||||||
|
);
|
||||||
|
if !missing_audios.is_empty()
|
||||||
|
&& !(self.audios_missing)(FilterMediaScope::Season(&season), missing_audios)?
|
||||||
|
{
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
seasons.push(season)
|
||||||
|
}
|
||||||
|
|
||||||
|
let duplicated_seasons = get_duplicated_seasons(&seasons);
|
||||||
|
if !duplicated_seasons.is_empty() {
|
||||||
|
if self.interactive_input {
|
||||||
|
check_for_duplicated_seasons(&mut seasons)
|
||||||
|
} else {
|
||||||
|
info!(
|
||||||
|
"Found duplicated seasons: {}",
|
||||||
|
duplicated_seasons
|
||||||
|
.iter()
|
||||||
|
.map(|d| d.to_string())
|
||||||
|
.collect::<Vec<String>>()
|
||||||
|
.join(", ")
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.series_visited = true;
|
||||||
|
|
||||||
|
Ok(seasons)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn visit_season(&mut self, season: Season) -> Result<Vec<Episode>> {
|
||||||
|
if !self.url_filter.is_season_valid(season.season_number) {
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut seasons = vec![];
|
||||||
|
if self
|
||||||
|
.audio_locales
|
||||||
|
.iter()
|
||||||
|
.any(|l| season.audio_locales.contains(l))
|
||||||
|
{
|
||||||
|
seasons.push(season.clone())
|
||||||
|
}
|
||||||
|
for version in season.versions {
|
||||||
|
if season.id == version.id {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if self.audio_locales.contains(&version.audio_locale) {
|
||||||
|
seasons.push(version.season().await?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut episodes = vec![];
|
||||||
|
for season in seasons {
|
||||||
|
self.season_sorting.push(season.id.clone());
|
||||||
|
let mut eps = season.episodes().await?;
|
||||||
|
|
||||||
|
// removes any episode that does not have the audio locale of the season. yes, this is
|
||||||
|
// the case sometimes
|
||||||
|
if season.audio_locales.len() < 2 {
|
||||||
|
let season_locale = season
|
||||||
|
.audio_locales
|
||||||
|
.first()
|
||||||
|
.cloned()
|
||||||
|
.unwrap_or(Locale::ja_JP);
|
||||||
|
eps.retain(|e| e.audio_locale == season_locale)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::if_same_then_else)]
|
||||||
|
if eps.len() < season.number_of_episodes as usize {
|
||||||
|
if eps.is_empty()
|
||||||
|
&& !(self.audios_missing)(
|
||||||
|
FilterMediaScope::Season(&season),
|
||||||
|
season.audio_locales.iter().collect(),
|
||||||
|
)?
|
||||||
|
{
|
||||||
|
return Ok(vec![]);
|
||||||
|
} else if !eps.is_empty()
|
||||||
|
&& !(self.audios_missing)(
|
||||||
|
FilterMediaScope::Episode(vec![eps.first().unwrap(), eps.last().unwrap()]),
|
||||||
|
vec![&eps.first().unwrap().audio_locale],
|
||||||
|
)?
|
||||||
|
{
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
episodes.extend(eps)
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.relative_episode_number {
|
||||||
|
for episode in &episodes {
|
||||||
|
self.season_episodes
|
||||||
|
.entry(episode.season_id.clone())
|
||||||
|
.or_default()
|
||||||
|
.push(episode.clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(episodes)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn visit_episode(&mut self, episode: Episode) -> Result<Vec<SingleFormat>> {
|
||||||
|
if !self
|
||||||
|
.url_filter
|
||||||
|
.is_episode_valid(episode.sequence_number, episode.season_number)
|
||||||
|
{
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
|
||||||
|
// skip the episode if it's a special
|
||||||
|
if self.skip_specials
|
||||||
|
&& (episode.sequence_number == 0.0 || episode.sequence_number.fract() != 0.0)
|
||||||
|
{
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut episodes = vec![];
|
||||||
|
if !self.series_visited {
|
||||||
|
if self.audio_locales.contains(&episode.audio_locale) {
|
||||||
|
episodes.push(episode.clone())
|
||||||
|
}
|
||||||
|
for version in &episode.versions {
|
||||||
|
// `episode` is also a version of itself. the if block above already adds the
|
||||||
|
// episode if it matches the requested audio, so it doesn't need to be requested
|
||||||
|
// here again
|
||||||
|
if version.id == episode.id {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if self.audio_locales.contains(&version.audio_locale) {
|
||||||
|
episodes.push(version.episode().await?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let audio_locales: Vec<Locale> =
|
||||||
|
episodes.iter().map(|e| e.audio_locale.clone()).collect();
|
||||||
|
let missing_audios = missing_locales(&audio_locales, &self.audio_locales);
|
||||||
|
if !missing_audios.is_empty()
|
||||||
|
&& !(self.audios_missing)(
|
||||||
|
FilterMediaScope::Episode(vec![&episode]),
|
||||||
|
missing_audios,
|
||||||
|
)?
|
||||||
|
{
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut subtitle_locales: Vec<Locale> = episodes
|
||||||
|
.iter()
|
||||||
|
.flat_map(|e| e.subtitle_locales.clone())
|
||||||
|
.collect();
|
||||||
|
subtitle_locales.sort();
|
||||||
|
subtitle_locales.dedup();
|
||||||
|
let missing_subtitles = missing_locales(&subtitle_locales, &self.subtitle_locales);
|
||||||
|
if !missing_subtitles.is_empty()
|
||||||
|
&& !(self.subtitles_missing)(
|
||||||
|
FilterMediaScope::Episode(vec![&episode]),
|
||||||
|
missing_subtitles,
|
||||||
|
)?
|
||||||
|
{
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
episodes.push(episode.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(seasons_with_premium) = &mut self.season_with_premium {
|
||||||
|
let episodes_len_before = episodes.len();
|
||||||
|
episodes.retain(|e| !e.is_premium_only && !self.is_premium);
|
||||||
|
if episodes_len_before < episodes.len()
|
||||||
|
&& !seasons_with_premium.contains(&episode.season_number)
|
||||||
|
{
|
||||||
|
(self.no_premium)(episode.season_number)?;
|
||||||
|
seasons_with_premium.push(episode.season_number)
|
||||||
|
}
|
||||||
|
|
||||||
|
if episodes.is_empty() {
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut relative_episode_number = None;
|
||||||
|
let mut relative_sequence_number = None;
|
||||||
|
if self.relative_episode_number {
|
||||||
|
let season_eps = match self.season_episodes.get(&episode.season_id) {
|
||||||
|
Some(eps) => eps,
|
||||||
|
None => {
|
||||||
|
self.season_episodes.insert(
|
||||||
|
episode.season_id.clone(),
|
||||||
|
episode.season().await?.episodes().await?,
|
||||||
|
);
|
||||||
|
self.season_episodes.get(&episode.season_id).unwrap()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let mut non_integer_sequence_number_count = 0;
|
||||||
|
for (i, ep) in season_eps.iter().enumerate() {
|
||||||
|
if ep.sequence_number != 0.0 || ep.sequence_number.fract() == 0.0 {
|
||||||
|
non_integer_sequence_number_count += 1
|
||||||
|
}
|
||||||
|
if ep.id == episode.id {
|
||||||
|
relative_episode_number = Some(i + 1);
|
||||||
|
relative_sequence_number = Some(
|
||||||
|
(i + 1 - non_integer_sequence_number_count) as f32
|
||||||
|
+ fract(ep.sequence_number),
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if relative_episode_number.is_none() || relative_sequence_number.is_none() {
|
||||||
|
warn!(
|
||||||
|
"Failed to get relative episode number for episode {} ({}) of {} season {}",
|
||||||
|
episode.sequence_number,
|
||||||
|
episode.title,
|
||||||
|
episode.series_title,
|
||||||
|
episode.season_number,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(episodes
|
||||||
|
.into_iter()
|
||||||
|
.map(|e| {
|
||||||
|
SingleFormat::new_from_episode(
|
||||||
|
e.clone(),
|
||||||
|
e.subtitle_locales,
|
||||||
|
relative_episode_number.map(|n| n as u32),
|
||||||
|
relative_sequence_number,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>> {
|
||||||
|
Ok(movie_listing.movies().await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn visit_movie(&mut self, movie: Movie) -> Result<Vec<SingleFormat>> {
|
||||||
|
Ok(vec![SingleFormat::new_from_movie(movie, vec![])])
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Vec<SingleFormat>> {
|
||||||
|
Ok(vec![SingleFormat::new_from_music_video(music_video)])
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn visit_concert(&mut self, concert: Concert) -> Result<Vec<SingleFormat>> {
|
||||||
|
Ok(vec![SingleFormat::new_from_concert(concert)])
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn finish(self, input: Vec<Vec<SingleFormat>>) -> Result<SingleFormatCollection> {
|
||||||
|
let flatten_input: Vec<SingleFormat> = input.into_iter().flatten().collect();
|
||||||
|
|
||||||
|
let mut single_format_collection = SingleFormatCollection::new();
|
||||||
|
|
||||||
|
let mut pre_sorted: BTreeMap<String, Vec<SingleFormat>> = BTreeMap::new();
|
||||||
|
for data in flatten_input {
|
||||||
|
pre_sorted
|
||||||
|
.entry(data.identifier.clone())
|
||||||
|
.or_default()
|
||||||
|
.push(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut sorted: Vec<(String, Vec<SingleFormat>)> = pre_sorted.into_iter().collect();
|
||||||
|
sorted.sort_by(|(_, a), (_, b)| {
|
||||||
|
self.season_sorting
|
||||||
|
.iter()
|
||||||
|
.position(|p| p == &a.first().unwrap().season_id)
|
||||||
|
.unwrap()
|
||||||
|
.cmp(
|
||||||
|
&self
|
||||||
|
.season_sorting
|
||||||
|
.iter()
|
||||||
|
.position(|p| p == &b.first().unwrap().season_id)
|
||||||
|
.unwrap(),
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
|
for (_, mut data) in sorted {
|
||||||
|
data.sort_by(|a, b| {
|
||||||
|
self.audio_locales
|
||||||
|
.iter()
|
||||||
|
.position(|p| p == &a.audio)
|
||||||
|
.unwrap_or(usize::MAX)
|
||||||
|
.cmp(
|
||||||
|
&self
|
||||||
|
.audio_locales
|
||||||
|
.iter()
|
||||||
|
.position(|p| p == &b.audio)
|
||||||
|
.unwrap_or(usize::MAX),
|
||||||
|
)
|
||||||
|
});
|
||||||
|
single_format_collection.add_single_formats(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(single_format_collection)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn visit(
|
||||||
|
mut self,
|
||||||
|
media_collection: MediaCollection,
|
||||||
|
) -> Result<SingleFormatCollection> {
|
||||||
let mut items = vec![media_collection];
|
let mut items = vec![media_collection];
|
||||||
let mut result = vec![];
|
let mut result = vec![];
|
||||||
|
|
||||||
|
|
@ -45,9 +425,7 @@ pub trait Filter {
|
||||||
.collect::<Vec<MediaCollection>>(),
|
.collect::<Vec<MediaCollection>>(),
|
||||||
),
|
),
|
||||||
MediaCollection::Episode(episode) => {
|
MediaCollection::Episode(episode) => {
|
||||||
if let Some(t) = self.visit_episode(episode).await? {
|
result.push(self.visit_episode(episode).await?)
|
||||||
result.push(t)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
MediaCollection::MovieListing(movie_listing) => new_items.extend(
|
MediaCollection::MovieListing(movie_listing) => new_items.extend(
|
||||||
self.visit_movie_listing(movie_listing)
|
self.visit_movie_listing(movie_listing)
|
||||||
|
|
@ -56,20 +434,12 @@ pub trait Filter {
|
||||||
.map(|m| m.into())
|
.map(|m| m.into())
|
||||||
.collect::<Vec<MediaCollection>>(),
|
.collect::<Vec<MediaCollection>>(),
|
||||||
),
|
),
|
||||||
MediaCollection::Movie(movie) => {
|
MediaCollection::Movie(movie) => result.push(self.visit_movie(movie).await?),
|
||||||
if let Some(t) = self.visit_movie(movie).await? {
|
|
||||||
result.push(t)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
MediaCollection::MusicVideo(music_video) => {
|
MediaCollection::MusicVideo(music_video) => {
|
||||||
if let Some(t) = self.visit_music_video(music_video).await? {
|
result.push(self.visit_music_video(music_video).await?)
|
||||||
result.push(t)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
MediaCollection::Concert(concert) => {
|
MediaCollection::Concert(concert) => {
|
||||||
if let Some(t) = self.visit_concert(concert).await? {
|
result.push(self.visit_concert(concert).await?)
|
||||||
result.push(t)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -79,8 +449,10 @@ pub trait Filter {
|
||||||
|
|
||||||
self.finish(result).await
|
self.finish(result).await
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async fn finish(self, input: Vec<Self::T>) -> Result<Self::Output>;
|
fn missing_locales<'a>(available: &[Locale], searched: &'a [Locale]) -> Vec<&'a Locale> {
|
||||||
|
searched.iter().filter(|p| !available.contains(p)).collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Remove all duplicates from a [`Vec`].
|
/// Remove all duplicates from a [`Vec`].
|
||||||
|
|
|
||||||
19
crunchy-cli-core/src/utils/fmt.rs
Normal file
19
crunchy-cli-core/src/utils/fmt.rs
Normal file
|
|
@ -0,0 +1,19 @@
|
||||||
|
use chrono::TimeDelta;
|
||||||
|
|
||||||
|
pub fn format_time_delta(time_delta: &TimeDelta) -> String {
|
||||||
|
let negative = *time_delta < TimeDelta::zero();
|
||||||
|
let time_delta = time_delta.abs();
|
||||||
|
let hours = time_delta.num_hours();
|
||||||
|
let minutes = time_delta.num_minutes() - time_delta.num_hours() * 60;
|
||||||
|
let seconds = time_delta.num_seconds() - time_delta.num_minutes() * 60;
|
||||||
|
let milliseconds = time_delta.num_milliseconds() - time_delta.num_seconds() * 1000;
|
||||||
|
|
||||||
|
format!(
|
||||||
|
"{}{}:{:0>2}:{:0>2}.{:0>3}",
|
||||||
|
if negative { "-" } else { "" },
|
||||||
|
hours,
|
||||||
|
minutes,
|
||||||
|
seconds,
|
||||||
|
milliseconds
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
@ -1,20 +1,29 @@
|
||||||
use crate::utils::filter::real_dedup_vec;
|
use crate::utils::filter::real_dedup_vec;
|
||||||
|
use crate::utils::locale::LanguageTagging;
|
||||||
use crate::utils::log::tab_info;
|
use crate::utils::log::tab_info;
|
||||||
use crate::utils::os::is_special_file;
|
use crate::utils::os::{is_special_file, sanitize};
|
||||||
use anyhow::Result;
|
use anyhow::{bail, Result};
|
||||||
use chrono::Duration;
|
use chrono::{Datelike, Duration};
|
||||||
use crunchyroll_rs::media::{Resolution, Stream, Subtitle, VariantData};
|
use crunchyroll_rs::media::{SkipEvents, Stream, StreamData, Subtitle};
|
||||||
use crunchyroll_rs::{Concert, Episode, Locale, MediaCollection, Movie, MusicVideo};
|
use crunchyroll_rs::{Concert, Episode, Locale, MediaCollection, Movie, MusicVideo};
|
||||||
use log::{debug, info, warn};
|
use log::{debug, info};
|
||||||
use std::cmp::Ordering;
|
use std::cmp::Ordering;
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
|
use std::env;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct SingleFormat {
|
pub struct SingleFormat {
|
||||||
|
pub identifier: String,
|
||||||
|
|
||||||
pub title: String,
|
pub title: String,
|
||||||
pub description: String,
|
pub description: String,
|
||||||
|
|
||||||
|
pub release_year: u64,
|
||||||
|
pub release_month: u64,
|
||||||
|
pub release_day: u64,
|
||||||
|
|
||||||
pub audio: Locale,
|
pub audio: Locale,
|
||||||
pub subtitles: Vec<Locale>,
|
pub subtitles: Vec<Locale>,
|
||||||
|
|
||||||
|
|
@ -27,8 +36,9 @@ pub struct SingleFormat {
|
||||||
|
|
||||||
pub episode_id: String,
|
pub episode_id: String,
|
||||||
pub episode_number: String,
|
pub episode_number: String,
|
||||||
pub sequence_number: f32,
|
|
||||||
pub relative_episode_number: Option<u32>,
|
pub relative_episode_number: Option<u32>,
|
||||||
|
pub sequence_number: f32,
|
||||||
|
pub relative_sequence_number: Option<f32>,
|
||||||
|
|
||||||
pub duration: Duration,
|
pub duration: Duration,
|
||||||
|
|
||||||
|
|
@ -40,10 +50,26 @@ impl SingleFormat {
|
||||||
episode: Episode,
|
episode: Episode,
|
||||||
subtitles: Vec<Locale>,
|
subtitles: Vec<Locale>,
|
||||||
relative_episode_number: Option<u32>,
|
relative_episode_number: Option<u32>,
|
||||||
|
relative_sequence_number: Option<f32>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
identifier: if episode.identifier.is_empty() {
|
||||||
|
// crunchyroll sometimes leafs the identifier field empty so we have to build it
|
||||||
|
// ourself. it's not 100% save that the identifier which is built here is the same
|
||||||
|
// as if crunchyroll would deliver it (because the variables used here may also be
|
||||||
|
// wrong delivered by crunchy), but it's the best thing i can do at the moment
|
||||||
|
format!(
|
||||||
|
"{}|S{}|E{}",
|
||||||
|
episode.series_id, episode.season_number, episode.sequence_number
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
episode.identifier.clone()
|
||||||
|
},
|
||||||
title: episode.title.clone(),
|
title: episode.title.clone(),
|
||||||
description: episode.description.clone(),
|
description: episode.description.clone(),
|
||||||
|
release_year: episode.episode_air_date.year() as u64,
|
||||||
|
release_month: episode.episode_air_date.month() as u64,
|
||||||
|
release_day: episode.episode_air_date.day() as u64,
|
||||||
audio: episode.audio_locale.clone(),
|
audio: episode.audio_locale.clone(),
|
||||||
subtitles,
|
subtitles,
|
||||||
series_id: episode.series_id.clone(),
|
series_id: episode.series_id.clone(),
|
||||||
|
|
@ -59,6 +85,7 @@ impl SingleFormat {
|
||||||
},
|
},
|
||||||
sequence_number: episode.sequence_number,
|
sequence_number: episode.sequence_number,
|
||||||
relative_episode_number,
|
relative_episode_number,
|
||||||
|
relative_sequence_number,
|
||||||
duration: episode.duration,
|
duration: episode.duration,
|
||||||
source: episode.into(),
|
source: episode.into(),
|
||||||
}
|
}
|
||||||
|
|
@ -66,8 +93,12 @@ impl SingleFormat {
|
||||||
|
|
||||||
pub fn new_from_movie(movie: Movie, subtitles: Vec<Locale>) -> Self {
|
pub fn new_from_movie(movie: Movie, subtitles: Vec<Locale>) -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
identifier: movie.id.clone(),
|
||||||
title: movie.title.clone(),
|
title: movie.title.clone(),
|
||||||
description: movie.description.clone(),
|
description: movie.description.clone(),
|
||||||
|
release_year: movie.free_available_date.year() as u64,
|
||||||
|
release_month: movie.free_available_date.month() as u64,
|
||||||
|
release_day: movie.free_available_date.day() as u64,
|
||||||
audio: Locale::ja_JP,
|
audio: Locale::ja_JP,
|
||||||
subtitles,
|
subtitles,
|
||||||
series_id: movie.movie_listing_id.clone(),
|
series_id: movie.movie_listing_id.clone(),
|
||||||
|
|
@ -77,8 +108,9 @@ impl SingleFormat {
|
||||||
season_number: 1,
|
season_number: 1,
|
||||||
episode_id: movie.id.clone(),
|
episode_id: movie.id.clone(),
|
||||||
episode_number: "1".to_string(),
|
episode_number: "1".to_string(),
|
||||||
sequence_number: 1.0,
|
|
||||||
relative_episode_number: Some(1),
|
relative_episode_number: Some(1),
|
||||||
|
sequence_number: 1.0,
|
||||||
|
relative_sequence_number: Some(1.0),
|
||||||
duration: movie.duration,
|
duration: movie.duration,
|
||||||
source: movie.into(),
|
source: movie.into(),
|
||||||
}
|
}
|
||||||
|
|
@ -86,8 +118,12 @@ impl SingleFormat {
|
||||||
|
|
||||||
pub fn new_from_music_video(music_video: MusicVideo) -> Self {
|
pub fn new_from_music_video(music_video: MusicVideo) -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
identifier: music_video.id.clone(),
|
||||||
title: music_video.title.clone(),
|
title: music_video.title.clone(),
|
||||||
description: music_video.description.clone(),
|
description: music_video.description.clone(),
|
||||||
|
release_year: music_video.original_release.year() as u64,
|
||||||
|
release_month: music_video.original_release.month() as u64,
|
||||||
|
release_day: music_video.original_release.day() as u64,
|
||||||
audio: Locale::ja_JP,
|
audio: Locale::ja_JP,
|
||||||
subtitles: vec![],
|
subtitles: vec![],
|
||||||
series_id: music_video.id.clone(),
|
series_id: music_video.id.clone(),
|
||||||
|
|
@ -97,8 +133,9 @@ impl SingleFormat {
|
||||||
season_number: 1,
|
season_number: 1,
|
||||||
episode_id: music_video.id.clone(),
|
episode_id: music_video.id.clone(),
|
||||||
episode_number: "1".to_string(),
|
episode_number: "1".to_string(),
|
||||||
sequence_number: 1.0,
|
|
||||||
relative_episode_number: Some(1),
|
relative_episode_number: Some(1),
|
||||||
|
sequence_number: 1.0,
|
||||||
|
relative_sequence_number: Some(1.0),
|
||||||
duration: music_video.duration,
|
duration: music_video.duration,
|
||||||
source: music_video.into(),
|
source: music_video.into(),
|
||||||
}
|
}
|
||||||
|
|
@ -106,8 +143,12 @@ impl SingleFormat {
|
||||||
|
|
||||||
pub fn new_from_concert(concert: Concert) -> Self {
|
pub fn new_from_concert(concert: Concert) -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
identifier: concert.id.clone(),
|
||||||
title: concert.title.clone(),
|
title: concert.title.clone(),
|
||||||
description: concert.description.clone(),
|
description: concert.description.clone(),
|
||||||
|
release_year: concert.original_release.year() as u64,
|
||||||
|
release_month: concert.original_release.month() as u64,
|
||||||
|
release_day: concert.original_release.day() as u64,
|
||||||
audio: Locale::ja_JP,
|
audio: Locale::ja_JP,
|
||||||
subtitles: vec![],
|
subtitles: vec![],
|
||||||
series_id: concert.id.clone(),
|
series_id: concert.id.clone(),
|
||||||
|
|
@ -117,8 +158,9 @@ impl SingleFormat {
|
||||||
season_number: 1,
|
season_number: 1,
|
||||||
episode_id: concert.id.clone(),
|
episode_id: concert.id.clone(),
|
||||||
episode_number: "1".to_string(),
|
episode_number: "1".to_string(),
|
||||||
sequence_number: 1.0,
|
|
||||||
relative_episode_number: Some(1),
|
relative_episode_number: Some(1),
|
||||||
|
sequence_number: 1.0,
|
||||||
|
relative_sequence_number: Some(1.0),
|
||||||
duration: concert.duration,
|
duration: concert.duration,
|
||||||
source: concert.into(),
|
source: concert.into(),
|
||||||
}
|
}
|
||||||
|
|
@ -126,29 +168,27 @@ impl SingleFormat {
|
||||||
|
|
||||||
pub async fn stream(&self) -> Result<Stream> {
|
pub async fn stream(&self) -> Result<Stream> {
|
||||||
let stream = match &self.source {
|
let stream = match &self.source {
|
||||||
MediaCollection::Episode(e) => {
|
MediaCollection::Episode(e) => e.stream_maybe_without_drm().await,
|
||||||
if let Ok(stream) = e.legacy_streams().await {
|
MediaCollection::Movie(m) => m.stream_maybe_without_drm().await,
|
||||||
stream
|
MediaCollection::MusicVideo(mv) => mv.stream_maybe_without_drm().await,
|
||||||
} else {
|
MediaCollection::Concert(c) => c.stream_maybe_without_drm().await,
|
||||||
let stream = e.streams().await?;
|
|
||||||
warn!("Failed to get stream via legacy endpoint");
|
|
||||||
stream
|
|
||||||
}
|
|
||||||
}
|
|
||||||
MediaCollection::Movie(m) => {
|
|
||||||
if let Ok(stream) = m.legacy_streams().await {
|
|
||||||
stream
|
|
||||||
} else {
|
|
||||||
let stream = m.streams().await?;
|
|
||||||
warn!("Failed to get stream via legacy endpoint");
|
|
||||||
stream
|
|
||||||
}
|
|
||||||
}
|
|
||||||
MediaCollection::MusicVideo(mv) => mv.streams().await?,
|
|
||||||
MediaCollection::Concert(c) => c.streams().await?,
|
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
};
|
};
|
||||||
Ok(stream)
|
|
||||||
|
if let Err(crunchyroll_rs::error::Error::Request { message, .. }) = &stream {
|
||||||
|
if message.starts_with("TOO_MANY_ACTIVE_STREAMS") {
|
||||||
|
bail!("Too many active/parallel streams. Please close at least one stream you're watching and try again")
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Ok(stream?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn skip_events(&self) -> Result<Option<SkipEvents>> {
|
||||||
|
match &self.source {
|
||||||
|
MediaCollection::Episode(e) => Ok(Some(e.skip_events().await?)),
|
||||||
|
MediaCollection::Movie(m) => Ok(Some(m.skip_events().await?)),
|
||||||
|
_ => Ok(None),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn source_type(&self) -> String {
|
pub fn source_type(&self) -> String {
|
||||||
|
|
@ -163,10 +203,11 @@ impl SingleFormat {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_episode(&self) -> bool {
|
pub fn is_episode(&self) -> bool {
|
||||||
match self.source {
|
matches!(self.source, MediaCollection::Episode(_))
|
||||||
MediaCollection::Episode(_) => true,
|
}
|
||||||
_ => false,
|
|
||||||
}
|
pub fn is_special(&self) -> bool {
|
||||||
|
self.sequence_number == 0.0 || self.sequence_number.fract() != 0.0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -174,7 +215,7 @@ struct SingleFormatCollectionEpisodeKey(f32);
|
||||||
|
|
||||||
impl PartialOrd for SingleFormatCollectionEpisodeKey {
|
impl PartialOrd for SingleFormatCollectionEpisodeKey {
|
||||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||||
self.0.partial_cmp(&other.0)
|
Some(self.cmp(other))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Ord for SingleFormatCollectionEpisodeKey {
|
impl Ord for SingleFormatCollectionEpisodeKey {
|
||||||
|
|
@ -189,8 +230,43 @@ impl PartialEq for SingleFormatCollectionEpisodeKey {
|
||||||
}
|
}
|
||||||
impl Eq for SingleFormatCollectionEpisodeKey {}
|
impl Eq for SingleFormatCollectionEpisodeKey {}
|
||||||
|
|
||||||
|
struct SingleFormatCollectionSeasonKey((u32, String));
|
||||||
|
|
||||||
|
#[allow(clippy::non_canonical_partial_ord_impl)]
|
||||||
|
impl PartialOrd for SingleFormatCollectionSeasonKey {
|
||||||
|
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||||
|
let mut cmp = self.0 .0.partial_cmp(&other.0 .0);
|
||||||
|
if let Some(ordering) = cmp {
|
||||||
|
if matches!(ordering, Ordering::Equal) && self.0 .1 != other.0 .1 {
|
||||||
|
// first come first serve
|
||||||
|
cmp = Some(Ordering::Greater)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
cmp
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Ord for SingleFormatCollectionSeasonKey {
|
||||||
|
fn cmp(&self, other: &Self) -> Ordering {
|
||||||
|
let mut cmp = self.0 .0.cmp(&other.0 .0);
|
||||||
|
if matches!(cmp, Ordering::Equal) && self.0 .1 != other.0 .1 {
|
||||||
|
// first come first serve
|
||||||
|
cmp = Ordering::Greater
|
||||||
|
}
|
||||||
|
cmp
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl PartialEq for SingleFormatCollectionSeasonKey {
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
self.0.eq(&other.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Eq for SingleFormatCollectionSeasonKey {}
|
||||||
|
|
||||||
pub struct SingleFormatCollection(
|
pub struct SingleFormatCollection(
|
||||||
BTreeMap<u32, BTreeMap<SingleFormatCollectionEpisodeKey, Vec<SingleFormat>>>,
|
BTreeMap<
|
||||||
|
SingleFormatCollectionSeasonKey,
|
||||||
|
BTreeMap<SingleFormatCollectionEpisodeKey, Vec<SingleFormat>>,
|
||||||
|
>,
|
||||||
);
|
);
|
||||||
|
|
||||||
impl SingleFormatCollection {
|
impl SingleFormatCollection {
|
||||||
|
|
@ -205,8 +281,11 @@ impl SingleFormatCollection {
|
||||||
pub fn add_single_formats(&mut self, single_formats: Vec<SingleFormat>) {
|
pub fn add_single_formats(&mut self, single_formats: Vec<SingleFormat>) {
|
||||||
let format = single_formats.first().unwrap();
|
let format = single_formats.first().unwrap();
|
||||||
self.0
|
self.0
|
||||||
.entry(format.season_number)
|
.entry(SingleFormatCollectionSeasonKey((
|
||||||
.or_insert(BTreeMap::new())
|
format.season_number,
|
||||||
|
format.season_id.clone(),
|
||||||
|
)))
|
||||||
|
.or_default()
|
||||||
.insert(
|
.insert(
|
||||||
SingleFormatCollectionEpisodeKey(format.sequence_number),
|
SingleFormatCollectionEpisodeKey(format.sequence_number),
|
||||||
single_formats,
|
single_formats,
|
||||||
|
|
@ -215,18 +294,13 @@ impl SingleFormatCollection {
|
||||||
|
|
||||||
pub fn full_visual_output(&self) {
|
pub fn full_visual_output(&self) {
|
||||||
debug!("Series has {} seasons", self.0.len());
|
debug!("Series has {} seasons", self.0.len());
|
||||||
for (season_number, episodes) in &self.0 {
|
for (season_key, episodes) in &self.0 {
|
||||||
|
let first_episode = episodes.first_key_value().unwrap().1.first().unwrap();
|
||||||
info!(
|
info!(
|
||||||
"{} Season {}",
|
"{} Season {} ({})",
|
||||||
episodes
|
first_episode.series_name.clone(),
|
||||||
.first_key_value()
|
season_key.0 .0,
|
||||||
.unwrap()
|
first_episode.season_title.clone(),
|
||||||
.1
|
|
||||||
.first()
|
|
||||||
.unwrap()
|
|
||||||
.series_name
|
|
||||||
.clone(),
|
|
||||||
season_number
|
|
||||||
);
|
);
|
||||||
for (i, (_, formats)) in episodes.iter().enumerate() {
|
for (i, (_, formats)) in episodes.iter().enumerate() {
|
||||||
let format = formats.first().unwrap();
|
let format = formats.first().unwrap();
|
||||||
|
|
@ -264,9 +338,7 @@ impl Iterator for SingleFormatCollectionIterator {
|
||||||
type Item = Vec<SingleFormat>;
|
type Item = Vec<SingleFormat>;
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
let Some((_, episodes)) = self.0.0.iter_mut().next() else {
|
let (_, episodes) = self.0 .0.iter_mut().next()?;
|
||||||
return None
|
|
||||||
};
|
|
||||||
|
|
||||||
let value = episodes.pop_first().unwrap().1;
|
let value = episodes.pop_first().unwrap().1;
|
||||||
if episodes.is_empty() {
|
if episodes.is_empty() {
|
||||||
|
|
@ -276,6 +348,7 @@ impl Iterator for SingleFormatCollectionIterator {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Format {
|
pub struct Format {
|
||||||
pub title: String,
|
pub title: String,
|
||||||
|
|
@ -283,9 +356,14 @@ pub struct Format {
|
||||||
|
|
||||||
pub locales: Vec<(Locale, Vec<Locale>)>,
|
pub locales: Vec<(Locale, Vec<Locale>)>,
|
||||||
|
|
||||||
pub resolution: Resolution,
|
pub width: u64,
|
||||||
|
pub height: u64,
|
||||||
pub fps: f64,
|
pub fps: f64,
|
||||||
|
|
||||||
|
pub release_year: u64,
|
||||||
|
pub release_month: u64,
|
||||||
|
pub release_day: u64,
|
||||||
|
|
||||||
pub series_id: String,
|
pub series_id: String,
|
||||||
pub series_name: String,
|
pub series_name: String,
|
||||||
|
|
||||||
|
|
@ -295,13 +373,15 @@ pub struct Format {
|
||||||
|
|
||||||
pub episode_id: String,
|
pub episode_id: String,
|
||||||
pub episode_number: String,
|
pub episode_number: String,
|
||||||
pub sequence_number: f32,
|
|
||||||
pub relative_episode_number: Option<u32>,
|
pub relative_episode_number: Option<u32>,
|
||||||
|
pub sequence_number: f32,
|
||||||
|
pub relative_sequence_number: Option<f32>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Format {
|
impl Format {
|
||||||
|
#[allow(clippy::type_complexity)]
|
||||||
pub fn from_single_formats(
|
pub fn from_single_formats(
|
||||||
mut single_formats: Vec<(SingleFormat, VariantData, Vec<(Subtitle, bool)>)>,
|
mut single_formats: Vec<(SingleFormat, StreamData, Vec<(Subtitle, bool)>)>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let locales: Vec<(Locale, Vec<Locale>)> = single_formats
|
let locales: Vec<(Locale, Vec<Locale>)> = single_formats
|
||||||
.iter()
|
.iter()
|
||||||
|
|
@ -309,7 +389,7 @@ impl Format {
|
||||||
(
|
(
|
||||||
single_format.audio.clone(),
|
single_format.audio.clone(),
|
||||||
subtitles
|
subtitles
|
||||||
.into_iter()
|
.iter()
|
||||||
.map(|(s, _)| s.locale.clone())
|
.map(|(s, _)| s.locale.clone())
|
||||||
.collect::<Vec<Locale>>(),
|
.collect::<Vec<Locale>>(),
|
||||||
)
|
)
|
||||||
|
|
@ -321,8 +401,12 @@ impl Format {
|
||||||
title: first_format.title,
|
title: first_format.title,
|
||||||
description: first_format.description,
|
description: first_format.description,
|
||||||
locales,
|
locales,
|
||||||
resolution: first_stream.resolution,
|
width: first_stream.resolution().unwrap().width,
|
||||||
fps: first_stream.fps,
|
height: first_stream.resolution().unwrap().height,
|
||||||
|
fps: first_stream.fps().unwrap(),
|
||||||
|
release_year: first_format.release_year,
|
||||||
|
release_month: first_format.release_month,
|
||||||
|
release_day: first_format.release_day,
|
||||||
series_id: first_format.series_id,
|
series_id: first_format.series_id,
|
||||||
series_name: first_format.series_name,
|
series_name: first_format.series_name,
|
||||||
season_id: first_format.season_id,
|
season_id: first_format.season_id,
|
||||||
|
|
@ -330,66 +414,152 @@ impl Format {
|
||||||
season_number: first_format.season_number,
|
season_number: first_format.season_number,
|
||||||
episode_id: first_format.episode_id,
|
episode_id: first_format.episode_id,
|
||||||
episode_number: first_format.episode_number,
|
episode_number: first_format.episode_number,
|
||||||
sequence_number: first_format.sequence_number,
|
|
||||||
relative_episode_number: first_format.relative_episode_number,
|
relative_episode_number: first_format.relative_episode_number,
|
||||||
|
sequence_number: first_format.sequence_number,
|
||||||
|
relative_sequence_number: first_format.relative_sequence_number,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Formats the given string if it has specific pattern in it. It's possible to sanitize it which
|
/// Formats the given string if it has specific pattern in it. It also sanitizes the filename.
|
||||||
/// removes characters which can cause failures if the output string is used as a file name.
|
pub fn format_path(
|
||||||
pub fn format_path(&self, path: PathBuf, sanitize: bool) -> PathBuf {
|
&self,
|
||||||
let sanitize_func = if sanitize {
|
path: PathBuf,
|
||||||
|s: &str| sanitize_filename::sanitize(s)
|
universal: bool,
|
||||||
} else {
|
language_tagging: Option<&LanguageTagging>,
|
||||||
// converting this to a string is actually unnecessary
|
) -> PathBuf {
|
||||||
|s: &str| s.to_string()
|
let path = path
|
||||||
};
|
.to_string_lossy()
|
||||||
|
.to_string()
|
||||||
let as_string = path.to_string_lossy().to_string();
|
.replace("{title}", &sanitize(&self.title, true, universal))
|
||||||
|
.replace(
|
||||||
PathBuf::from(
|
"{audio}",
|
||||||
as_string
|
&sanitize(
|
||||||
.replace("{title}", &sanitize_func(&self.title))
|
self.locales
|
||||||
.replace(
|
.iter()
|
||||||
"{audio}",
|
.map(|(a, _)| language_tagging.map_or(a.to_string(), |t| t.for_locale(a)))
|
||||||
&sanitize_func(
|
.collect::<Vec<String>>()
|
||||||
&self
|
.join(
|
||||||
.locales
|
&env::var("CRUNCHY_CLI_FORMAT_DELIMITER")
|
||||||
.iter()
|
.map_or("_".to_string(), |e| e),
|
||||||
.map(|(a, _)| a.to_string())
|
),
|
||||||
.collect::<Vec<String>>()
|
true,
|
||||||
.join("|"),
|
universal,
|
||||||
),
|
|
||||||
)
|
|
||||||
.replace("{resolution}", &sanitize_func(&self.resolution.to_string()))
|
|
||||||
.replace("{series_id}", &sanitize_func(&self.series_id))
|
|
||||||
.replace("{series_name}", &sanitize_func(&self.series_name))
|
|
||||||
.replace("{season_id}", &sanitize_func(&self.season_id))
|
|
||||||
.replace("{season_name}", &sanitize_func(&self.season_title))
|
|
||||||
.replace(
|
|
||||||
"{season_number}",
|
|
||||||
&sanitize_func(&format!("{:0>2}", self.season_number.to_string())),
|
|
||||||
)
|
|
||||||
.replace("{episode_id}", &sanitize_func(&self.episode_id))
|
|
||||||
.replace(
|
|
||||||
"{episode_number}",
|
|
||||||
&sanitize_func(&format!("{:0>2}", self.episode_number.to_string())),
|
|
||||||
)
|
|
||||||
.replace(
|
|
||||||
"{relative_episode_number}",
|
|
||||||
&sanitize_func(&format!(
|
|
||||||
"{:0>2}",
|
|
||||||
self.relative_episode_number.unwrap_or_default().to_string()
|
|
||||||
)),
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
.replace(
|
||||||
|
"{width}",
|
||||||
|
&sanitize(self.width.to_string(), true, universal),
|
||||||
|
)
|
||||||
|
.replace(
|
||||||
|
"{height}",
|
||||||
|
&sanitize(self.height.to_string(), true, universal),
|
||||||
|
)
|
||||||
|
.replace("{series_id}", &sanitize(&self.series_id, true, universal))
|
||||||
|
.replace(
|
||||||
|
"{series_name}",
|
||||||
|
&sanitize(&self.series_name, true, universal),
|
||||||
|
)
|
||||||
|
.replace("{season_id}", &sanitize(&self.season_id, true, universal))
|
||||||
|
.replace(
|
||||||
|
"{season_name}",
|
||||||
|
&sanitize(&self.season_title, true, universal),
|
||||||
|
)
|
||||||
|
.replace(
|
||||||
|
"{season_number}",
|
||||||
|
&format!(
|
||||||
|
"{:0>2}",
|
||||||
|
sanitize(self.season_number.to_string(), true, universal)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.replace("{episode_id}", &sanitize(&self.episode_id, true, universal))
|
||||||
|
.replace(
|
||||||
|
"{episode_number}",
|
||||||
|
&format!("{:0>2}", sanitize(&self.episode_number, true, universal)),
|
||||||
|
)
|
||||||
|
.replace(
|
||||||
|
"{relative_episode_number}",
|
||||||
|
&format!(
|
||||||
|
"{:0>2}",
|
||||||
|
sanitize(
|
||||||
|
self.relative_episode_number.unwrap_or_default().to_string(),
|
||||||
|
true,
|
||||||
|
universal,
|
||||||
|
)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.replace(
|
||||||
|
"{sequence_number}",
|
||||||
|
&format!(
|
||||||
|
"{:0>2}",
|
||||||
|
sanitize(self.sequence_number.to_string(), true, universal)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.replace(
|
||||||
|
"{relative_sequence_number}",
|
||||||
|
&format!(
|
||||||
|
"{:0>2}",
|
||||||
|
sanitize(
|
||||||
|
self.relative_sequence_number
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_string(),
|
||||||
|
true,
|
||||||
|
universal,
|
||||||
|
)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.replace(
|
||||||
|
"{release_year}",
|
||||||
|
&sanitize(self.release_year.to_string(), true, universal),
|
||||||
|
)
|
||||||
|
.replace(
|
||||||
|
"{release_month}",
|
||||||
|
&format!(
|
||||||
|
"{:0>2}",
|
||||||
|
sanitize(self.release_month.to_string(), true, universal)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.replace(
|
||||||
|
"{release_day}",
|
||||||
|
&format!(
|
||||||
|
"{:0>2}",
|
||||||
|
sanitize(self.release_day.to_string(), true, universal)
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut path = PathBuf::from(path);
|
||||||
|
|
||||||
|
// make sure that every path section has a maximum of 255 characters
|
||||||
|
if path.file_name().unwrap_or_default().to_string_lossy().len() > 255 {
|
||||||
|
let name = path
|
||||||
|
.file_stem()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_string_lossy()
|
||||||
|
.to_string();
|
||||||
|
let ext = path
|
||||||
|
.extension()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_string_lossy()
|
||||||
|
.to_string();
|
||||||
|
if ext != name {
|
||||||
|
path.set_file_name(format!("{}.{}", &name[..(255 - ext.len() - 1)], ext))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
path.iter()
|
||||||
|
.map(|s| {
|
||||||
|
if s.len() > 255 {
|
||||||
|
s.to_string_lossy()[..255].to_string()
|
||||||
|
} else {
|
||||||
|
s.to_string_lossy().to_string()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn visual_output(&self, dst: &Path) {
|
pub fn visual_output(&self, dst: &Path) {
|
||||||
info!(
|
info!(
|
||||||
"Downloading {} to {}",
|
"Downloading {} to {}",
|
||||||
self.title,
|
self.title,
|
||||||
if is_special_file(&dst) || dst.to_str().unwrap() == "-" {
|
if is_special_file(dst) || dst.to_str().unwrap() == "-" {
|
||||||
dst.to_string_lossy().to_string()
|
dst.to_string_lossy().to_string()
|
||||||
} else {
|
} else {
|
||||||
format!("'{}'", dst.to_str().unwrap())
|
format!("'{}'", dst.to_str().unwrap())
|
||||||
|
|
@ -418,11 +588,16 @@ impl Format {
|
||||||
.collect::<Vec<String>>()
|
.collect::<Vec<String>>()
|
||||||
.join(", ")
|
.join(", ")
|
||||||
);
|
);
|
||||||
tab_info!("Resolution: {}", self.resolution);
|
tab_info!("Resolution: {}x{}", self.height, self.width);
|
||||||
tab_info!("FPS: {:.2}", self.fps)
|
tab_info!("FPS: {:.2}", self.fps)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn has_relative_episodes_fmt<S: AsRef<str>>(s: S) -> bool {
|
pub fn is_special(&self) -> bool {
|
||||||
return s.as_ref().contains("{relative_episode_number}");
|
self.sequence_number == 0.0 || self.sequence_number.fract() != 0.0
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn has_relative_fmt<S: AsRef<str>>(s: S) -> bool {
|
||||||
|
return s.as_ref().contains("{relative_episode_number}")
|
||||||
|
|| s.as_ref().contains("{relative_sequence_number}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
73
crunchy-cli-core/src/utils/interactive_select.rs
Normal file
73
crunchy-cli-core/src/utils/interactive_select.rs
Normal file
|
|
@ -0,0 +1,73 @@
|
||||||
|
use crate::utils::log::progress_pause;
|
||||||
|
use crunchyroll_rs::Season;
|
||||||
|
use dialoguer::console::Term;
|
||||||
|
use dialoguer::MultiSelect;
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
|
||||||
|
pub fn get_duplicated_seasons(seasons: &Vec<Season>) -> Vec<u32> {
|
||||||
|
let mut season_number_counter = BTreeMap::<u32, u32>::new();
|
||||||
|
for season in seasons {
|
||||||
|
season_number_counter
|
||||||
|
.entry(season.season_number)
|
||||||
|
.and_modify(|c| *c += 1)
|
||||||
|
.or_default();
|
||||||
|
}
|
||||||
|
season_number_counter
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|(k, v)| if v > 0 { Some(k) } else { None })
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn check_for_duplicated_seasons(seasons: &mut Vec<Season>) {
|
||||||
|
let mut as_map = BTreeMap::new();
|
||||||
|
for season in seasons.iter() {
|
||||||
|
as_map
|
||||||
|
.entry(season.season_number)
|
||||||
|
.or_insert(vec![])
|
||||||
|
.push(season)
|
||||||
|
}
|
||||||
|
|
||||||
|
let duplicates: Vec<&Season> = as_map
|
||||||
|
.into_values()
|
||||||
|
.filter(|s| s.len() > 1)
|
||||||
|
.flatten()
|
||||||
|
.collect();
|
||||||
|
progress_pause!();
|
||||||
|
let _ = Term::stdout().clear_line();
|
||||||
|
let keep = select(
|
||||||
|
"Duplicated seasons were found. Select the one you want to download (space to select/deselect; enter to continue)",
|
||||||
|
duplicates
|
||||||
|
.iter()
|
||||||
|
.map(|s| format!("Season {} ({})", s.season_number, s.title))
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
progress_pause!();
|
||||||
|
|
||||||
|
let mut remove_ids = vec![];
|
||||||
|
for (i, duplicate) in duplicates.into_iter().enumerate() {
|
||||||
|
if !keep.contains(&i) {
|
||||||
|
remove_ids.push(duplicate.id.clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
seasons.retain(|s| !remove_ids.contains(&s.id));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn select(prompt: &str, input: Vec<String>) -> Vec<usize> {
|
||||||
|
if input.is_empty() {
|
||||||
|
return vec![];
|
||||||
|
}
|
||||||
|
|
||||||
|
let def: Vec<bool> = (0..input.len()).map(|_| true).collect();
|
||||||
|
|
||||||
|
let selection = MultiSelect::new()
|
||||||
|
.with_prompt(prompt)
|
||||||
|
.items(&input[..])
|
||||||
|
.defaults(&def[..])
|
||||||
|
.clear(false)
|
||||||
|
.report(false)
|
||||||
|
.interact_on(&Term::stdout())
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
selection
|
||||||
|
}
|
||||||
|
|
@ -1,4 +1,124 @@
|
||||||
use crunchyroll_rs::Locale;
|
use crunchyroll_rs::Locale;
|
||||||
|
use log::warn;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
#[allow(clippy::upper_case_acronyms)]
|
||||||
|
pub enum LanguageTagging {
|
||||||
|
Default,
|
||||||
|
IETF,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LanguageTagging {
|
||||||
|
pub fn parse(s: &str) -> Result<Self, String> {
|
||||||
|
Ok(match s.to_lowercase().as_str() {
|
||||||
|
"default" => Self::Default,
|
||||||
|
"ietf" => Self::IETF,
|
||||||
|
_ => return Err(format!("'{}' is not a valid language tagging", s)),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn convert_locales(&self, locales: &[Locale]) -> Vec<String> {
|
||||||
|
let ietf_language_codes = ietf_language_codes();
|
||||||
|
let mut converted = vec![];
|
||||||
|
|
||||||
|
match &self {
|
||||||
|
LanguageTagging::Default => {
|
||||||
|
for locale in locales {
|
||||||
|
let Some((_, available)) =
|
||||||
|
ietf_language_codes.iter().find(|(_, l)| l.contains(locale))
|
||||||
|
else {
|
||||||
|
// if no matching IETF language code was found, just pass it as it is
|
||||||
|
converted.push(locale.to_string());
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
converted.push(available.first().unwrap().to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
LanguageTagging::IETF => {
|
||||||
|
for locale in locales {
|
||||||
|
let Some((tag, _)) =
|
||||||
|
ietf_language_codes.iter().find(|(_, l)| l.contains(locale))
|
||||||
|
else {
|
||||||
|
// if no matching IETF language code was found, just pass it as it is
|
||||||
|
converted.push(locale.to_string());
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
converted.push(tag.to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
converted
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn for_locale(&self, locale: &Locale) -> String {
|
||||||
|
match &self {
|
||||||
|
LanguageTagging::Default => ietf_language_codes()
|
||||||
|
.iter()
|
||||||
|
.find(|(_, l)| l.contains(locale))
|
||||||
|
.map_or(locale.to_string(), |(_, l)| l[0].to_string()),
|
||||||
|
LanguageTagging::IETF => ietf_language_codes()
|
||||||
|
.iter()
|
||||||
|
.find(|(_, l)| l.contains(locale))
|
||||||
|
.map_or(locale.to_string(), |(tag, _)| tag.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn resolve_locales(locales: &[Locale]) -> Vec<Locale> {
|
||||||
|
let ietf_language_codes = ietf_language_codes();
|
||||||
|
let all_locales = Locale::all();
|
||||||
|
|
||||||
|
let mut resolved = vec![];
|
||||||
|
for locale in locales {
|
||||||
|
if all_locales.contains(locale) {
|
||||||
|
resolved.push(locale.clone())
|
||||||
|
} else if let Some((_, resolved_locales)) = ietf_language_codes
|
||||||
|
.iter()
|
||||||
|
.find(|(tag, _)| tag == &locale.to_string().as_str())
|
||||||
|
{
|
||||||
|
let (first, alternatives) = resolved_locales.split_first().unwrap();
|
||||||
|
|
||||||
|
resolved.push(first.clone());
|
||||||
|
// ignoring `Locale::en_IN` because I think the majority of users which want english
|
||||||
|
// audio / subs want the "actual" english version and not the hindi accent dub
|
||||||
|
if !alternatives.is_empty() && resolved_locales.first().unwrap() != &Locale::en_IN {
|
||||||
|
warn!("Resolving locale '{}' to '{}', but there are some alternatives: {}. If you an alternative instead, please write it completely out instead of '{}'", locale, first, alternatives.iter().map(|l| format!("'{l}'")).collect::<Vec<String>>().join(", "), locale)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
resolved.push(locale.clone());
|
||||||
|
warn!("Unknown locale '{}'", locale)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resolved
|
||||||
|
}
|
||||||
|
|
||||||
|
fn ietf_language_codes<'a>() -> Vec<(&'a str, Vec<Locale>)> {
|
||||||
|
vec![
|
||||||
|
("ar", vec![Locale::ar_ME, Locale::ar_SA]),
|
||||||
|
("ca", vec![Locale::ca_ES]),
|
||||||
|
("de", vec![Locale::de_DE]),
|
||||||
|
("en", vec![Locale::en_US, Locale::hi_IN]),
|
||||||
|
("es", vec![Locale::es_ES, Locale::es_419, Locale::es_LA]),
|
||||||
|
("fr", vec![Locale::fr_FR]),
|
||||||
|
("hi", vec![Locale::hi_IN]),
|
||||||
|
("id", vec![Locale::id_ID]),
|
||||||
|
("it", vec![Locale::it_IT]),
|
||||||
|
("ja", vec![Locale::ja_JP]),
|
||||||
|
("ko", vec![Locale::ko_KR]),
|
||||||
|
("ms", vec![Locale::ms_MY]),
|
||||||
|
("pl", vec![Locale::pl_PL]),
|
||||||
|
("pt", vec![Locale::pt_PT, Locale::pt_BR]),
|
||||||
|
("ru", vec![Locale::ru_RU]),
|
||||||
|
("ta", vec![Locale::ta_IN]),
|
||||||
|
("te", vec![Locale::te_IN]),
|
||||||
|
("th", vec![Locale::th_TH]),
|
||||||
|
("tr", vec![Locale::tr_TR]),
|
||||||
|
("vi", vec![Locale::vi_VN]),
|
||||||
|
("zh", vec![Locale::zh_CN, Locale::zh_HK, Locale::zh_TW]),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
/// Return the locale of the system.
|
/// Return the locale of the system.
|
||||||
pub fn system_locale() -> Locale {
|
pub fn system_locale() -> Locale {
|
||||||
|
|
@ -19,8 +139,7 @@ pub fn system_locale() -> Locale {
|
||||||
pub fn all_locale_in_locales(locales: Vec<Locale>) -> Vec<Locale> {
|
pub fn all_locale_in_locales(locales: Vec<Locale>) -> Vec<Locale> {
|
||||||
if locales
|
if locales
|
||||||
.iter()
|
.iter()
|
||||||
.find(|l| l.to_string().to_lowercase().trim() == "all")
|
.any(|l| l.to_string().to_lowercase().trim() == "all")
|
||||||
.is_some()
|
|
||||||
{
|
{
|
||||||
Locale::all()
|
Locale::all()
|
||||||
} else {
|
} else {
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
use indicatif::{ProgressBar, ProgressStyle};
|
use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};
|
||||||
use log::{
|
use log::{
|
||||||
info, set_boxed_logger, set_max_level, Level, LevelFilter, Log, Metadata, Record,
|
info, set_boxed_logger, set_max_level, Level, LevelFilter, Log, Metadata, Record,
|
||||||
SetLoggerError,
|
SetLoggerError,
|
||||||
|
|
@ -37,6 +37,15 @@ macro_rules! progress {
|
||||||
}
|
}
|
||||||
pub(crate) use progress;
|
pub(crate) use progress;
|
||||||
|
|
||||||
|
macro_rules! progress_pause {
|
||||||
|
() => {
|
||||||
|
{
|
||||||
|
log::info!(target: "progress_pause", "")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub(crate) use progress_pause;
|
||||||
|
|
||||||
macro_rules! tab_info {
|
macro_rules! tab_info {
|
||||||
($($arg:tt)+) => {
|
($($arg:tt)+) => {
|
||||||
if log::max_level() == log::LevelFilter::Debug {
|
if log::max_level() == log::LevelFilter::Debug {
|
||||||
|
|
@ -48,7 +57,6 @@ macro_rules! tab_info {
|
||||||
}
|
}
|
||||||
pub(crate) use tab_info;
|
pub(crate) use tab_info;
|
||||||
|
|
||||||
#[allow(clippy::type_complexity)]
|
|
||||||
pub struct CliLogger {
|
pub struct CliLogger {
|
||||||
level: LevelFilter,
|
level: LevelFilter,
|
||||||
progress: Mutex<Option<ProgressBar>>,
|
progress: Mutex<Option<ProgressBar>>,
|
||||||
|
|
@ -62,6 +70,7 @@ impl Log for CliLogger {
|
||||||
fn log(&self, record: &Record) {
|
fn log(&self, record: &Record) {
|
||||||
if !self.enabled(record.metadata())
|
if !self.enabled(record.metadata())
|
||||||
|| (record.target() != "progress"
|
|| (record.target() != "progress"
|
||||||
|
&& record.target() != "progress_pause"
|
||||||
&& record.target() != "progress_end"
|
&& record.target() != "progress_end"
|
||||||
&& !record.target().starts_with("crunchy_cli"))
|
&& !record.target().starts_with("crunchy_cli"))
|
||||||
{
|
{
|
||||||
|
|
@ -75,6 +84,16 @@ impl Log for CliLogger {
|
||||||
|
|
||||||
match record.target() {
|
match record.target() {
|
||||||
"progress" => self.progress(record, false),
|
"progress" => self.progress(record, false),
|
||||||
|
"progress_pause" => {
|
||||||
|
let progress = self.progress.lock().unwrap();
|
||||||
|
if let Some(p) = &*progress {
|
||||||
|
p.set_draw_target(if p.is_hidden() {
|
||||||
|
ProgressDrawTarget::stdout()
|
||||||
|
} else {
|
||||||
|
ProgressDrawTarget::hidden()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
"progress_end" => self.progress(record, true),
|
"progress_end" => self.progress(record, true),
|
||||||
_ => {
|
_ => {
|
||||||
if self.progress.lock().unwrap().is_some() {
|
if self.progress.lock().unwrap().is_some() {
|
||||||
|
|
@ -149,7 +168,7 @@ impl CliLogger {
|
||||||
let finish_str = "✔";
|
let finish_str = "✔";
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
// windows does not support all unicode characters by default in their consoles, so
|
// windows does not support all unicode characters by default in their consoles, so
|
||||||
// we're using this (square root?) symbol instead. microsoft.
|
// we're using this (square root) symbol instead. microsoft.
|
||||||
let finish_str = "√";
|
let finish_str = "√";
|
||||||
|
|
||||||
let pb = ProgressBar::new_spinner();
|
let pb = ProgressBar::new_spinner();
|
||||||
|
|
@ -158,6 +177,7 @@ impl CliLogger {
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.tick_strings(&["—", "\\", "|", "/", finish_str]),
|
.tick_strings(&["—", "\\", "|", "/", finish_str]),
|
||||||
);
|
);
|
||||||
|
pb.set_draw_target(ProgressDrawTarget::stdout());
|
||||||
pb.enable_steady_tick(Duration::from_millis(200));
|
pb.enable_steady_tick(Duration::from_millis(200));
|
||||||
pb.set_message(msg);
|
pb.set_message(msg);
|
||||||
*progress = Some(pb)
|
*progress = Some(pb)
|
||||||
|
|
|
||||||
|
|
@ -3,9 +3,13 @@ pub mod context;
|
||||||
pub mod download;
|
pub mod download;
|
||||||
pub mod ffmpeg;
|
pub mod ffmpeg;
|
||||||
pub mod filter;
|
pub mod filter;
|
||||||
|
pub mod fmt;
|
||||||
pub mod format;
|
pub mod format;
|
||||||
|
pub mod interactive_select;
|
||||||
pub mod locale;
|
pub mod locale;
|
||||||
pub mod log;
|
pub mod log;
|
||||||
pub mod os;
|
pub mod os;
|
||||||
pub mod parse;
|
pub mod parse;
|
||||||
|
pub mod rate_limit;
|
||||||
|
pub mod sync;
|
||||||
pub mod video;
|
pub mod video;
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,14 @@
|
||||||
use log::debug;
|
use log::debug;
|
||||||
|
use regex::{Regex, RegexBuilder};
|
||||||
|
use std::borrow::Cow;
|
||||||
use std::io::ErrorKind;
|
use std::io::ErrorKind;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::pin::Pin;
|
||||||
use std::process::{Command, Stdio};
|
use std::process::{Command, Stdio};
|
||||||
use std::{env, io};
|
use std::task::{Context, Poll};
|
||||||
use tempfile::{Builder, NamedTempFile};
|
use std::{env, fs, io};
|
||||||
|
use tempfile::{Builder, NamedTempFile, TempPath};
|
||||||
|
use tokio::io::{AsyncRead, ReadBuf};
|
||||||
|
|
||||||
pub fn has_ffmpeg() -> bool {
|
pub fn has_ffmpeg() -> bool {
|
||||||
if let Err(e) = Command::new("ffmpeg").stderr(Stdio::null()).spawn() {
|
if let Err(e) = Command::new("ffmpeg").stderr(Stdio::null()).spawn() {
|
||||||
|
|
@ -22,11 +27,11 @@ pub fn has_ffmpeg() -> bool {
|
||||||
/// Get the temp directory either by the specified `CRUNCHY_CLI_TEMP_DIR` env variable or the dir
|
/// Get the temp directory either by the specified `CRUNCHY_CLI_TEMP_DIR` env variable or the dir
|
||||||
/// provided by the os.
|
/// provided by the os.
|
||||||
pub fn temp_directory() -> PathBuf {
|
pub fn temp_directory() -> PathBuf {
|
||||||
env::var("CRUNCHY_CLI_TEMP_DIR").map_or(env::temp_dir(), |d| PathBuf::from(d))
|
env::var("CRUNCHY_CLI_TEMP_DIR").map_or(env::temp_dir(), PathBuf::from)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Any tempfile should be created with this function. The prefix and directory of every file
|
/// Any tempfile should be created with this function. The prefix and directory of every file
|
||||||
/// created with this method stays the same which is helpful to query all existing tempfiles and
|
/// created with this function stays the same which is helpful to query all existing tempfiles and
|
||||||
/// e.g. remove them in a case of ctrl-c. Having one function also good to prevent mistakes like
|
/// e.g. remove them in a case of ctrl-c. Having one function also good to prevent mistakes like
|
||||||
/// setting the wrong prefix if done manually.
|
/// setting the wrong prefix if done manually.
|
||||||
pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
|
pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
|
||||||
|
|
@ -41,6 +46,98 @@ pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
|
||||||
Ok(tempfile)
|
Ok(tempfile)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn cache_dir<S: AsRef<str>>(name: S) -> io::Result<PathBuf> {
|
||||||
|
let cache_dir = temp_directory().join(format!(".crunchy-cli_{}_cache", name.as_ref()));
|
||||||
|
fs::create_dir_all(&cache_dir)?;
|
||||||
|
Ok(cache_dir)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct TempNamedPipe {
|
||||||
|
path: TempPath,
|
||||||
|
|
||||||
|
#[cfg(not(target_os = "windows"))]
|
||||||
|
reader: tokio::net::unix::pipe::Receiver,
|
||||||
|
#[cfg(target_os = "windows")]
|
||||||
|
file: tokio::fs::File,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TempNamedPipe {
|
||||||
|
pub fn path(&self) -> &Path {
|
||||||
|
&self.path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AsyncRead for TempNamedPipe {
|
||||||
|
fn poll_read(
|
||||||
|
mut self: Pin<&mut Self>,
|
||||||
|
cx: &mut Context<'_>,
|
||||||
|
buf: &mut ReadBuf<'_>,
|
||||||
|
) -> Poll<io::Result<()>> {
|
||||||
|
#[cfg(not(target_os = "windows"))]
|
||||||
|
return Pin::new(&mut self.reader).poll_read(cx, buf);
|
||||||
|
// very very dirty implementation of a 'tail' like behavior
|
||||||
|
#[cfg(target_os = "windows")]
|
||||||
|
{
|
||||||
|
let mut tmp_bytes = vec![0; buf.remaining()];
|
||||||
|
let mut tmp_buf = ReadBuf::new(tmp_bytes.as_mut_slice());
|
||||||
|
|
||||||
|
loop {
|
||||||
|
return match Pin::new(&mut self.file).poll_read(cx, &mut tmp_buf) {
|
||||||
|
Poll::Ready(r) => {
|
||||||
|
if r.is_ok() {
|
||||||
|
if !tmp_buf.filled().is_empty() {
|
||||||
|
buf.put_slice(tmp_buf.filled())
|
||||||
|
} else {
|
||||||
|
// sleep to not loop insanely fast and consume unnecessary system resources
|
||||||
|
std::thread::sleep(std::time::Duration::from_millis(50));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Poll::Ready(r)
|
||||||
|
}
|
||||||
|
Poll::Pending => Poll::Pending,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for TempNamedPipe {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
#[cfg(not(target_os = "windows"))]
|
||||||
|
let _ = nix::unistd::unlink(self.path.to_string_lossy().to_string().as_str());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn temp_named_pipe() -> io::Result<TempNamedPipe> {
|
||||||
|
let tmp = tempfile("")?;
|
||||||
|
|
||||||
|
#[cfg(not(target_os = "windows"))]
|
||||||
|
{
|
||||||
|
let path = tmp.into_temp_path();
|
||||||
|
let _ = fs::remove_file(&path);
|
||||||
|
|
||||||
|
nix::unistd::mkfifo(
|
||||||
|
path.to_string_lossy().to_string().as_str(),
|
||||||
|
nix::sys::stat::Mode::S_IRWXU,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(TempNamedPipe {
|
||||||
|
reader: tokio::net::unix::pipe::OpenOptions::new().open_receiver(&path)?,
|
||||||
|
path,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
#[cfg(target_os = "windows")]
|
||||||
|
{
|
||||||
|
let (file, path) = tmp.into_parts();
|
||||||
|
|
||||||
|
Ok(TempNamedPipe {
|
||||||
|
file: tokio::fs::File::from_std(file),
|
||||||
|
path,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Check if the given path exists and rename it until the new (renamed) file does not exist.
|
/// Check if the given path exists and rename it until the new (renamed) file does not exist.
|
||||||
pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
|
pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
|
||||||
// do not rename it if it exists but is a special file
|
// do not rename it if it exists but is a special file
|
||||||
|
|
@ -52,9 +149,18 @@ pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
|
||||||
while path.exists() {
|
while path.exists() {
|
||||||
i += 1;
|
i += 1;
|
||||||
|
|
||||||
let ext = path.extension().unwrap_or_default().to_string_lossy();
|
let mut ext = path.extension().unwrap_or_default().to_str().unwrap();
|
||||||
let mut filename = path.file_stem().unwrap_or_default().to_str().unwrap();
|
let mut filename = path.file_stem().unwrap_or_default().to_str().unwrap();
|
||||||
|
|
||||||
|
// if the extension is empty, the filename without extension is probably empty
|
||||||
|
// (e.g. `.mp4`). in this case Rust assumes that `.mp4` is the file stem rather than the
|
||||||
|
// extension. if this is the case, set the extension to the file stem and make the file stem
|
||||||
|
// empty
|
||||||
|
if ext.is_empty() {
|
||||||
|
ext = filename;
|
||||||
|
filename = "";
|
||||||
|
}
|
||||||
|
|
||||||
if filename.ends_with(&format!(" ({})", i - 1)) {
|
if filename.ends_with(&format!(" ({})", i - 1)) {
|
||||||
filename = filename.strip_suffix(&format!(" ({})", i - 1)).unwrap();
|
filename = filename.strip_suffix(&format!(" ({})", i - 1)).unwrap();
|
||||||
}
|
}
|
||||||
|
|
@ -69,3 +175,51 @@ pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
|
||||||
pub fn is_special_file<P: AsRef<Path>>(path: P) -> bool {
|
pub fn is_special_file<P: AsRef<Path>>(path: P) -> bool {
|
||||||
path.as_ref().exists() && !path.as_ref().is_file() && !path.as_ref().is_dir()
|
path.as_ref().exists() && !path.as_ref().is_file() && !path.as_ref().is_dir()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
lazy_static::lazy_static! {
|
||||||
|
static ref WINDOWS_NON_PRINTABLE_RE: Regex = Regex::new(r"[\x00-\x1f\x80-\x9f]").unwrap();
|
||||||
|
static ref WINDOWS_ILLEGAL_RE: Regex = Regex::new(r#"[<>:"|?*]"#).unwrap();
|
||||||
|
static ref WINDOWS_RESERVED_RE: Regex = RegexBuilder::new(r"(?i)^(con|prn|aux|nul|com[0-9]|lpt[0-9])(\..*)?$")
|
||||||
|
.case_insensitive(true)
|
||||||
|
.build()
|
||||||
|
.unwrap();
|
||||||
|
static ref WINDOWS_TRAILING_RE: Regex = Regex::new(r"[\. ]+$").unwrap();
|
||||||
|
|
||||||
|
static ref LINUX_NON_PRINTABLE: Regex = Regex::new(r"[\x00]").unwrap();
|
||||||
|
|
||||||
|
static ref RESERVED_RE: Regex = Regex::new(r"^\.+$").unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sanitizes a filename with the option to include/exclude the path separator from sanitizing.
|
||||||
|
pub fn sanitize<S: AsRef<str>>(path: S, include_path_separator: bool, universal: bool) -> String {
|
||||||
|
let path = Cow::from(path.as_ref().trim());
|
||||||
|
|
||||||
|
let path = RESERVED_RE.replace(&path, "");
|
||||||
|
|
||||||
|
let collect = |name: String| {
|
||||||
|
if name.len() > 255 {
|
||||||
|
name[..255].to_string()
|
||||||
|
} else {
|
||||||
|
name
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if universal || cfg!(windows) {
|
||||||
|
let path = WINDOWS_NON_PRINTABLE_RE.replace_all(&path, "");
|
||||||
|
let path = WINDOWS_ILLEGAL_RE.replace_all(&path, "");
|
||||||
|
let path = WINDOWS_RESERVED_RE.replace_all(&path, "");
|
||||||
|
let path = WINDOWS_TRAILING_RE.replace(&path, "");
|
||||||
|
let mut path = path.to_string();
|
||||||
|
if include_path_separator {
|
||||||
|
path = path.replace(['\\', '/'], "");
|
||||||
|
}
|
||||||
|
collect(path)
|
||||||
|
} else {
|
||||||
|
let path = LINUX_NON_PRINTABLE.replace_all(&path, "");
|
||||||
|
let mut path = path.to_string();
|
||||||
|
if include_path_separator {
|
||||||
|
path = path.replace('/', "");
|
||||||
|
}
|
||||||
|
collect(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -8,19 +8,27 @@ use regex::Regex;
|
||||||
/// If a struct instance equals the [`Default::default()`] it's considered that no find is applied.
|
/// If a struct instance equals the [`Default::default()`] it's considered that no find is applied.
|
||||||
/// If `from_*` is [`None`] they're set to [`u32::MIN`].
|
/// If `from_*` is [`None`] they're set to [`u32::MIN`].
|
||||||
/// If `to_*` is [`None`] they're set to [`u32::MAX`].
|
/// If `to_*` is [`None`] they're set to [`u32::MAX`].
|
||||||
#[derive(Debug)]
|
#[derive(Debug, Default)]
|
||||||
pub struct InnerUrlFilter {
|
pub struct InnerUrlFilter {
|
||||||
from_episode: Option<u32>,
|
from_episode: Option<f32>,
|
||||||
to_episode: Option<u32>,
|
to_episode: Option<f32>,
|
||||||
from_season: Option<u32>,
|
from_season: Option<u32>,
|
||||||
to_season: Option<u32>,
|
to_season: Option<u32>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
#[derive(Debug)]
|
||||||
pub struct UrlFilter {
|
pub struct UrlFilter {
|
||||||
inner: Vec<InnerUrlFilter>,
|
inner: Vec<InnerUrlFilter>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Default for UrlFilter {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
inner: vec![InnerUrlFilter::default()],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl UrlFilter {
|
impl UrlFilter {
|
||||||
pub fn is_season_valid(&self, season: u32) -> bool {
|
pub fn is_season_valid(&self, season: u32) -> bool {
|
||||||
self.inner.iter().any(|f| {
|
self.inner.iter().any(|f| {
|
||||||
|
|
@ -31,17 +39,20 @@ impl UrlFilter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_episode_valid(&self, episode: u32, season: u32) -> bool {
|
pub fn is_episode_valid(&self, episode: f32, season: u32) -> bool {
|
||||||
self.inner.iter().any(|f| {
|
self.inner.iter().any(|f| {
|
||||||
let from_episode = f.from_episode.unwrap_or(u32::MIN);
|
let from_episode = f.from_episode.unwrap_or(f32::MIN);
|
||||||
let to_episode = f.to_episode.unwrap_or(u32::MAX);
|
let to_episode = f.to_episode.unwrap_or(f32::MAX);
|
||||||
let from_season = f.from_season.unwrap_or(u32::MIN);
|
let from_season = f.from_season.unwrap_or(u32::MIN);
|
||||||
let to_season = f.to_season.unwrap_or(u32::MAX);
|
let to_season = f.to_season.unwrap_or(u32::MAX);
|
||||||
|
|
||||||
episode >= from_episode
|
if season < from_season || season > to_season {
|
||||||
&& episode <= to_episode
|
false
|
||||||
&& season >= from_season
|
} else if season == from_season || (f.from_season.is_none() && f.to_season.is_none()) {
|
||||||
&& season <= to_season
|
episode >= from_episode && episode <= to_episode
|
||||||
|
} else {
|
||||||
|
true
|
||||||
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -122,7 +133,21 @@ pub async fn parse_url(
|
||||||
UrlFilter::default()
|
UrlFilter::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
let parsed_url = crunchyroll_rs::parse_url(url).map_or(Err(anyhow!("Invalid url")), Ok)?;
|
// check if the url is the old series/episode scheme which still occurs in some places (like the
|
||||||
|
// rss)
|
||||||
|
let old_url_regex = Regex::new(r"https?://(www\.)?crunchyroll\.com/.+").unwrap();
|
||||||
|
if old_url_regex.is_match(&url) {
|
||||||
|
debug!("Detected maybe old url");
|
||||||
|
// replace the 'http' prefix with 'https' as http is not supported by the reqwest client
|
||||||
|
if url.starts_with("http://") {
|
||||||
|
url.replace_range(0..4, "https")
|
||||||
|
}
|
||||||
|
// the old url redirects to the new url. request the old url, follow the redirects and
|
||||||
|
// extract the final url
|
||||||
|
url = crunchy.client().get(&url).send().await?.url().to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
let parsed_url = crunchyroll_rs::parse_url(url).ok_or(anyhow!("Invalid url"))?;
|
||||||
debug!("Url type: {:?}", parsed_url);
|
debug!("Url type: {:?}", parsed_url);
|
||||||
let media_collection = match parsed_url {
|
let media_collection = match parsed_url {
|
||||||
UrlType::Series(id)
|
UrlType::Series(id)
|
||||||
|
|
@ -170,3 +195,13 @@ pub fn parse_resolution(mut resolution: String) -> Result<Resolution> {
|
||||||
bail!("Could not find resolution")
|
bail!("Could not find resolution")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Dirty implementation of [`f32::fract`] with more accuracy.
|
||||||
|
pub fn fract(input: f32) -> f32 {
|
||||||
|
if input.fract() == 0.0 {
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
format!("0.{}", input.to_string().split('.').last().unwrap())
|
||||||
|
.parse::<f32>()
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
|
||||||
73
crunchy-cli-core/src/utils/rate_limit.rs
Normal file
73
crunchy-cli-core/src/utils/rate_limit.rs
Normal file
|
|
@ -0,0 +1,73 @@
|
||||||
|
use async_speed_limit::Limiter;
|
||||||
|
use crunchyroll_rs::error::Error;
|
||||||
|
use futures_util::TryStreamExt;
|
||||||
|
use reqwest::{Client, Request, Response, ResponseBuilderExt};
|
||||||
|
use std::future::Future;
|
||||||
|
use std::io;
|
||||||
|
use std::pin::Pin;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use std::task::{Context, Poll};
|
||||||
|
use tower_service::Service;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct RateLimiterService {
|
||||||
|
client: Arc<Client>,
|
||||||
|
rate_limiter: Limiter,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RateLimiterService {
|
||||||
|
pub fn new(bytes: u32, client: Client) -> Self {
|
||||||
|
Self {
|
||||||
|
client: Arc::new(client),
|
||||||
|
rate_limiter: Limiter::new(bytes as f64),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Service<Request> for RateLimiterService {
|
||||||
|
type Response = Response;
|
||||||
|
type Error = Error;
|
||||||
|
type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
|
||||||
|
|
||||||
|
fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
|
||||||
|
Poll::Ready(Ok(()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn call(&mut self, req: Request) -> Self::Future {
|
||||||
|
let client = self.client.clone();
|
||||||
|
let rate_limiter = self.rate_limiter.clone();
|
||||||
|
|
||||||
|
Box::pin(async move {
|
||||||
|
let mut body = vec![];
|
||||||
|
let res = client.execute(req).await?;
|
||||||
|
let _url = res.url().clone().to_string();
|
||||||
|
let url = _url.as_str();
|
||||||
|
|
||||||
|
let mut http_res = http::Response::builder()
|
||||||
|
.url(res.url().clone())
|
||||||
|
.status(res.status())
|
||||||
|
.version(res.version());
|
||||||
|
*http_res.headers_mut().unwrap() = res.headers().clone();
|
||||||
|
http_res
|
||||||
|
.extensions_ref()
|
||||||
|
.unwrap()
|
||||||
|
.clone_from(&res.extensions());
|
||||||
|
|
||||||
|
let limiter = rate_limiter.limit(
|
||||||
|
res.bytes_stream()
|
||||||
|
.map_err(io::Error::other)
|
||||||
|
.into_async_read(),
|
||||||
|
);
|
||||||
|
|
||||||
|
futures_util::io::copy(limiter, &mut body)
|
||||||
|
.await
|
||||||
|
.map_err(|e| Error::Request {
|
||||||
|
url: url.to_string(),
|
||||||
|
status: None,
|
||||||
|
message: e.to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(Response::from(http_res.body(body).unwrap()))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
432
crunchy-cli-core/src/utils/sync.rs
Normal file
432
crunchy-cli-core/src/utils/sync.rs
Normal file
|
|
@ -0,0 +1,432 @@
|
||||||
|
use std::io::Read;
|
||||||
|
use std::process::Stdio;
|
||||||
|
use std::{
|
||||||
|
cmp,
|
||||||
|
collections::{HashMap, HashSet},
|
||||||
|
mem,
|
||||||
|
ops::Not,
|
||||||
|
path::Path,
|
||||||
|
process::Command,
|
||||||
|
};
|
||||||
|
|
||||||
|
use chrono::TimeDelta;
|
||||||
|
use crunchyroll_rs::Locale;
|
||||||
|
use log::debug;
|
||||||
|
use tempfile::TempPath;
|
||||||
|
|
||||||
|
use anyhow::{bail, Result};
|
||||||
|
use rusty_chromaprint::{Configuration, Fingerprinter};
|
||||||
|
|
||||||
|
use super::fmt::format_time_delta;
|
||||||
|
|
||||||
|
pub struct SyncAudio {
|
||||||
|
pub format_id: usize,
|
||||||
|
pub path: TempPath,
|
||||||
|
pub locale: Locale,
|
||||||
|
pub sample_rate: u32,
|
||||||
|
pub video_idx: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
|
struct TimeRange {
|
||||||
|
start: f64,
|
||||||
|
end: f64,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn sync_audios(
|
||||||
|
available_audios: &Vec<SyncAudio>,
|
||||||
|
sync_tolerance: u32,
|
||||||
|
sync_precision: u32,
|
||||||
|
) -> Result<Option<HashMap<usize, TimeDelta>>> {
|
||||||
|
let mut result: HashMap<usize, TimeDelta> = HashMap::new();
|
||||||
|
|
||||||
|
let mut sync_audios = vec![];
|
||||||
|
let mut chromaprints = HashMap::new();
|
||||||
|
let mut formats = HashSet::new();
|
||||||
|
for audio in available_audios {
|
||||||
|
if formats.contains(&audio.format_id) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
formats.insert(audio.format_id);
|
||||||
|
sync_audios.push((audio.format_id, &audio.path, audio.sample_rate));
|
||||||
|
chromaprints.insert(
|
||||||
|
audio.format_id,
|
||||||
|
generate_chromaprint(
|
||||||
|
&audio.path,
|
||||||
|
audio.sample_rate,
|
||||||
|
&TimeDelta::zero(),
|
||||||
|
&TimeDelta::zero(),
|
||||||
|
&TimeDelta::zero(),
|
||||||
|
)?,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
sync_audios.sort_by_key(|sync_audio| chromaprints.get(&sync_audio.0).unwrap().len());
|
||||||
|
|
||||||
|
let base_audio = sync_audios.remove(0);
|
||||||
|
|
||||||
|
let mut start = f64::MAX;
|
||||||
|
let mut end = f64::MIN;
|
||||||
|
let mut initial_offsets = HashMap::new();
|
||||||
|
for audio in &sync_audios {
|
||||||
|
debug!(
|
||||||
|
"Initial comparison of format {} to {}",
|
||||||
|
audio.0, &base_audio.0
|
||||||
|
);
|
||||||
|
|
||||||
|
let (lhs_ranges, rhs_ranges) = compare_chromaprints(
|
||||||
|
chromaprints.get(&base_audio.0).unwrap(),
|
||||||
|
chromaprints.get(&audio.0).unwrap(),
|
||||||
|
sync_tolerance,
|
||||||
|
);
|
||||||
|
if lhs_ranges.is_empty() || rhs_ranges.is_empty() {
|
||||||
|
bail!(
|
||||||
|
"Failed to sync videos, couldn't find matching audio parts between format {} and {}",
|
||||||
|
base_audio.0 + 1,
|
||||||
|
audio.0 + 1
|
||||||
|
);
|
||||||
|
}
|
||||||
|
let lhs_range = lhs_ranges[0];
|
||||||
|
let rhs_range = rhs_ranges[0];
|
||||||
|
start = start.min(lhs_range.start);
|
||||||
|
end = end.max(lhs_range.end);
|
||||||
|
start = start.min(rhs_range.start);
|
||||||
|
end = end.max(rhs_range.end);
|
||||||
|
let offset = TimeDelta::milliseconds(((rhs_range.start - lhs_range.start) * 1000.0) as i64);
|
||||||
|
initial_offsets.insert(audio.0, TimeDelta::zero().checked_sub(&offset).unwrap());
|
||||||
|
debug!(
|
||||||
|
"Found initial offset of {}ms ({} - {} {}s) ({} - {} {}s) for format {} to {}",
|
||||||
|
offset.num_milliseconds(),
|
||||||
|
lhs_range.start,
|
||||||
|
lhs_range.end,
|
||||||
|
lhs_range.end - lhs_range.start,
|
||||||
|
rhs_range.start,
|
||||||
|
rhs_range.end,
|
||||||
|
rhs_range.end - rhs_range.start,
|
||||||
|
audio.0,
|
||||||
|
base_audio.0
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
debug!(
|
||||||
|
"Found matching audio parts at {} - {}, narrowing search",
|
||||||
|
start, end
|
||||||
|
);
|
||||||
|
|
||||||
|
let start = TimeDelta::milliseconds((start * 1000.0) as i64 - 20000);
|
||||||
|
let end = TimeDelta::milliseconds((end * 1000.0) as i64 + 20000);
|
||||||
|
|
||||||
|
for sync_audio in &sync_audios {
|
||||||
|
let chromaprint = generate_chromaprint(
|
||||||
|
sync_audio.1,
|
||||||
|
sync_audio.2,
|
||||||
|
&start,
|
||||||
|
&end,
|
||||||
|
initial_offsets.get(&sync_audio.0).unwrap(),
|
||||||
|
)?;
|
||||||
|
chromaprints.insert(sync_audio.0, chromaprint);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut runs: HashMap<usize, i64> = HashMap::new();
|
||||||
|
let iterator_range_limits: i64 = 2 ^ sync_precision as i64;
|
||||||
|
for i in -iterator_range_limits..=iterator_range_limits {
|
||||||
|
let base_offset = TimeDelta::milliseconds(
|
||||||
|
((0.128 / iterator_range_limits as f64 * i as f64) * 1000.0) as i64,
|
||||||
|
);
|
||||||
|
chromaprints.insert(
|
||||||
|
base_audio.0,
|
||||||
|
generate_chromaprint(base_audio.1, base_audio.2, &start, &end, &base_offset)?,
|
||||||
|
);
|
||||||
|
for audio in &sync_audios {
|
||||||
|
let initial_offset = initial_offsets.get(&audio.0).copied().unwrap();
|
||||||
|
let offset = find_offset(
|
||||||
|
(&base_audio.0, chromaprints.get(&base_audio.0).unwrap()),
|
||||||
|
&base_offset,
|
||||||
|
(&audio.0, chromaprints.get(&audio.0).unwrap()),
|
||||||
|
&initial_offset,
|
||||||
|
&start,
|
||||||
|
sync_tolerance,
|
||||||
|
);
|
||||||
|
if offset.is_none() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let offset = offset.unwrap();
|
||||||
|
|
||||||
|
result.insert(
|
||||||
|
audio.0,
|
||||||
|
result
|
||||||
|
.get(&audio.0)
|
||||||
|
.copied()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.checked_add(&offset)
|
||||||
|
.unwrap(),
|
||||||
|
);
|
||||||
|
runs.insert(audio.0, runs.get(&audio.0).copied().unwrap_or_default() + 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let mut result: HashMap<usize, TimeDelta> = result
|
||||||
|
.iter()
|
||||||
|
.map(|(format_id, offset)| {
|
||||||
|
(
|
||||||
|
*format_id,
|
||||||
|
TimeDelta::milliseconds(
|
||||||
|
offset.num_milliseconds() / runs.get(format_id).copied().unwrap(),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
result.insert(base_audio.0, TimeDelta::milliseconds(0));
|
||||||
|
|
||||||
|
Ok(Some(result))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn find_offset(
|
||||||
|
lhs: (&usize, &Vec<u32>),
|
||||||
|
lhs_shift: &TimeDelta,
|
||||||
|
rhs: (&usize, &Vec<u32>),
|
||||||
|
rhs_shift: &TimeDelta,
|
||||||
|
start: &TimeDelta,
|
||||||
|
sync_tolerance: u32,
|
||||||
|
) -> Option<TimeDelta> {
|
||||||
|
let (lhs_ranges, rhs_ranges) = compare_chromaprints(lhs.1, rhs.1, sync_tolerance);
|
||||||
|
if lhs_ranges.is_empty() || rhs_ranges.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let lhs_range = lhs_ranges[0];
|
||||||
|
let rhs_range = rhs_ranges[0];
|
||||||
|
let offset = rhs_range.end - lhs_range.end;
|
||||||
|
let offset = TimeDelta::milliseconds((offset * 1000.0) as i64)
|
||||||
|
.checked_add(lhs_shift)?
|
||||||
|
.checked_sub(rhs_shift)?;
|
||||||
|
debug!(
|
||||||
|
"Found offset of {}ms ({} - {} {}s) ({} - {} {}s) for format {} to {}",
|
||||||
|
offset.num_milliseconds(),
|
||||||
|
lhs_range.start + start.num_milliseconds() as f64 / 1000.0,
|
||||||
|
lhs_range.end + start.num_milliseconds() as f64 / 1000.0,
|
||||||
|
lhs_range.end - lhs_range.start,
|
||||||
|
rhs_range.start + start.num_milliseconds() as f64 / 1000.0,
|
||||||
|
rhs_range.end + start.num_milliseconds() as f64 / 1000.0,
|
||||||
|
rhs_range.end - rhs_range.start,
|
||||||
|
rhs.0,
|
||||||
|
lhs.0
|
||||||
|
);
|
||||||
|
Some(offset)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn generate_chromaprint(
|
||||||
|
input_file: &Path,
|
||||||
|
sample_rate: u32,
|
||||||
|
start: &TimeDelta,
|
||||||
|
end: &TimeDelta,
|
||||||
|
offset: &TimeDelta,
|
||||||
|
) -> Result<Vec<u32>> {
|
||||||
|
let mut ss_argument: &TimeDelta = &start.checked_sub(offset).unwrap();
|
||||||
|
let mut offset_argument = &TimeDelta::zero();
|
||||||
|
if *offset < TimeDelta::zero() {
|
||||||
|
ss_argument = start;
|
||||||
|
offset_argument = offset;
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut printer = Fingerprinter::new(&Configuration::preset_test1());
|
||||||
|
printer.start(sample_rate, 2)?;
|
||||||
|
|
||||||
|
let mut command = Command::new("ffmpeg");
|
||||||
|
command
|
||||||
|
.arg("-hide_banner")
|
||||||
|
.arg("-y")
|
||||||
|
.args(["-ss", format_time_delta(ss_argument).as_str()]);
|
||||||
|
|
||||||
|
if end.is_zero().not() {
|
||||||
|
command.args(["-to", format_time_delta(end).as_str()]);
|
||||||
|
}
|
||||||
|
|
||||||
|
command
|
||||||
|
.args(["-itsoffset", format_time_delta(offset_argument).as_str()])
|
||||||
|
.args(["-i", input_file.to_string_lossy().to_string().as_str()])
|
||||||
|
.args(["-ac", "2"])
|
||||||
|
.args([
|
||||||
|
"-f",
|
||||||
|
if cfg!(target_endian = "big") {
|
||||||
|
"s16be"
|
||||||
|
} else {
|
||||||
|
"s16le"
|
||||||
|
},
|
||||||
|
])
|
||||||
|
.arg("-");
|
||||||
|
|
||||||
|
let mut handle = command
|
||||||
|
.stdout(Stdio::piped())
|
||||||
|
.stderr(Stdio::piped())
|
||||||
|
.spawn()?;
|
||||||
|
|
||||||
|
// the stdout is read in chunks because keeping all the raw audio data in memory would take up
|
||||||
|
// a significant amount of space
|
||||||
|
let mut stdout = handle.stdout.take().unwrap();
|
||||||
|
let mut buf: [u8; 128_000] = [0; 128_000];
|
||||||
|
while handle.try_wait()?.is_none() {
|
||||||
|
loop {
|
||||||
|
let read_bytes = stdout.read(&mut buf)?;
|
||||||
|
if read_bytes == 0 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
let data: [i16; 64_000] = unsafe { mem::transmute(buf) };
|
||||||
|
printer.consume(&data[0..(read_bytes / 2)])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !handle.wait()?.success() {
|
||||||
|
bail!("{}", std::io::read_to_string(handle.stderr.unwrap())?)
|
||||||
|
}
|
||||||
|
|
||||||
|
printer.finish();
|
||||||
|
return Ok(printer.fingerprint().into());
|
||||||
|
}
|
||||||
|
|
||||||
|
fn compare_chromaprints(
|
||||||
|
lhs_chromaprint: &Vec<u32>,
|
||||||
|
rhs_chromaprint: &Vec<u32>,
|
||||||
|
sync_tolerance: u32,
|
||||||
|
) -> (Vec<TimeRange>, Vec<TimeRange>) {
|
||||||
|
let lhs_inverse_index = create_inverse_index(lhs_chromaprint);
|
||||||
|
let rhs_inverse_index = create_inverse_index(rhs_chromaprint);
|
||||||
|
|
||||||
|
let mut possible_shifts = HashSet::new();
|
||||||
|
for lhs_pair in lhs_inverse_index {
|
||||||
|
let original_point = lhs_pair.0;
|
||||||
|
for i in -2..=2 {
|
||||||
|
let modified_point = (original_point as i32 + i) as u32;
|
||||||
|
if rhs_inverse_index.contains_key(&modified_point) {
|
||||||
|
let rhs_index = rhs_inverse_index.get(&modified_point).copied().unwrap();
|
||||||
|
possible_shifts.insert(rhs_index as i32 - lhs_pair.1 as i32);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut all_lhs_time_ranges = vec![];
|
||||||
|
let mut all_rhs_time_ranges = vec![];
|
||||||
|
for shift_amount in possible_shifts {
|
||||||
|
let time_range_pair = find_time_ranges(
|
||||||
|
lhs_chromaprint,
|
||||||
|
rhs_chromaprint,
|
||||||
|
shift_amount,
|
||||||
|
sync_tolerance,
|
||||||
|
);
|
||||||
|
if time_range_pair.is_none() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let (mut lhs_time_ranges, mut rhs_time_ranges) = time_range_pair.unwrap();
|
||||||
|
let mut lhs_time_ranges: Vec<TimeRange> = lhs_time_ranges
|
||||||
|
.drain(..)
|
||||||
|
.filter(|time_range| {
|
||||||
|
(20.0 < (time_range.end - time_range.start))
|
||||||
|
&& ((time_range.end - time_range.start) < 180.0)
|
||||||
|
&& time_range.end > 0.0
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
lhs_time_ranges.sort_by(|a, b| (b.end - b.start).total_cmp(&(a.end - a.start)));
|
||||||
|
let mut rhs_time_ranges: Vec<TimeRange> = rhs_time_ranges
|
||||||
|
.drain(..)
|
||||||
|
.filter(|time_range| {
|
||||||
|
(20.0 < (time_range.end - time_range.start))
|
||||||
|
&& ((time_range.end - time_range.start) < 180.0)
|
||||||
|
&& time_range.end > 0.0
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
rhs_time_ranges.sort_by(|a, b| (b.end - b.start).total_cmp(&(a.end - a.start)));
|
||||||
|
if lhs_time_ranges.is_empty() || rhs_time_ranges.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
all_lhs_time_ranges.push(lhs_time_ranges[0]);
|
||||||
|
all_rhs_time_ranges.push(rhs_time_ranges[0]);
|
||||||
|
}
|
||||||
|
all_lhs_time_ranges.sort_by(|a, b| (a.end - a.start).total_cmp(&(b.end - b.start)));
|
||||||
|
all_lhs_time_ranges.reverse();
|
||||||
|
all_rhs_time_ranges.sort_by(|a, b| (a.end - a.start).total_cmp(&(b.end - b.start)));
|
||||||
|
all_rhs_time_ranges.reverse();
|
||||||
|
|
||||||
|
(all_lhs_time_ranges, all_rhs_time_ranges)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_inverse_index(chromaprint: &Vec<u32>) -> HashMap<u32, usize> {
|
||||||
|
let mut inverse_index = HashMap::with_capacity(chromaprint.capacity());
|
||||||
|
for (i, fingerprint) in chromaprint.iter().enumerate().take(chromaprint.capacity()) {
|
||||||
|
inverse_index.insert(*fingerprint, i);
|
||||||
|
}
|
||||||
|
inverse_index
|
||||||
|
}
|
||||||
|
|
||||||
|
fn find_time_ranges(
|
||||||
|
lhs_chromaprint: &[u32],
|
||||||
|
rhs_chromaprint: &[u32],
|
||||||
|
shift_amount: i32,
|
||||||
|
sync_tolerance: u32,
|
||||||
|
) -> Option<(Vec<TimeRange>, Vec<TimeRange>)> {
|
||||||
|
let mut lhs_shift: i32 = 0;
|
||||||
|
let mut rhs_shift: i32 = 0;
|
||||||
|
if shift_amount < 0 {
|
||||||
|
lhs_shift -= shift_amount;
|
||||||
|
} else {
|
||||||
|
rhs_shift += shift_amount;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut lhs_matching_timestamps = vec![];
|
||||||
|
let mut rhs_matching_timestamps = vec![];
|
||||||
|
let upper_limit =
|
||||||
|
cmp::min(lhs_chromaprint.len(), rhs_chromaprint.len()) as i32 - shift_amount.abs();
|
||||||
|
|
||||||
|
for i in 0..upper_limit {
|
||||||
|
let lhs_position = i + lhs_shift;
|
||||||
|
let rhs_position = i + rhs_shift;
|
||||||
|
let difference = (lhs_chromaprint[lhs_position as usize]
|
||||||
|
^ rhs_chromaprint[rhs_position as usize])
|
||||||
|
.count_ones();
|
||||||
|
|
||||||
|
if difference > sync_tolerance {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
lhs_matching_timestamps.push(lhs_position as f64 * 0.128);
|
||||||
|
rhs_matching_timestamps.push(rhs_position as f64 * 0.128);
|
||||||
|
}
|
||||||
|
lhs_matching_timestamps.push(f64::MAX);
|
||||||
|
rhs_matching_timestamps.push(f64::MAX);
|
||||||
|
|
||||||
|
let lhs_time_ranges = timestamps_to_ranges(lhs_matching_timestamps);
|
||||||
|
lhs_time_ranges.as_ref()?;
|
||||||
|
let lhs_time_ranges = lhs_time_ranges.unwrap();
|
||||||
|
let rhs_time_ranges = timestamps_to_ranges(rhs_matching_timestamps).unwrap();
|
||||||
|
|
||||||
|
Some((lhs_time_ranges, rhs_time_ranges))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn timestamps_to_ranges(mut timestamps: Vec<f64>) -> Option<Vec<TimeRange>> {
|
||||||
|
if timestamps.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
timestamps.sort_by(|a, b| a.total_cmp(b));
|
||||||
|
|
||||||
|
let mut time_ranges = vec![];
|
||||||
|
let mut current_range = TimeRange {
|
||||||
|
start: timestamps[0],
|
||||||
|
end: timestamps[0],
|
||||||
|
};
|
||||||
|
|
||||||
|
for i in 0..timestamps.len() - 1 {
|
||||||
|
let current = timestamps[i];
|
||||||
|
let next = timestamps[i + 1];
|
||||||
|
if next - current <= 1.0 {
|
||||||
|
current_range.end = next;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
time_ranges.push(current_range);
|
||||||
|
current_range.start = next;
|
||||||
|
current_range.end = next;
|
||||||
|
}
|
||||||
|
if !time_ranges.is_empty() {
|
||||||
|
Some(time_ranges)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,25 +1,46 @@
|
||||||
use anyhow::Result;
|
use anyhow::{bail, Result};
|
||||||
use crunchyroll_rs::media::{Resolution, Stream, VariantData};
|
use crunchyroll_rs::media::{Resolution, Stream, StreamData};
|
||||||
|
use crunchyroll_rs::Locale;
|
||||||
|
|
||||||
pub async fn variant_data_from_stream(
|
pub async fn stream_data_from_stream(
|
||||||
stream: &Stream,
|
stream: &Stream,
|
||||||
resolution: &Resolution,
|
resolution: &Resolution,
|
||||||
) -> Result<Option<(VariantData, VariantData)>> {
|
hardsub_subtitle: Option<Locale>,
|
||||||
let mut streaming_data = stream.dash_streaming_data(None).await?;
|
) -> Result<Option<(StreamData, StreamData, bool)>> {
|
||||||
streaming_data
|
let (hardsub_locale, mut contains_hardsub) = if hardsub_subtitle.is_some() {
|
||||||
.0
|
(hardsub_subtitle, true)
|
||||||
.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
|
} else {
|
||||||
streaming_data
|
(None, false)
|
||||||
.1
|
};
|
||||||
.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
|
|
||||||
|
let (mut videos, mut audios) = match stream.stream_data(hardsub_locale).await {
|
||||||
|
Ok(data) => data,
|
||||||
|
Err(e) => {
|
||||||
|
// the error variant is only `crunchyroll_rs::error::Error::Input` when the requested
|
||||||
|
// hardsub is not available
|
||||||
|
if let crunchyroll_rs::error::Error::Input { .. } = e {
|
||||||
|
contains_hardsub = false;
|
||||||
|
stream.stream_data(None).await?
|
||||||
|
} else {
|
||||||
|
bail!(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
if videos.iter().any(|v| v.drm.is_some()) || audios.iter().any(|v| v.drm.is_some()) {
|
||||||
|
bail!("Stream is DRM protected")
|
||||||
|
}
|
||||||
|
|
||||||
|
videos.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
|
||||||
|
audios.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
|
||||||
|
|
||||||
let video_variant = match resolution.height {
|
let video_variant = match resolution.height {
|
||||||
u64::MAX => Some(streaming_data.0.into_iter().next().unwrap()),
|
u64::MAX => Some(videos.into_iter().next().unwrap()),
|
||||||
u64::MIN => Some(streaming_data.0.into_iter().last().unwrap()),
|
u64::MIN => Some(videos.into_iter().last().unwrap()),
|
||||||
_ => streaming_data
|
_ => videos
|
||||||
.0
|
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.find(|v| resolution.height == v.resolution.height),
|
.find(|v| resolution.height == v.resolution().unwrap().height),
|
||||||
};
|
};
|
||||||
Ok(video_variant.map(|v| (v, streaming_data.1.first().unwrap().clone())))
|
Ok(video_variant.map(|v| (v, audios.first().unwrap().clone(), contains_hardsub)))
|
||||||
}
|
}
|
||||||
|
|
|
||||||
59
flake.lock
generated
Normal file
59
flake.lock
generated
Normal file
|
|
@ -0,0 +1,59 @@
|
||||||
|
{
|
||||||
|
"nodes": {
|
||||||
|
"nixpkgs": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1710534455,
|
||||||
|
"narHash": "sha256-huQT4Xs0y4EeFKn2BTBVYgEwJSv8SDlm82uWgMnCMmI=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "9af9c1c87ed3e3ed271934cb896e0cdd33dae212",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"id": "nixpkgs",
|
||||||
|
"ref": "nixpkgs-unstable",
|
||||||
|
"type": "indirect"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
"inputs": {
|
||||||
|
"nixpkgs": "nixpkgs",
|
||||||
|
"utils": "utils"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"systems": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1681028828,
|
||||||
|
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"utils": {
|
||||||
|
"inputs": {
|
||||||
|
"systems": "systems"
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1710146030,
|
||||||
|
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"id": "flake-utils",
|
||||||
|
"type": "indirect"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": "root",
|
||||||
|
"version": 7
|
||||||
|
}
|
||||||
76
flake.nix
Normal file
76
flake.nix
Normal file
|
|
@ -0,0 +1,76 @@
|
||||||
|
{
|
||||||
|
inputs = {
|
||||||
|
nixpkgs.url = "flake:nixpkgs/nixpkgs-unstable";
|
||||||
|
utils.url = "flake:flake-utils";
|
||||||
|
};
|
||||||
|
|
||||||
|
outputs = { self, nixpkgs, utils }: utils.lib.eachDefaultSystem
|
||||||
|
(system:
|
||||||
|
let
|
||||||
|
# enable musl on Linux will trigger a toolchain rebuild
|
||||||
|
# making the build very slow
|
||||||
|
pkgs = import nixpkgs { inherit system; };
|
||||||
|
# if nixpkgs.legacyPackages.${system}.stdenv.hostPlatform.isLinux
|
||||||
|
# then nixpkgs.legacyPackages.${system}.pkgsMusl
|
||||||
|
# else nixpkgs.legacyPackages.${system};
|
||||||
|
|
||||||
|
crunchy-cli = pkgs.rustPlatform.buildRustPackage.override { stdenv = pkgs.clangStdenv; } rec {
|
||||||
|
pname = "crunchy-cli";
|
||||||
|
inherit ((pkgs.lib.importTOML ./Cargo.toml).package) version;
|
||||||
|
|
||||||
|
src = pkgs.lib.cleanSource ./.;
|
||||||
|
|
||||||
|
cargoLock = {
|
||||||
|
lockFile = ./Cargo.lock;
|
||||||
|
allowBuiltinFetchGit = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
buildNoDefaultFeatures = true;
|
||||||
|
buildFeatures = [ "openssl-tls" ];
|
||||||
|
|
||||||
|
nativeBuildInputs = [
|
||||||
|
pkgs.pkg-config
|
||||||
|
] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
|
||||||
|
pkgs.xcbuild
|
||||||
|
];
|
||||||
|
|
||||||
|
buildInputs = [
|
||||||
|
pkgs.openssl
|
||||||
|
] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
|
||||||
|
pkgs.darwin.Security
|
||||||
|
];
|
||||||
|
};
|
||||||
|
in
|
||||||
|
{
|
||||||
|
packages.default = crunchy-cli;
|
||||||
|
|
||||||
|
devShells.default = pkgs.mkShell {
|
||||||
|
packages = with pkgs; [
|
||||||
|
cargo
|
||||||
|
clippy
|
||||||
|
rust-analyzer
|
||||||
|
rustc
|
||||||
|
rustfmt
|
||||||
|
];
|
||||||
|
|
||||||
|
inputsFrom = builtins.attrValues self.packages.${system};
|
||||||
|
|
||||||
|
buildInputs = [
|
||||||
|
pkgs.openssl
|
||||||
|
pkgs.libiconv
|
||||||
|
] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
|
||||||
|
pkgs.darwin.apple_sdk.frameworks.CoreServices
|
||||||
|
pkgs.darwin.Security
|
||||||
|
];
|
||||||
|
|
||||||
|
RUST_SRC_PATH = pkgs.rustPlatform.rustLibSrc;
|
||||||
|
};
|
||||||
|
|
||||||
|
formatter = pkgs.nixpkgs-fmt;
|
||||||
|
}
|
||||||
|
) // {
|
||||||
|
overlays.default = final: prev: {
|
||||||
|
inherit (self.packages.${final.system}) crunchy-cli;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
10
src/main.rs
10
src/main.rs
|
|
@ -1,4 +1,12 @@
|
||||||
|
#[cfg(not(any(
|
||||||
|
feature = "rustls-tls",
|
||||||
|
feature = "native-tls",
|
||||||
|
feature = "openssl-tls",
|
||||||
|
feature = "openssl-tls-static"
|
||||||
|
)))]
|
||||||
|
compile_error!("At least one tls feature must be activated");
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() {
|
async fn main() {
|
||||||
crunchy_cli_core::cli_entrypoint().await
|
crunchy_cli_core::main(&std::env::args().collect::<Vec<String>>()).await
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue