Mirror of https://github.com/crunchy-labs/crunchy-cli.git
Synced 2026-01-21 12:12:00 -06:00

Merge branch 'next'

# Conflicts:
#	README.md
#	cli/commands/archive/archive.go
#	cli/commands/download/download.go
#	cli/commands/login/login.go
#	cli/root.go
#	crunchy-cli.1
#	go.mod
#	go.sum
#	utils/locale.go

Commit 3e7d2583b7
51 changed files with 4146 additions and 3256 deletions
.github/dependabot.yml (vendored, 15 changes)

@@ -1,6 +1,17 @@
 version: 2
 updates:
-  - package-ecosystem: "gomod"
+  - package-ecosystem: "cargo"
     directory: "/"
     schedule:
-      interval: "daily"
+      interval: "weekly"
+    ignore:
+      - dependency-name: "*"
+        update-types: [ "version-update:semver-patch" ]
+
+  - package-ecosystem: "cargo"
+    directory: "/crunchy-cli-core"
+    schedule:
+      interval: "weekly"
+    ignore:
+      - dependency-name: "*"
+        update-types: [ "version-update:semver-patch" ]
.github/workflows/ci.yml (vendored, 133 changes)

@@ -1,20 +1,133 @@
-name: CI
+name: ci

-on: [ push, pull_request ]
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+  workflow_dispatch:

 jobs:
   test:
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        include:
+          - os: ubuntu-latest
+            toolchain: x86_64-unknown-linux-musl
+          - os: windows-latest
+            toolchain: x86_64-pc-windows-msvc
+          - os: macos-latest
+            toolchain: x86_64-apple-darwin
     steps:
-      - uses: actions/checkout@v3
+      - name: Checkout
+        uses: actions/checkout@v3

-      - name: Set up Go
-        uses: actions/setup-go@v3
+      - name: Cargo cache # https://github.com/actions/cache/blob/main/examples.md#rust---cargo
+        uses: actions/cache@v3
         with:
-          go-version: 1.18
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+            target/
+          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}

-      - name: Build
-        run: go build -v .
+      - name: Install toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          target: ${{ matrix.toolchain }}
+          default: true

       - name: Test
-        run: go test -v .
+        uses: actions-rs/cargo@v1
+        with:
+          command: test
+          args: --all-features
+
+  build:
+    if: github.ref == 'refs/heads/master'
+    needs:
+      - test
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        include:
+          - os: ubuntu-latest
+            toolchain: x86_64-unknown-linux-musl
+            ext:
+            output: crunchy_linux
+          - os: windows-latest
+            toolchain: x86_64-pc-windows-msvc
+            ext: .exe
+            output: crunchy_windows.exe
+          - os: macos-latest
+            toolchain: x86_64-apple-darwin
+            ext:
+            output: crunchy_darwin
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Cargo cache # https://github.com/actions/cache/blob/main/examples.md#rust---cargo
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+            target/
+          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+
+      - name: Install toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          target: ${{ matrix.toolchain }}
+          default: true
+
+      - name: Build
+        uses: actions-rs/cargo@v1
+        with:
+          command: build
+          args: --release --all-features
+
+      - name: Bundle manpages
+        uses: thedoctor0/zip-release@0.6
+        with:
+          type: zip
+          filename: manpages.zip
+          path: ./target/release/manpages
+
+      - name: Bundle completions
+        uses: thedoctor0/zip-release@0.6
+        with:
+          type: zip
+          filename: completions.zip
+          path: ./target/release/completions
+
+      - name: Upload binary artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.output }}
+          path: ./target/release/crunchy-cli${{ matrix.ext }}
+          if-no-files-found: error
+
+      - name: Upload manpages artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name:
+          path: ./manpages.zip
+          if-no-files-found: error
+
+      - name: Upload completions artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name:
+          path: ./completions.zip
+          if-no-files-found: error
.github/workflows/codeql-analysis.yml (vendored, 67 changes, file deleted)

@@ -1,67 +0,0 @@ (previous contents)
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
  pull_request:
  schedule:
    - cron: '40 3 * * 2'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'go' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      # and modify them (or add more) to build your code if your project
      # uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
Cargo.lock (generated, new file, 1645 changes)

File diff suppressed because it is too large.
Cargo.toml (new file, 33 changes)

@@ -0,0 +1,33 @@
[package]
name = "crunchy-cli"
version = "0.1.0"
edition = "2021"

[features]
default = ["static-curl"]

# Embed a static curl library into the binary instead of just linking it.
static-curl = ["crunchy-cli-core/static-curl"]
# Embed a static openssl library into the binary instead of just linking it. If you want to compile this project against
# musl and have openssl issues, this might solve these issues.
static-ssl = ["crunchy-cli-core/static-ssl"]

[dependencies]
tokio = { version = "1.22", features = ["macros", "rt-multi-thread", "time"], default-features = false }

crunchy-cli-core = { path = "./crunchy-cli-core" }

[build-dependencies]
chrono = "0.4"
clap = { version = "4.0", features = ["string"] }
clap_complete = "4.0"
clap_mangen = "0.2"

# The static-* features must be used here since build dependency features cannot be manipulated from the features
# specified in this Cargo.toml [features].
crunchy-cli-core = { path = "./crunchy-cli-core", features = ["static-curl", "static-ssl"] }

[profile.release]
strip = true
opt-level = "z"
lto = true
Makefile (31 changes, file deleted)

@@ -1,31 +0,0 @@ (previous contents)
VERSION=development
BINARY_NAME=crunchy
VERSION_BINARY_NAME=$(BINARY_NAME)-v$(VERSION)

DESTDIR=
PREFIX=/usr

build:
	go build -ldflags "-X 'github.com/crunchy-labs/crunchy-cli/utils.Version=$(VERSION)'" -o $(BINARY_NAME) .

clean:
	rm -f $(BINARY_NAME) $(VERSION_BINARY_NAME)_*

install:
	install -Dm755 $(BINARY_NAME) $(DESTDIR)$(PREFIX)/bin/crunchy-cli
	ln -sf ./crunchy-cli $(DESTDIR)$(PREFIX)/bin/crunchy
	install -Dm644 crunchy-cli.1 $(DESTDIR)$(PREFIX)/share/man/man1/crunchy-cli.1
	install -Dm644 LICENSE $(DESTDIR)$(PREFIX)/share/licenses/crunchy-cli/LICENSE

uninstall:
	rm -f $(DESTDIR)$(PREFIX)/bin/crunchy-cli
	rm -f $(DESTDIR)$(PREFIX)/bin/crunchy
	rm -f $(DESTDIR)$(PREFIX)/share/man/man1/crunchy-cli.1
	rm -f $(DESTDIR)$(PREFIX)/share/licenses/crunchy-cli/LICENSE

release:
	CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "-X 'github.com/crunchy-labs/crunchy-cli/utils.Version=$(VERSION)'" -o $(VERSION_BINARY_NAME)_linux .
	CGO_ENABLED=0 GOOS=windows GOARCH=amd64 go build -ldflags "-X 'github.com/crunchy-labs/crunchy-cli/utils.Version=$(VERSION)'" -o $(VERSION_BINARY_NAME)_windows.exe .
	CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 go build -ldflags "-X 'github.com/crunchy-labs/crunchy-cli/utils.Version=$(VERSION)'" -o $(VERSION_BINARY_NAME)_darwin .

	strip $(VERSION_BINARY_NAME)_linux
README.md (244 changes)

@@ -1,6 +1,6 @@
 # crunchy-cli

-A [Go](https://golang.org) written cli client for [crunchyroll](https://www.crunchyroll.com). To use it, you need a crunchyroll premium account for full access & features.
+A [Rust](https://www.rust-lang.org/) written cli client for [Crunchyroll](https://www.crunchyroll.com).

 <p align="center">
   <a href="https://github.com/crunchy-labs/crunchy-cli">
@@ -15,184 +15,210 @@
   <a href="https://github.com/crunchy-labs/crunchy-cli/releases/latest">
     <img src="https://img.shields.io/github/v/release/crunchy-labs/crunchy-cli?style=flat-square" alt="Release">
   </a>
-  <a href="https://discord.gg/PXGPGpQxgk">
+  <a href="https://discord.gg/gUWwekeNNg">
     <img src="https://img.shields.io/discord/915659846836162561?label=discord&style=flat-square" alt="Discord">
   </a>
 </p>

 <p align="center">
-  <a href="#%EF%B8%8F-cli">CLI 🖥️</a>
+  <a href="#%EF%B8%8F-usage">Usage 🖥️</a>
   •
   <a href="#%EF%B8%8F-disclaimer">Disclaimer ☝️</a>
   •
   <a href="#-license">License ⚖</a>
 </p>

-_This repo was former known as **crunchyroll-go** (which still exists but now contains only the library part) but got split up into two separate repositories to provide more flexibility._
-> This tool relies on the [crunchyroll-go](https://github.com/crunchy-labs/crunchyroll-go) library to communicate with crunchyroll.
-> The library enters maintenance mode (only small fixes, no new features) with version v3 in favor of rewriting it completely in Rust.
-> **crunchy-cli** follows it (with version v2.3.0) and won't have major updates until the Rust rewrite of the library reaches a good usable state.
-
-# 🖥️ CLI
+> We are in no way affiliated with, maintained, authorized, sponsored, or officially associated with Crunchyroll LLC or any of its subsidiaries or affiliates.
+> The official Crunchyroll website can be found at https://crunchyroll.com/.

 ## ✨ Features

-- Download single videos and entire series from [crunchyroll](https://www.crunchyroll.com).
-- Archive episode or seasons in an `.mkv` file with multiple subtitles and audios and compress them to gzip or zip files.
+- Download single videos and entire series from [Crunchyroll](https://www.crunchyroll.com).
+- Archive episode or seasons in an `.mkv` file with multiple subtitles and audios.
 - Specify a range which episodes to download from an anime.

 ## 💾 Get the executable

-- 📥 Download the latest binaries [here](https://github.com/crunchy-labs/crunchy-cli/releases/latest) or get it from below:
-  - [Linux (x64)](https://smartrelease.bytedream.org/github/crunchy-labs/crunchy-cli/crunchy-{tag}_linux)
-  - [Windows (x64)](https://smartrelease.bytedream.org/github/crunchy-labs/crunchy-cli/crunchy-{tag}_windows.exe)
-  - [MacOS (x64)](https://smartrelease.bytedream.org/github/crunchy-labs/crunchy-cli/crunchy-{tag}_darwin)
-- If you use Arch btw. or any other Linux distro which is based on Arch Linux, you can download the package via the [AUR](https://aur.archlinux.org/packages/crunchyroll-go/):
-  ```shell
-  $ yay -S crunchy-cli
-  ```
-- On Windows [scoop](https://scoop.sh/) can be used to install it (added by [@AdmnJ](https://github.com/AdmnJ)):
-  ```shell
-  $ scoop bucket add extras # <- in case you haven't added the extra repository already
-  $ scoop install crunchyroll-go
-  ```
-- 🛠 Build it yourself. Must be done if your target platform is not covered by the [provided binaries](https://github.com/crunchy-labs/crunchy-cli/releases/latest) (like Raspberry Pi or M1 Mac):
-  - use `make` (requires `go` to be installed):
-    ```shell
-    $ git clone https://github.com/crunchy-labs/crunchy-cli
-    $ cd crunchy-cli
-    $ make
-    $ sudo make install # <- only if you want to install it on your system
-    ```
-  - use `go`:
-    ```shell
-    $ git clone https://github.com/crunchy-labs/crunchy-cli
-    $ cd crunchy-cli
-    $ go build -o crunchy .
-    ```
+### 📥 Download the latest binaries
+
+~~Checkout the [releases](https://github.com/crunchy-labs/crunchy-cli/releases) tab and get the binary from the newest release.~~
+
+Currently, no pre-built binary of the rewrite / this branch is available.
+
+### 🛠 Build it yourself
+
+Since we do not support every platform and architecture you may have to build the project yourself.
+This requires [git](https://git-scm.com/) and [Cargo](https://doc.rust-lang.org/cargo).
+
+```shell
+$ git clone https://github.com/crunchy-labs/crunchy-cli
+$ cd crunchy-cli
+$ cargo build --release
+```
+After the binary has built successfully it is available in `target/release`.

-## 📝 Examples
-
-_Before reading_: Because of the huge functionality not all cases can be covered in the README. Make sure to check the [wiki](https://github.com/crunchy-labs/crunchy-cli/wiki/Cli), further usages and options are described there.
+## 🖥️ Usage
+
+> All shown command are just examples
+
+Every command requires you to be logged in with an account.
+It doesn't matter if this account is premium or not, both works (but as free user you do not have access to premium content).
+You can pass your account via credentials (username & password) or refresh token.
+
+- Refresh Token
+  - To get the token you have to log in at [crunchyroll.com](https://www.crunchyroll.com/) and extract the `etp_rt` cookie.
+    The easiest way to get it is via a browser extension with lets you view your cookies, like [Cookie-Editor](https://cookie-editor.cgagnier.ca/) ([Firefox Store](https://addons.mozilla.org/en-US/firefox/addon/cookie-editor/); [Chrome Store](https://chrome.google.com/webstore/detail/cookie-editor/hlkenndednhfkekhgcdicdfddnkalmdm)).
+    If installed, search the `etp_rt` entry and extract the value.
+  - ```shell
+    $ crunchy --etp-rt "abcd1234-zyxw-9876-98zy-a1b2c3d4e5f6"
+    ```
+- Credentials
+  - Credentials must be provided as one single expression.
+    Username and password must be separated by a `:`.
+  - ```shell
+    $ crunchy --credentials "user:password"
+    ```

 ### Login

-Before you can do something, you have to log in first.
-This can be performed via crunchyroll account email and password.
+If you do not want to provide your credentials every time you execute a command, they can be stored permanently on disk.
+This can be done with the `login` subcommand.

 ```shell
-$ crunchy login user@example.com password
+$ crunchy --etp-rt "abcd1234-zyxw-9876-98zy-a1b2c3d4e5f6" login
 ```

-or via refresh token / `etp_rt` cookie
-
-```shell
-$ crunchy login --refresh-token 7578ce50-5712-3gef-b97e-01332d6b588c
-```
+Once set, you do not need to provide `--etp-rt` / `--credentials` anymore when using the cli.

 ### Download

-By default, the cli tries to download the episode with your system language as audio. If no streams with your system language are available, the video will be downloaded with japanese audio and hardsubbed subtitles in your system language.
-**If your system language is not supported, an error message will be displayed and en-US (american english) will be chosen as language.**
-
-```shell
-$ crunchy download https://www.crunchyroll.com/watch/GRDKJZ81Y/alone-and-lonesome
-```
-
-With `-r best` the video(s) will have the best available resolution (mostly 1920x1080 / Full HD).
-
-```shell
-$ crunchy download -r best https://www.crunchyroll.com/watch/GRDKJZ81Y/alone-and-lonesome
-```
-
-The file is by default saved as a `.ts` (mpeg transport stream) file.
-`.ts` files may can't be played or are looking very weird (it depends on the video player you are using). With the `-o` flag, you can change the name (and file ending) of the output file. So if you want to save it as, for example, `mp4` file, just name it `whatever.mp4`.
-**You need [ffmpeg](https://ffmpeg.org) to store the video in other file formats.**
-
-```shell
-$ crunchy download -o "daaaaaaaaaaaaaaaarling.ts" https://www.crunchyroll.com/watch/GRDKJZ81Y/alone-and-lonesome
-```
-
-With the `--audio` flag you can specify which audio the video should have and with `--subtitle` which subtitle it should have. Type `crunchy help download` to see all available locales.
-
-```shell
-$ crunchy download --audio ja-JP --subtitle de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
-```
-
-##### Flags
-
-The following flags can be (optional) passed to modify the [download](#download) process.
-
-| Short | Extended       | Description                                                                     |
-|-------|----------------|---------------------------------------------------------------------------------|
-| `-a`  | `--audio`      | Forces audio of the video(s).                                                   |
-| `-s`  | `--subtitle`   | Forces subtitle of the video(s).                                                |
-| `-d`  | `--directory`  | Directory to download the video(s) to.                                          |
-| `-o`  | `--output`     | Name of the output file.                                                        |
-| `-r`  | `--resolution` | The resolution of the video(s). `best` for best resolution, `worst` for worst.  |
-| `-g`  | `--goroutines` | Sets how many parallel segment downloads should be used.                        |
+**Supported urls**
+- Single episode
+  ```shell
+  $ crunchy download https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
+  ```
+- Episode range
+
+  If you want only specific episodes / seasons of an anime you can easily provide the series url along with a _filter_.
+  The filter has to be attached to the url. See the [wiki](https://github.com/crunchy-labs/crunchy-cli/wiki/Cli#filter) for more information
+  ```shell
+  $ crunchy download https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx[E1]
+  ```
+- Series
+  ```shell
+  $ crunchy download https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+
+**Options**
+- Audio language
+
+  Which audio the episode(s) should be can be set via the `-a` / `--audio` flag.
+  This only works if the url points to a series since episode urls are language specific.
+  ```shell
+  $ crunchy download -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+  Default is your system language. If not supported by Crunchyroll, `en-US` (American English) is the default.
+
+- Subtitle language
+
+  Besides the audio, it's also possible to specify which language the subtitles should have with the `-s` / `--subtitle` flag.
+  The subtitle will be hardsubbed (burned into the video) and thus, can't be turned off or on.
+  ```shell
+  $ crunchy download -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+  Default is no subtitle.
+
+- Output filename
+
+  You can specify the name of the output file with the `-o` / `--output` flag.
+  If you want to use any other file format than [`.ts`](https://en.wikipedia.org/wiki/MPEG_transport_stream) you need [ffmpeg](https://ffmpeg.org/).
+  ```shell
+  $ crunchy download -o "ditf.ts" https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
+  ```
+  Default is `{title}.ts`.
+
+- Resolution
+
+  The resolution for videos can be set via the `-r` / `--resolution` flag.
+  ```shell
+  $ crunchy download -r worst https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
+  ```
+  Default is `best`.

 ### Archive

-Archive works just like [download](#download). It downloads the given videos as `.mkv` files and stores all (soft) subtitles in it. Default audio locales are japanese and your system language (if available) but you can set more or less with
-the `--language` flag.
-
-Archive a file
-
-```shell
-$ crunchy archive https://www.crunchyroll.com/watch/GRDKJZ81Y/alone-and-lonesome
-```
-
-Downloads the first two episode of Darling in the FranXX and stores it compressed in a file.
-
-```shell
-$ crunchy archive -c "ditf.tar.gz" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
-```
-
-##### Flags
-
-The following flags can be (optional) passed to modify the [archive](#archive) process.
-
-| Short | Extended       | Description |
-|-------|----------------|-------------|
-| `-l`  | `--language`   | Audio locale which should be downloaded. Can be used multiple times. |
-| `-d`  | `--directory`  | Directory to download the video(s) to. |
-| `-o`  | `--output`     | Name of the output file. |
-| `-m`  | `--merge`      | Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio', 'video'. See the [wiki](https://github.com/crunchy-labs/crunchy-cli/wiki/Cli#archive) for more information. |
-| `-c`  | `--compress`   | If is set, all output will be compresses into an archive. This flag sets the name of the compressed output file and the file ending specifies the compression algorithm (gzip, tar, zip are supported). |
-| `-r`  | `--resolution` | The resolution of the video(s). `best` for best resolution, `worst` for worst. |
-| `-g`  | `--goroutines` | Sets how many parallel segment downloads should be used. |
-
-### Info
-
-The `info` displays some information about the account which is used for the cli.
-
-```shell
-$ crunchy info
-```
-
-### Update
-
-If you want to update your local version of `crunchy-cli`, this command makes this easier.
-It checks if a new version is available and if so, updates itself.
-
-```shell
-$ crunchy update
-```
-
-### Global flags
-
-These flags you can use across every sub-command:
-
-| Flag | Description |
-|------|------------------------------------------------------|
-| `-q` | Disables all output. |
-| `-v` | Shows additional debug output. |
-| `-p` | Use a proxy to hide your ip / redirect your traffic. |
+**Supported urls**
+- Series
+
+  Only series urls are supported since single episode urls are (audio) language locked.
+  ```shell
+  $ crunchy archive https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+
+**Options**
+- Audio languages
+
+  Which audios the episode(s) should be can be set via the `-a` / `--audio` flag.
+  ```shell
+  $ crunchy archive -a ja-JP -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+  Can be used multiple times.
+  Default is your system language (if not supported by Crunchyroll, `en-US` (American English) is the default) + `ja-JP` (Japanese).
+
+- Subtitle languages
+
+  Besides the audio, it's also possible to specify which languages the subtitles should have with the `-s` / `--subtitle` flag.
+  ```shell
+  $ crunchy archive -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+  Default is all subtitles.
+
+- Output filename
+
+  You can specify the name of the output file with the `-o` / `--output` flag.
+  The only supported file / container format is [`.mkv`](https://en.wikipedia.org/wiki/Matroska) since it stores / can store multiple audio, video and subtitle streams.
+  ```shell
+  $ crunchy archive -o "{title}.mkv" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+  Default is `{title}.mkv`.
+
+- Resolution
+
+  The resolution for videos can be set via the `-r` / `--resolution` flag.
+  ```shell
+  $ crunchy archive -r worst https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+  Default is `best`.
+
+- Merge behavior
+
+  Because of local restrictions (or other reasons) some episodes with different languages does not have the same length (e.g. when some scenes were cut out).
+  The ideal state, when multiple audios & subtitles used, would be if only one _video_ has to be stored and all other languages can be stored as audio-only.
+  But, as said, this is not always the case.
+  With the `-m` / `--merge` flag you can set what you want to do if some video lengths differ.
+  Valid options are `audio` - store one video and all other languages as audio only; `video` - store the video + audio for every language; `auto` - detect if videos differ in length: if so, behave like `video` else like `audio`.
+  Subtitles will always match to the first / primary audio and video.
+  ```shell
+  $ crunchy archive -m audio https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+  Default is `auto`.
+
+- Default subtitle
+
+  `--default_subtitle` set which subtitle language should be set as default / auto appear when starting the downloaded video(s).
+  ```shell
+  $ crunchy archive --default_subtitle en-US https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```
+  Default is none.
+
+- No subtitle optimizations
+
+  Subtitles, as Crunchyroll delivers them, look weird in some video players (#66).
+  This can be fixed by adding a specific entry to the subtitles.
+  But since this entry is only a de-facto standard and not represented in the official specification of the subtitle format ([`.ass`](https://en.wikipedia.org/wiki/SubStation_Alpha)) it could cause issues with some video players (but no issue got reported so far, so it's relatively safe to use).
+  `--no_subtitle_optimizations` can disable these optimizations.
+  ```shell
+  $ crunchy archive --no_subtitle_optimizations https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
+  ```

 # ☝️ Disclaimer
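Editorial note (not part of the diff above): the `auto` merge behavior described in the new README — keep a single video and store the other languages as audio-only, unless the per-language videos differ — can be illustrated with a minimal Rust sketch. The `Stream` type and its `video_len` field are hypothetical stand-ins, not the actual types of the rewrite.

```rust
/// How differing video lengths are handled when merging multiple audios into one .mkv.
#[derive(Clone, Copy)]
enum MergeBehavior {
    Auto,
    Audio,
    Video,
}

/// Hypothetical per-language stream description (length of the video track in seconds).
struct Stream {
    video_len: u64,
}

/// Returns true if only the primary video has to be kept and all other
/// languages can be stored as audio-only tracks.
fn store_audio_only(behavior: MergeBehavior, primary: &Stream, additional: &[Stream]) -> bool {
    match behavior {
        MergeBehavior::Audio => true,
        MergeBehavior::Video => false,
        // `auto`: audio-only is fine as long as every additional stream matches
        // the primary video's length
        MergeBehavior::Auto => additional.iter().all(|s| s.video_len == primary.video_len),
    }
}

fn main() {
    let primary = Stream { video_len: 1440 };
    let additional = [Stream { video_len: 1437 }];
    // lengths differ, so `auto` falls back to storing full videos
    assert!(!store_audio_only(MergeBehavior::Auto, &primary, &additional));
    assert!(store_audio_only(MergeBehavior::Audio, &primary, &additional));
    assert!(!store_audio_only(MergeBehavior::Video, &primary, &additional));
    println!("auto -> audio-only: {}", store_audio_only(MergeBehavior::Auto, &primary, &additional));
}
```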
112
build.rs
Normal file
112
build.rs
Normal file
|
|
@ -0,0 +1,112 @@
|
||||||
|
use clap::{Command, CommandFactory};
|
||||||
|
use clap_complete::shells;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
// this build file generates completions for various shells as well as manual pages
|
||||||
|
|
||||||
|
fn main() -> std::io::Result<()> {
|
||||||
|
// do not generate anything when building non release
|
||||||
|
if cfg!(debug_assertions) {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// note that we're using an anti-pattern here / violate the rust conventions. build script are
|
||||||
|
// not supposed to write outside of 'OUT_DIR'. to have the generated files in the build "root"
|
||||||
|
// (the same directory where the output binary lives) is much simpler than in 'OUT_DIR' since
|
||||||
|
// its nested in sub directories and is difficult to find (at least more difficult than in the
|
||||||
|
// build root)
|
||||||
|
let unconventional_out_dir =
|
||||||
|
std::path::PathBuf::from(std::env::var_os("OUT_DIR").ok_or(std::io::ErrorKind::NotFound)?)
|
||||||
|
.parent()
|
||||||
|
.unwrap()
|
||||||
|
.parent()
|
||||||
|
.unwrap()
|
||||||
|
.parent()
|
||||||
|
.unwrap()
|
||||||
|
.to_path_buf();
|
||||||
|
|
||||||
|
let completions_dir = exist_or_create_dir(unconventional_out_dir.join("completions"))?;
|
||||||
|
let manpage_dir = exist_or_create_dir(unconventional_out_dir.join("manpages"))?;
|
||||||
|
|
||||||
|
generate_completions(completions_dir)?;
|
||||||
|
generate_manpages(manpage_dir)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn exist_or_create_dir(path: PathBuf) -> std::io::Result<PathBuf> {
|
||||||
|
if !path.exists() {
|
||||||
|
std::fs::create_dir(path.clone())?
|
||||||
|
}
|
||||||
|
Ok(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn generate_completions(out_dir: PathBuf) -> std::io::Result<()> {
|
||||||
|
let mut command: Command = crunchy_cli_core::Cli::command();
|
||||||
|
|
||||||
|
clap_complete::generate_to(
|
||||||
|
shells::Bash,
|
||||||
|
&mut command.clone(),
|
||||||
|
"crunchy-cli",
|
||||||
|
out_dir.clone(),
|
||||||
|
)?;
|
||||||
|
clap_complete::generate_to(
|
||||||
|
shells::Elvish,
|
||||||
|
&mut command.clone(),
|
||||||
|
"crunchy-cli",
|
||||||
|
out_dir.clone(),
|
||||||
|
)?;
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
clap_complete::generate_to(
|
||||||
|
shells::Fish,
|
||||||
|
&mut command.clone(),
|
||||||
|
"crunchy-cli",
|
||||||
|
out_dir.clone(),
|
||||||
|
)?
|
||||||
|
.to_string_lossy()
|
||||||
|
);
|
||||||
|
clap_complete::generate_to(
|
||||||
|
shells::PowerShell,
|
||||||
|
&mut command.clone(),
|
||||||
|
"crunchy-cli",
|
||||||
|
out_dir.clone(),
|
||||||
|
)?;
|
||||||
|
clap_complete::generate_to(shells::Zsh, &mut command, "crunchy-cli", out_dir)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn generate_manpages(out_dir: PathBuf) -> std::io::Result<()> {
|
||||||
|
fn generate_command_manpage(
|
||||||
|
mut command: Command,
|
||||||
|
base_path: &Path,
|
||||||
|
sub_name: &str,
|
||||||
|
) -> std::io::Result<()> {
|
||||||
|
let (file_name, title) = if sub_name.is_empty() {
|
||||||
|
command = command.name("crunchy-cli");
|
||||||
|
("crunchy-cli.1".to_string(), "crunchy-cli".to_string())
|
||||||
|
} else {
|
||||||
|
command = command.name(format!("crunchy-cli {}", sub_name));
|
||||||
|
(
|
||||||
|
format!("crunchy-cli-{}.1", sub_name),
|
||||||
|
format!("crunchy-cli-{}", sub_name),
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut command_buf = vec![];
|
||||||
|
let man = clap_mangen::Man::new(command)
|
||||||
|
.title(title)
|
||||||
|
.date(chrono::Utc::now().format("%b %d, %Y").to_string());
|
||||||
|
man.render(&mut command_buf)?;
|
||||||
|
|
||||||
|
std::fs::write(base_path.join(file_name), command_buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
generate_command_manpage(crunchy_cli_core::Cli::command(), &out_dir, "")?;
|
||||||
|
generate_command_manpage(crunchy_cli_core::Archive::command(), &out_dir, "archive")?;
|
||||||
|
generate_command_manpage(crunchy_cli_core::Download::command(), &out_dir, "download")?;
|
||||||
|
generate_command_manpage(crunchy_cli_core::Login::command(), &out_dir, "login")?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
@ -1,849 +0,0 @@
|
||||||
package archive
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"bytes"
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"math"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"os/signal"
|
|
||||||
"path/filepath"
|
|
||||||
"regexp"
|
|
||||||
"runtime"
|
|
||||||
"sort"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/cli/commands"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/utils"
|
|
||||||
"github.com/crunchy-labs/crunchyroll-go/v3"
|
|
||||||
crunchyUtils "github.com/crunchy-labs/crunchyroll-go/v3/utils"
|
|
||||||
"github.com/grafov/m3u8"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
archiveLanguagesFlag []string
|
|
||||||
archiveSubLanguagesFlag []string
|
|
||||||
|
|
||||||
archiveDirectoryFlag string
|
|
||||||
archiveOutputFlag string
|
|
||||||
archiveTempDirFlag string
|
|
||||||
|
|
||||||
archiveMergeFlag string
|
|
||||||
|
|
||||||
archiveCompressFlag string
|
|
||||||
|
|
||||||
archiveResolutionFlag string
|
|
||||||
|
|
||||||
archiveGoroutinesFlag int
|
|
||||||
|
|
||||||
archiveNoSubtitleOptimizations bool
|
|
||||||
)
|
|
||||||
|
|
||||||
var Cmd = &cobra.Command{
|
|
||||||
Use: "archive",
|
|
||||||
Short: "Stores the given videos with all subtitles and multiple audios in a .mkv file",
|
|
||||||
Args: cobra.MinimumNArgs(1),
|
|
||||||
|
|
||||||
PreRunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
utils.Log.Debug("Validating arguments")
|
|
||||||
|
|
||||||
if !utils.HasFFmpeg() {
|
|
||||||
return fmt.Errorf("ffmpeg is needed to run this command correctly")
|
|
||||||
}
|
|
||||||
utils.Log.Debug("FFmpeg detected")
|
|
||||||
|
|
||||||
if filepath.Ext(archiveOutputFlag) != ".mkv" {
|
|
||||||
return fmt.Errorf("currently only matroska / .mkv files are supported")
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, locale := range archiveLanguagesFlag {
|
|
||||||
if !crunchyUtils.ValidateLocale(crunchyroll.LOCALE(locale)) {
|
|
||||||
// if locale is 'all', match all known locales
|
|
||||||
if locale == "all" {
|
|
||||||
archiveLanguagesFlag = utils.LocalesAsStrings()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return fmt.Errorf("%s is not a valid locale. Choose from: %s", locale, strings.Join(utils.LocalesAsStrings(), ", "))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
utils.Log.Debug("Using following audio locales: %s", strings.Join(archiveLanguagesFlag, ", "))
|
|
||||||
|
|
||||||
for _, locale := range archiveSubLanguagesFlag {
|
|
||||||
if !crunchyUtils.ValidateLocale(crunchyroll.LOCALE(locale)) {
|
|
||||||
// if locale is 'all', match all known locales
|
|
||||||
if locale == "all" {
|
|
||||||
archiveSubLanguagesFlag = utils.LocalesAsStrings()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return fmt.Errorf("%s is not a valid locale for Subtitels. Choose from: %s", locale, strings.Join(utils.LocalesAsStrings(), ", "))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
utils.Log.Debug("Using following subtitels locales: %s", strings.Join(archiveSubLanguagesFlag, ", "))
|
|
||||||
|
|
||||||
var found bool
|
|
||||||
for _, mode := range []string{"auto", "audio", "video"} {
|
|
||||||
if archiveMergeFlag == mode {
|
|
||||||
utils.Log.Debug("Using %s merge behavior", archiveMergeFlag)
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !found {
|
|
||||||
return fmt.Errorf("'%s' is no valid merge flag. Use 'auto', 'audio' or 'video'", archiveMergeFlag)
|
|
||||||
}
|
|
||||||
|
|
||||||
if archiveCompressFlag != "" {
|
|
||||||
found = false
|
|
||||||
for _, algo := range []string{".tar", ".tar.gz", ".tgz", ".zip"} {
|
|
||||||
if strings.HasSuffix(archiveCompressFlag, algo) {
|
|
||||||
utils.Log.Debug("Using %s compression", algo)
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !found {
|
|
||||||
return fmt.Errorf("'%s' is no valid compress algorithm. Valid algorithms / file endings are '.tar', '.tar.gz', '.zip'",
|
|
||||||
archiveCompressFlag)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
switch archiveResolutionFlag {
|
|
||||||
case "1080p", "720p", "480p", "360p":
|
|
||||||
intRes, _ := strconv.ParseFloat(strings.TrimSuffix(archiveResolutionFlag, "p"), 84)
|
|
||||||
archiveResolutionFlag = fmt.Sprintf("%.0fx%s", math.Ceil(intRes*(float64(16)/float64(9))), strings.TrimSuffix(archiveResolutionFlag, "p"))
|
|
||||||
case "240p":
|
|
||||||
// 240p would round up to 427x240 if used in the case statement above, so it has to be handled separately
|
|
||||||
archiveResolutionFlag = "428x240"
|
|
||||||
case "1920x1080", "1280x720", "640x480", "480x360", "428x240", "best", "worst":
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("'%s' is not a valid resolution", archiveResolutionFlag)
|
|
||||||
}
|
|
||||||
utils.Log.Debug("Using resolution '%s'", archiveResolutionFlag)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
if err := commands.LoadCrunchy(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return archive(args)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
Cmd.Flags().StringSliceVarP(&archiveLanguagesFlag,
|
|
||||||
"language",
|
|
||||||
"l",
|
|
||||||
[]string{string(utils.SystemLocale(false)), string(crunchyroll.JP)},
|
|
||||||
"Audio locale which should be downloaded. Can be used multiple times")
|
|
||||||
|
|
||||||
Cmd.Flags().StringSliceVarP(&archiveSubLanguagesFlag,
|
|
||||||
"sublang",
|
|
||||||
"s",
|
|
||||||
utils.LocalesAsStrings(),
|
|
||||||
"Subtitles langs which should be downloaded. Can be used multiple times")
|
|
||||||
|
|
||||||
cwd, _ := os.Getwd()
|
|
||||||
|
|
||||||
Cmd.Flags().StringVarP(&archiveDirectoryFlag,
|
|
||||||
"directory",
|
|
||||||
"d",
|
|
||||||
cwd,
|
|
||||||
"The directory to store the files into")
|
|
||||||
|
|
||||||
Cmd.Flags().StringVarP(&archiveOutputFlag,
|
|
||||||
"output",
|
|
||||||
"o",
|
|
||||||
"{title}.mkv",
|
|
||||||
"Name of the output file. If you use the following things in the name, the will get replaced:\n"+
|
|
||||||
"\t{title} » Title of the video\n"+
|
|
||||||
"\t{series_name} » Name of the series\n"+
|
|
||||||
"\t{season_name} » Name of the season\n"+
|
|
||||||
"\t{season_number} » Number of the season\n"+
|
|
||||||
"\t{episode_number} » Number of the episode\n"+
|
|
||||||
"\t{resolution} » Resolution of the video\n"+
|
|
||||||
"\t{fps} » Frame Rate of the video\n"+
|
|
||||||
"\t{audio} » Audio locale of the video\n"+
|
|
||||||
"\t{subtitle} » Subtitle locale of the video")
|
|
||||||
|
|
||||||
Cmd.Flags().StringVar(&archiveTempDirFlag,
|
|
||||||
"temp",
|
|
||||||
os.TempDir(),
|
|
||||||
"Directory to store temporary files in")
|
|
||||||
|
|
||||||
Cmd.Flags().StringVarP(&archiveMergeFlag,
|
|
||||||
"merge",
|
|
||||||
"m",
|
|
||||||
"auto",
|
|
||||||
"Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio', 'video'")
|
|
||||||
|
|
||||||
Cmd.Flags().StringVarP(&archiveCompressFlag,
|
|
||||||
"compress",
|
|
||||||
"c",
|
|
||||||
"",
|
|
||||||
"If is set, all output will be compresses into an archive (every url generates a new one). "+
|
|
||||||
"This flag sets the name of the compressed output file. The file ending specifies the compression algorithm. "+
|
|
||||||
"The following algorithms are supported: gzip, tar, zip")
|
|
||||||
|
|
||||||
Cmd.Flags().StringVarP(&archiveResolutionFlag,
|
|
||||||
"resolution",
|
|
||||||
"r",
|
|
||||||
"best",
|
|
||||||
"The video resolution. Can either be specified via the pixels, the abbreviation for pixels, or 'common-use' words\n"+
|
|
||||||
"\tAvailable pixels: 1920x1080, 1280x720, 640x480, 480x360, 428x240\n"+
|
|
||||||
"\tAvailable abbreviations: 1080p, 720p, 480p, 360p, 240p\n"+
|
|
||||||
"\tAvailable common-use words: best (best available resolution), worst (worst available resolution)")
|
|
||||||
|
|
||||||
Cmd.Flags().IntVarP(&archiveGoroutinesFlag,
|
|
||||||
"goroutines",
|
|
||||||
"g",
|
|
||||||
runtime.NumCPU(),
|
|
||||||
"Number of parallel segment downloads")
|
|
||||||
|
|
||||||
Cmd.Flags().BoolVar(&archiveNoSubtitleOptimizations,
|
|
||||||
"no-subtitle-optimizations",
|
|
||||||
false,
|
|
||||||
"Disable subtitle optimizations. See https://github.com/crunchy-labs/crunchy-cli/issues/66 for more information")
|
|
||||||
}
|
|
||||||
|
|
||||||
func archive(urls []string) error {
|
|
||||||
for i, url := range urls {
|
|
||||||
utils.Log.SetProcess("Parsing url %d", i+1)
|
|
||||||
episodes, err := archiveExtractEpisodes(url)
|
|
||||||
if err != nil {
|
|
||||||
utils.Log.StopProcess("Failed to parse url %d", i+1)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
utils.Log.StopProcess("Parsed url %d", i+1)
|
|
||||||
|
|
||||||
var compressFile *os.File
|
|
||||||
var c Compress
|
|
||||||
|
|
||||||
if archiveCompressFlag != "" {
|
|
||||||
compressFile, err = os.Create(utils.GenerateFilename(archiveCompressFlag, ""))
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to create archive file: %v", err)
|
|
||||||
}
|
|
||||||
if strings.HasSuffix(archiveCompressFlag, ".tar") {
|
|
||||||
c = NewTarCompress(compressFile)
|
|
||||||
} else if strings.HasSuffix(archiveCompressFlag, ".tar.gz") || strings.HasSuffix(archiveCompressFlag, ".tgz") {
|
|
||||||
c = NewGzipCompress(compressFile)
|
|
||||||
} else if strings.HasSuffix(archiveCompressFlag, ".zip") {
|
|
||||||
c = NewZipCompress(compressFile)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, season := range episodes {
|
|
||||||
utils.Log.Info("%s Season %d", season[0].SeriesName, season[0].SeasonNumber)
|
|
||||||
|
|
||||||
for j, info := range season {
|
|
||||||
utils.Log.Info("\t%d. %s » %spx, %.2f FPS (S%02dE%02d)",
|
|
||||||
j+1,
|
|
||||||
info.Title,
|
|
||||||
info.Resolution,
|
|
||||||
info.FPS,
|
|
||||||
info.SeasonNumber,
|
|
||||||
info.EpisodeNumber)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
utils.Log.Empty()
|
|
||||||
|
|
||||||
for j, season := range episodes {
|
|
||||||
for k, info := range season {
|
|
||||||
var filename string
|
|
||||||
var writeCloser io.WriteCloser
|
|
||||||
if c != nil {
|
|
||||||
filename = info.FormatString(archiveOutputFlag)
|
|
||||||
writeCloser, err = c.NewFile(info)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to pre generate new archive file: %v", err)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
dir := info.FormatString(archiveDirectoryFlag)
|
|
||||||
if _, err = os.Stat(dir); os.IsNotExist(err) {
|
|
||||||
if err = os.MkdirAll(dir, 0777); err != nil {
|
|
||||||
return fmt.Errorf("error while creating directory: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
filename = utils.GenerateFilename(info.FormatString(archiveOutputFlag), dir)
|
|
||||||
writeCloser, err = os.Create(filename)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to create new file: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = archiveInfo(info, writeCloser, filename); err != nil {
|
|
||||||
writeCloser.Close()
|
|
||||||
if f, ok := writeCloser.(*os.File); ok {
|
|
||||||
os.Remove(f.Name())
|
|
||||||
} else {
|
|
||||||
c.Close()
|
|
||||||
compressFile.Close()
|
|
||||||
os.RemoveAll(compressFile.Name())
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
writeCloser.Close()
|
|
||||||
|
|
||||||
if i != len(urls)-1 || j != len(episodes)-1 || k != len(season)-1 {
|
|
||||||
utils.Log.Empty()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if c != nil {
|
|
||||||
c.Close()
|
|
||||||
}
|
|
||||||
if compressFile != nil {
|
|
||||||
compressFile.Close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func archiveInfo(info utils.FormatInformation, writeCloser io.WriteCloser, filename string) error {
|
|
||||||
utils.Log.Debug("Entering season %d, episode %d with %d additional formats", info.SeasonNumber, info.EpisodeNumber, len(info.AdditionalFormats))
|
|
||||||
|
|
||||||
dp, err := createArchiveProgress(info)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("error while setting up downloader: %v", err)
|
|
||||||
}
|
|
||||||
defer func() {
|
|
||||||
if dp.Total != dp.Current {
|
|
||||||
fmt.Println()
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
rootFile, err := os.CreateTemp("", fmt.Sprintf("%s_*.ts", strings.TrimSuffix(filepath.Base(filename), filepath.Ext(filename))))
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to create temp file: %v", err)
|
|
||||||
}
|
|
||||||
defer os.Remove(rootFile.Name())
|
|
||||||
defer rootFile.Close()
|
|
||||||
|
|
||||||
ctx, cancel := context.WithCancel(context.Background())
|
|
||||||
defer cancel()
|
|
||||||
downloader := crunchyroll.NewDownloader(ctx, rootFile, archiveGoroutinesFlag, func(segment *m3u8.MediaSegment, current, total int, file *os.File) error {
|
|
||||||
// check if the context was cancelled.
|
|
||||||
// must be done in to not print any progress messages if ctrl+c was pressed
|
|
||||||
if ctx.Err() != nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if utils.Log.IsDev() {
|
|
||||||
dp.UpdateMessage(fmt.Sprintf("Downloading %d/%d (%.2f%%) » %s", current, total, float32(current)/float32(total)*100, segment.URI), false)
|
|
||||||
} else {
|
|
||||||
dp.Update()
|
|
||||||
}
|
|
||||||
|
|
||||||
if current == total {
|
|
||||||
dp.UpdateMessage("Merging segments", false)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
tmp, _ := os.MkdirTemp(archiveTempDirFlag, "crunchy_")
|
|
||||||
downloader.TempDir = tmp
|
|
||||||
|
|
||||||
sig := make(chan os.Signal, 1)
|
|
||||||
signal.Notify(sig, os.Interrupt)
|
|
||||||
go func() {
|
|
||||||
select {
|
|
||||||
case <-sig:
|
|
||||||
signal.Stop(sig)
|
|
||||||
utils.Log.Err("Exiting... (may take a few seconds)")
|
|
||||||
utils.Log.Err("To force exit press ctrl+c (again)")
|
|
||||||
cancel()
|
|
||||||
// os.Exit(1) is not called since an immediate exit after the cancel function does not let
|
|
||||||
// the download process enough time to stop gratefully. A result of this is that the temporary
|
|
||||||
// directory where the segments are downloaded to will not be deleted
|
|
||||||
case <-ctx.Done():
|
|
||||||
// this is just here to end the goroutine and prevent it from running forever without a reason
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
utils.Log.Debug("Set up signal catcher")
|
|
||||||
|
|
||||||
var additionalDownloaderOpts []string
|
|
||||||
var mergeMessage string
|
|
||||||
switch archiveMergeFlag {
|
|
||||||
case "auto":
|
|
||||||
additionalDownloaderOpts = []string{"-vn"}
|
|
||||||
for _, format := range info.AdditionalFormats {
|
|
||||||
if format.Video.Bandwidth != info.Format.Video.Bandwidth {
|
|
||||||
// revoke the changed FFmpegOpts above
|
|
||||||
additionalDownloaderOpts = []string{}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(additionalDownloaderOpts) > 0 {
|
|
||||||
mergeMessage = "merging audio for additional formats"
|
|
||||||
} else {
|
|
||||||
mergeMessage = "merging video for additional formats"
|
|
||||||
}
|
|
||||||
case "audio":
|
|
||||||
additionalDownloaderOpts = []string{"-vn"}
|
|
||||||
mergeMessage = "merging audio for additional formats"
|
|
||||||
case "video":
|
|
||||||
mergeMessage = "merging video for additional formats"
|
|
||||||
}
|
|
||||||
|
|
||||||
utils.Log.Info("Downloading episode `%s` to `%s` (%s)", info.Title, filepath.Base(filename), mergeMessage)
|
|
||||||
utils.Log.Info("\tEpisode: S%02dE%02d", info.SeasonNumber, info.EpisodeNumber)
|
|
||||||
utils.Log.Info("\tAudio: %s", info.Audio)
|
|
||||||
utils.Log.Info("\tSubtitle: %s", info.Subtitle)
|
|
||||||
utils.Log.Info("\tResolution: %spx", info.Resolution)
|
|
||||||
utils.Log.Info("\tFPS: %.2f", info.FPS)
|
|
||||||
|
|
||||||
var videoFiles, audioFiles, subtitleFiles []string
|
|
||||||
defer func() {
|
|
||||||
for _, f := range append(append(videoFiles, audioFiles...), subtitleFiles...) {
|
|
||||||
os.RemoveAll(f)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
var f []string
|
|
||||||
if f, err = archiveDownloadVideos(downloader, filepath.Base(filename), true, info.Format); err != nil {
|
|
||||||
if err != ctx.Err() {
|
|
||||||
return fmt.Errorf("error while downloading: %v", err)
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
videoFiles = append(videoFiles, f[0])
|
|
||||||
|
|
||||||
if len(additionalDownloaderOpts) == 0 {
|
|
||||||
var videos []string
|
|
||||||
downloader.FFmpegOpts = additionalDownloaderOpts
|
|
||||||
if videos, err = archiveDownloadVideos(downloader, filepath.Base(filename), true, info.AdditionalFormats...); err != nil {
|
|
||||||
return fmt.Errorf("error while downloading additional videos: %v", err)
|
|
||||||
}
|
|
||||||
downloader.FFmpegOpts = []string{}
|
|
||||||
videoFiles = append(videoFiles, videos...)
|
|
||||||
} else {
|
|
||||||
var audios []string
|
|
||||||
if audios, err = archiveDownloadVideos(downloader, filepath.Base(filename), false, info.AdditionalFormats...); err != nil {
|
|
||||||
return fmt.Errorf("error while downloading additional videos: %v", err)
|
|
||||||
}
|
|
||||||
audioFiles = append(audioFiles, audios...)
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Sort(crunchyUtils.SubtitlesByLocale(info.Format.Subtitles))
|
|
||||||
|
|
||||||
sortSubtitles, _ := strconv.ParseBool(os.Getenv("SORT_SUBTITLES"))
|
|
||||||
if sortSubtitles && len(archiveLanguagesFlag) > 0 {
|
|
||||||
// this sort the subtitle locales after the languages which were specified
|
|
||||||
// with the `archiveLanguagesFlag` flag
|
|
||||||
for _, language := range archiveLanguagesFlag {
|
|
||||||
for i, subtitle := range info.Format.Subtitles {
|
|
||||||
if subtitle.Locale == crunchyroll.LOCALE(language) {
|
|
||||||
info.Format.Subtitles = append([]*crunchyroll.Subtitle{subtitle}, append(info.Format.Subtitles[:i], info.Format.Subtitles[i+1:]...)...)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var subtitles []string
|
|
||||||
if subtitles, err = archiveDownloadSubtitles(filepath.Base(filename), info.Format.Subtitles...); err != nil {
|
|
||||||
return fmt.Errorf("error while downloading subtitles: %v", err)
|
|
||||||
}
|
|
||||||
subtitleFiles = append(subtitleFiles, subtitles...)
|
|
||||||
|
|
||||||
if err = archiveFFmpeg(ctx, writeCloser, videoFiles, audioFiles, subtitleFiles); err != nil {
|
|
||||||
return fmt.Errorf("failed to merge files: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
dp.UpdateMessage("Download finished", false)
|
|
||||||
|
|
||||||
signal.Stop(sig)
|
|
||||||
utils.Log.Debug("Stopped signal catcher")
|
|
||||||
|
|
||||||
utils.Log.Empty()
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func createArchiveProgress(info utils.FormatInformation) (*commands.DownloadProgress, error) {
|
|
||||||
var progressCount int
|
|
||||||
if err := info.Format.InitVideo(); err != nil {
|
|
||||||
return nil, fmt.Errorf("error while initializing a video: %v", err)
|
|
||||||
}
|
|
||||||
// + number of segments a video has +1 is for merging
|
|
||||||
progressCount += int(info.Format.Video.Chunklist.Count()) + 1
|
|
||||||
for _, f := range info.AdditionalFormats {
|
|
||||||
if f == info.Format {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := f.InitVideo(); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// + number of segments a video has +1 is for merging
|
|
||||||
progressCount += int(f.Video.Chunklist.Count()) + 1
|
|
||||||
}
|
|
||||||
|
|
||||||
dp := &commands.DownloadProgress{
|
|
||||||
Prefix: utils.Log.(*commands.Logger).InfoLog.Prefix(),
|
|
||||||
Message: "Downloading video",
|
|
||||||
// number of segments a video +1 is for the success message
|
|
||||||
Total: progressCount + 1,
|
|
||||||
Dev: utils.Log.IsDev(),
|
|
||||||
Quiet: utils.Log.(*commands.Logger).IsQuiet(),
|
|
||||||
}
|
|
||||||
if utils.Log.IsDev() {
|
|
||||||
dp.Prefix = utils.Log.(*commands.Logger).DebugLog.Prefix()
|
|
||||||
}
|
|
||||||
|
|
||||||
return dp, nil
|
|
||||||
}
|
|
||||||

func archiveDownloadVideos(downloader crunchyroll.Downloader, filename string, video bool, formats ...*crunchyroll.Format) ([]string, error) {
	var files []string

	for _, format := range formats {
		var name string
		if video {
			name = fmt.Sprintf("%s_%s_video_*.ts", filename, format.AudioLocale)
		} else {
			name = fmt.Sprintf("%s_%s_audio_*.aac", filename, format.AudioLocale)
		}

		f, err := os.CreateTemp("", name)
		if err != nil {
			return nil, err
		}
		files = append(files, f.Name())

		downloader.Writer = f
		if err = format.Download(downloader); err != nil {
			f.Close()
			for _, file := range files {
				os.Remove(file)
			}
			return nil, err
		}
		f.Close()

		utils.Log.Debug("Downloaded '%s' video", format.AudioLocale)
	}

	return files, nil
}
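// Illustrative note (not part of the original source): os.CreateTemp replaces the `*`
// in the pattern with a random string, so the ja-JP track of `episode.ts` ends up in
// something like `episode.ts_ja-JP_video_1234567890.ts`; the locale embedded in the
// temp file name is what archiveFFmpeg later parses back out of the filename.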

func archiveDownloadSubtitles(filename string, subtitles ...*crunchyroll.Subtitle) ([]string, error) {
	var files []string

	for _, subtitle := range subtitles {
		if !utils.ElementInSlice(string(subtitle.Locale), archiveSubLanguagesFlag) {
			continue
		}

		f, err := os.CreateTemp("", fmt.Sprintf("%s_%s_subtitle_*.ass", filename, subtitle.Locale))
		if err != nil {
			return nil, err
		}
		files = append(files, f.Name())

		buffer := &bytes.Buffer{}

		if err := subtitle.Save(buffer); err != nil {
			f.Close()
			for _, file := range files {
				os.Remove(file)
			}
			return nil, err
		}

		if !archiveNoSubtitleOptimizations {
			buffer2 := &bytes.Buffer{}
			var scriptInfo bool
			for _, line := range strings.Split(buffer.String(), "\n") {
				if scriptInfo && strings.HasPrefix(strings.TrimSpace(line), "[") {
					buffer2.WriteString("ScaledBorderAndShadows: yes\n")
					scriptInfo = false
				} else if strings.TrimSpace(line) == "[Script Info]" {
					scriptInfo = true
				}
				buffer2.WriteString(line + "\n")
			}

			if _, err = io.Copy(f, buffer2); err != nil {
				return nil, err
			}
		} else {
			if _, err = io.Copy(f, buffer); err != nil {
				return nil, err
			}
		}

		f.Close()

		utils.Log.Debug("Downloaded '%s' subtitles", subtitle.Locale)
	}

	return files, nil
}
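// Illustrative note (not part of the original source): the "optimization" above waits
// for the `[Script Info]` section of the downloaded .ass file and injects a
// `ScaledBorderAndShadows: yes` line right before the next section header, e.g.
//
//   [Script Info]
//   Title: ...
//   ScaledBorderAndShadows: yes
//   [V4+ Styles]
//
// the intent being that players scale subtitle borders and shadows with the video
// resolution.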

func archiveFFmpeg(ctx context.Context, dst io.Writer, videoFiles, audioFiles, subtitleFiles []string) error {
	var input, maps, metadata []string
	re := regexp.MustCompile(`(?m)_([a-z]{2}-([A-Z]{2}|[0-9]{3}))_(video|audio|subtitle)`)
	// https://github.com/crunchy-labs/crunchy-cli/issues/32
	videoLength32Fix := regexp.MustCompile(`Duration:\s?(\d+):(\d+):(\d+).(\d+),`)

	videoLength := [4]int{0, 0, 0, 0}

	for i, video := range videoFiles {
		input = append(input, "-i", video)
		maps = append(maps, "-map", strconv.Itoa(i))
		locale := crunchyroll.LOCALE(re.FindStringSubmatch(video)[1])
		metadata = append(metadata, fmt.Sprintf("-metadata:s:v:%d", i), fmt.Sprintf("language=%s", locale))
		metadata = append(metadata, fmt.Sprintf("-metadata:s:v:%d", i), fmt.Sprintf("title=%s", crunchyUtils.LocaleLanguage(locale)))
		metadata = append(metadata, fmt.Sprintf("-metadata:s:a:%d", i), fmt.Sprintf("language=%s", locale))
		metadata = append(metadata, fmt.Sprintf("-metadata:s:a:%d", i), fmt.Sprintf("title=%s", crunchyUtils.LocaleLanguage(locale)))

		var errBuf bytes.Buffer
		cmd := exec.CommandContext(ctx, "ffmpeg", "-i", video)
		cmd.Stderr = &errBuf
		cmd.Run()

		matches := videoLength32Fix.FindStringSubmatch(errBuf.String())
		hours, _ := strconv.Atoi(matches[1])
		minutes, _ := strconv.Atoi(matches[2])
		seconds, _ := strconv.Atoi(matches[3])
		millis, _ := strconv.Atoi(matches[4])

		if hours > videoLength[0] {
			videoLength = [4]int{hours, minutes, seconds, millis}
		} else if hours == videoLength[0] && minutes > videoLength[1] {
			videoLength = [4]int{hours, minutes, seconds, millis}
		} else if hours == videoLength[0] && minutes == videoLength[1] && seconds > videoLength[2] {
			videoLength = [4]int{hours, minutes, seconds, millis}
		} else if hours == videoLength[0] && minutes == videoLength[1] && seconds == videoLength[2] && millis > videoLength[3] {
			videoLength = [4]int{hours, minutes, seconds, millis}
		}
	}

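	// Illustrative note (not part of the original source): `ffmpeg -i <file>` with no
	// output prints the input description, including a line like
	// `Duration: 00:23:40.05, start: ...`, to stderr. The loop above parses that line
	// for every video input and keeps the longest duration; it is used further down to
	// detect (and trim) merged output that reports a bogus, much longer duration.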
	for i, audio := range audioFiles {
		input = append(input, "-i", audio)
		maps = append(maps, "-map", strconv.Itoa(i+len(videoFiles))+":1")
		locale := crunchyroll.LOCALE(re.FindStringSubmatch(audio)[1])
		metadata = append(metadata, fmt.Sprintf("-metadata:s:a:%d", i+len(videoFiles)), fmt.Sprintf("language=%s", locale))
		metadata = append(metadata, fmt.Sprintf("-metadata:s:a:%d", i+len(videoFiles)), fmt.Sprintf("title=%s", crunchyUtils.LocaleLanguage(locale)))
	}

	for i, subtitle := range subtitleFiles {
		input = append(input, "-i", subtitle)
		maps = append(maps, "-map", strconv.Itoa(i+len(videoFiles)+len(audioFiles)))
		locale := crunchyroll.LOCALE(re.FindStringSubmatch(subtitle)[1])
		metadata = append(metadata, fmt.Sprintf("-metadata:s:s:%d", i), fmt.Sprintf("language=%s", locale))
		metadata = append(metadata, fmt.Sprintf("-metadata:s:s:%d", i), fmt.Sprintf("title=%s", crunchyUtils.LocaleLanguage(locale)))
	}

commandOptions := []string{"-y"}
|
|
||||||
commandOptions = append(commandOptions, input...)
|
|
||||||
commandOptions = append(commandOptions, maps...)
|
|
||||||
commandOptions = append(commandOptions, metadata...)
|
|
||||||
// we have to create a temporary file here because it must be seekable
|
|
||||||
// for ffmpeg.
|
|
||||||
// ffmpeg could write to dst too, but this would require to re-encode
|
|
||||||
// the audio which results in much higher time and resource consumption
|
|
||||||
// (0-1 second with the temp file, ~20 seconds with re-encoding on my system)
|
|
||||||
file, err := os.CreateTemp("", "")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
file.Close()
|
|
||||||
defer os.Remove(file.Name())
|
|
||||||
|
|
||||||
commandOptions = append(commandOptions, "-disposition:s:0", "0", "-c", "copy", "-f", "matroska", file.Name())
|
|
||||||
|
|
||||||
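	// Illustrative note (not part of the original source): for one video and one
	// subtitle input the assembled call looks roughly like
	//
	//   ffmpeg -y -i ep_ja-JP_video_1.ts -i ep_en-US_subtitle_2.ass \
	//     -map 0 -map 1 \
	//     -metadata:s:v:0 language=ja-JP -metadata:s:v:0 title=Japanese \
	//     -metadata:s:a:0 language=ja-JP -metadata:s:a:0 title=Japanese \
	//     -metadata:s:s:0 language=en-US -metadata:s:s:0 title="English (US)" \
	//     -disposition:s:0 0 -c copy -f matroska /tmp/123456
	//
	// i.e. all streams are copied into a Matroska container with language metadata and
	// with the first subtitle track not marked as default.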
	// nicer debug output so the ffmpeg command can simply be copied and pasted when debugging
	if utils.Log.IsDev() {
		var debugOptions []string

		for _, option := range commandOptions {
			if strings.HasPrefix(option, "title=") {
				debugOptions = append(debugOptions, "title=\""+strings.TrimPrefix(option, "title=")+"\"")
			} else if strings.HasPrefix(option, "language=") {
				debugOptions = append(debugOptions, "language=\""+strings.TrimPrefix(option, "language=")+"\"")
			} else if strings.Contains(option, " ") {
				debugOptions = append(debugOptions, "\""+option+"\"")
			} else {
				debugOptions = append(debugOptions, option)
			}
		}
		utils.Log.Debug("FFmpeg merge command: ffmpeg %s", strings.Join(debugOptions, " "))
	}

	var errBuf bytes.Buffer
	cmd := exec.CommandContext(ctx, "ffmpeg", commandOptions...)
	cmd.Stderr = &errBuf
	if err = cmd.Run(); err != nil {
		return fmt.Errorf("%s", errBuf.String())
	}

	file, err = os.Open(file.Name())
	if err != nil {
		return err
	}
	defer file.Close()

	errBuf.Reset()
	cmd = exec.CommandContext(ctx, "ffmpeg", "-i", file.Name())
	cmd.Stderr = &errBuf
	cmd.Run()

	matches := videoLength32Fix.FindStringSubmatch(errBuf.String())
	hours, _ := strconv.Atoi(matches[1])
	minutes, _ := strconv.Atoi(matches[2])
	seconds, _ := strconv.Atoi(matches[3])
	millis, _ := strconv.Atoi(matches[4])

	var reencode bool
	if hours > videoLength[0] {
		reencode = true
	} else if hours == videoLength[0] && minutes > videoLength[1] {
		reencode = true
	} else if hours == videoLength[0] && minutes == videoLength[1] && seconds > videoLength[2] {
		reencode = true
	} else if hours == videoLength[0] && minutes == videoLength[1] && seconds == videoLength[2] && millis > videoLength[3] {
		reencode = true
	}

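	// Illustrative note (not part of the original source): the merged Matroska file is
	// probed the same way as the inputs; if it reports a longer duration than the
	// longest input, the output is assumed to be affected by issue #32 and is trimmed
	// back to the expected length below.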
	// very dirty workaround for https://github.com/crunchy-labs/crunchy-cli/issues/32.
	// this may also trigger when it is not needed, but there is currently no easy way
	// to avoid that
	if reencode {
		utils.Log.Debug("Re-encoding to shorten video length")

		file.Close()

		tmpFile, _ := os.CreateTemp("", filepath.Base(file.Name())+"-32_fix")
		tmpFile.Close()

		errBuf.Reset()
		cmd = exec.CommandContext(ctx, "ffmpeg",
			"-y",
			"-i", file.Name(),
			"-map", "0",
			"-c", "copy",
			"-disposition:s:0", "0",
			"-t", fmt.Sprintf("%02d:%02d:%02d.%d", videoLength[0], videoLength[1], videoLength[2], videoLength[3]),
			"-f", "matroska",
			tmpFile.Name())
		cmd.Stderr = &errBuf
		if err = cmd.Run(); err != nil {
			return fmt.Errorf("%s", errBuf.String())
		}

		os.Remove(file.Name())
		os.Rename(tmpFile.Name(), file.Name())

		file, err = os.Open(file.Name())
		if err != nil {
			return err
		}
		defer file.Close()
	}

	// flush the buffered writer at the end so no bytes are left sitting in its buffer
	bw := bufio.NewWriter(dst)
	if _, err = bw.ReadFrom(file); err != nil {
		return err
	}
	return bw.Flush()
}

func archiveExtractEpisodes(url string) ([][]utils.FormatInformation, error) {
	var hasJapanese bool
	languagesAsLocale := []crunchyroll.LOCALE{crunchyroll.JP}
	for _, language := range archiveLanguagesFlag {
		locale := crunchyroll.LOCALE(language)
		if locale == crunchyroll.JP {
			hasJapanese = true
		} else {
			languagesAsLocale = append(languagesAsLocale, locale)
		}
	}

	if _, ok := crunchyroll.ParseEpisodeURL(url); ok {
		return nil, fmt.Errorf("archiving episodes by url is no longer supported (thx crunchyroll). use the series url instead and filter after the given episode (https://github.com/crunchy-labs/crunchy-cli/wiki/Cli#filter)")
	}

	episodes, err := utils.ExtractEpisodes(url, languagesAsLocale...)
	if err != nil {
		return nil, err
	}

	if !hasJapanese && len(episodes[1:]) == 0 {
		return nil, fmt.Errorf("no episodes found")
	}

	for i, eps := range episodes {
		if len(eps) == 0 {
			utils.Log.SetProcess("%s has no matching episodes", languagesAsLocale[i])
		} else if len(episodes[0]) > len(eps) {
			utils.Log.SetProcess("%s has %d fewer episodes than exist in japanese (%d)", languagesAsLocale[i], len(episodes[0])-len(eps), len(episodes[0]))
		}
	}

	if !hasJapanese {
		episodes = episodes[1:]
	}

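	// Illustrative note (not part of the original source): `episodes` holds one slice
	// per requested locale, index 0 always being the Japanese audio that was queried
	// as a reference; if Japanese was not actually requested via `--language`, that
	// reference slice is dropped again right above.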
	eps := make(map[int]map[int]*utils.FormatInformation)
	for _, lang := range episodes {
		for _, season := range crunchyUtils.SortEpisodesBySeason(lang) {
			if _, ok := eps[season[0].SeasonNumber]; !ok {
				eps[season[0].SeasonNumber] = map[int]*utils.FormatInformation{}
			}
			for _, episode := range season {
				format, err := episode.GetFormat(archiveResolutionFlag, "", false)
				if err != nil {
					return nil, fmt.Errorf("error while receiving format for %s: %v", episode.Title, err)
				}

				if _, ok := eps[episode.SeasonNumber][episode.EpisodeNumber]; !ok {
					eps[episode.SeasonNumber][episode.EpisodeNumber] = &utils.FormatInformation{
						Format:            format,
						AdditionalFormats: make([]*crunchyroll.Format, 0),

						Title:         episode.Title,
						SeriesName:    episode.SeriesTitle,
						SeasonName:    episode.SeasonTitle,
						SeasonNumber:  episode.SeasonNumber,
						EpisodeNumber: episode.EpisodeNumber,
						Resolution:    format.Video.Resolution,
						FPS:           format.Video.FrameRate,
						Audio:         format.AudioLocale,
					}
				} else {
					eps[episode.SeasonNumber][episode.EpisodeNumber].AdditionalFormats = append(eps[episode.SeasonNumber][episode.EpisodeNumber].AdditionalFormats, format)
				}
			}
		}
	}

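	// Illustrative note (not part of the original source): the nested map is keyed as
	// eps[seasonNumber][episodeNumber]; the first format found for an episode becomes
	// the primary Format, and every further audio locale of the same episode is
	// collected in AdditionalFormats to be merged into the same output file later.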
	var infoFormat [][]utils.FormatInformation
	var keys []int
	for e := range eps {
		keys = append(keys, e)
	}
	sort.Ints(keys)

	for _, k := range keys {
		var tmpFormatInfo []utils.FormatInformation
		var kkey []int
		for ee := range eps[k] {
			kkey = append(kkey, ee)
		}
		sort.Ints(kkey)

		for _, kk := range kkey {
			tmpFormatInfo = append(tmpFormatInfo, *eps[k][kk])
		}

		infoFormat = append(infoFormat, tmpFormatInfo)
	}

	return infoFormat, nil
}
@ -1,136 +0,0 @@
|
||||||
package archive
|
|
||||||
|
|
||||||
import (
|
|
||||||
"archive/tar"
|
|
||||||
"archive/zip"
|
|
||||||
"bytes"
|
|
||||||
"compress/gzip"
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/utils"
|
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"sync"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Compress interface {
|
|
||||||
io.Closer
|
|
||||||
|
|
||||||
NewFile(information utils.FormatInformation) (io.WriteCloser, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewGzipCompress(file *os.File) *TarCompress {
|
|
||||||
gw := gzip.NewWriter(file)
|
|
||||||
return &TarCompress{
|
|
||||||
parent: gw,
|
|
||||||
dst: tar.NewWriter(gw),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewTarCompress(file *os.File) *TarCompress {
|
|
||||||
return &TarCompress{
|
|
||||||
dst: tar.NewWriter(file),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type TarCompress struct {
|
|
||||||
Compress
|
|
||||||
|
|
||||||
wg sync.WaitGroup
|
|
||||||
|
|
||||||
parent *gzip.Writer
|
|
||||||
dst *tar.Writer
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tc *TarCompress) Close() error {
|
|
||||||
// we have to wait here in case the actual content isn't copied completely into the
|
|
||||||
// writer yet
|
|
||||||
tc.wg.Wait()
|
|
||||||
|
|
||||||
var err, err2 error
|
|
||||||
if tc.parent != nil {
|
|
||||||
err2 = tc.parent.Close()
|
|
||||||
}
|
|
||||||
err = tc.dst.Close()
|
|
||||||
|
|
||||||
if err != nil && err2 != nil {
|
|
||||||
// best way to show double errors at once that I've found
|
|
||||||
return fmt.Errorf("%v\n%v", err, err2)
|
|
||||||
} else if err == nil && err2 != nil {
|
|
||||||
err = err2
|
|
||||||
}
|
|
||||||
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tc *TarCompress) NewFile(information utils.FormatInformation) (io.WriteCloser, error) {
|
|
||||||
rp, wp := io.Pipe()
|
|
||||||
tc.wg.Add(1)
go func() {
defer tc.wg.Done()
|
|
||||||
var buf bytes.Buffer
|
|
||||||
io.Copy(&buf, rp)
|
|
||||||
|
|
||||||
header := &tar.Header{
|
|
||||||
Name: filepath.Join(fmt.Sprintf("S%2d", information.SeasonNumber), information.Title),
|
|
||||||
ModTime: time.Now(),
|
|
||||||
Mode: 0644,
|
|
||||||
Typeflag: tar.TypeReg,
|
|
||||||
// fun fact: I did not set the size for quite some time because I thought it wasn't
// required. Because of this I debugged this part for multiple hours: without proper
// size information only a tiny amount gets copied into the tar (or zip) writer.
// This is also the reason why the file content is copied completely into a buffer
// before writing it to the writer. I could bypass this and save some memory, but
// that requires some rewriting and I'm nearly at the (planned) finish for version 2,
// so maybe in the future
|
|
||||||
Size: int64(buf.Len()),
|
|
||||||
}
|
|
||||||
tc.dst.WriteHeader(header)
|
|
||||||
io.Copy(tc.dst, &buf)
|
|
||||||
}()
|
|
||||||
return wp, nil
|
|
||||||
}
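// Illustrative usage sketch (not part of the original source, names are hypothetical):
//
//   comp := NewGzipCompress(outFile)   // or NewTarCompress / NewZipCompress
//   wc, _ := comp.NewFile(info)        // returns the write end of an io.Pipe
//   io.Copy(wc, episodeData)           // stream the finished episode into the archive entry
//   wc.Close()                         // closing the pipe lets the copy goroutine finish
//   comp.Close()                       // waits for pending copies, then closes the archive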
|
|
||||||
|
|
||||||
func NewZipCompress(file *os.File) *ZipCompress {
|
|
||||||
return &ZipCompress{
|
|
||||||
dst: zip.NewWriter(file),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type ZipCompress struct {
|
|
||||||
Compress
|
|
||||||
|
|
||||||
wg sync.WaitGroup
|
|
||||||
|
|
||||||
dst *zip.Writer
|
|
||||||
}
|
|
||||||
|
|
||||||
func (zc *ZipCompress) Close() error {
|
|
||||||
zc.wg.Wait()
|
|
||||||
return zc.dst.Close()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (zc *ZipCompress) NewFile(information utils.FormatInformation) (io.WriteCloser, error) {
|
|
||||||
rp, wp := io.Pipe()
|
|
||||||
zc.wg.Add(1)
go func() {
defer zc.wg.Done()
|
|
||||||
|
|
||||||
var buf bytes.Buffer
|
|
||||||
io.Copy(&buf, rp)
|
|
||||||
|
|
||||||
header := &zip.FileHeader{
|
|
||||||
Name: filepath.Join(fmt.Sprintf("S%2d", information.SeasonNumber), information.Title),
|
|
||||||
Modified: time.Now(),
|
|
||||||
Method: zip.Deflate,
|
|
||||||
UncompressedSize64: uint64(buf.Len()),
|
|
||||||
}
|
|
||||||
header.SetMode(0644)
|
|
||||||
|
|
||||||
hw, _ := zc.dst.CreateHeader(header)
|
|
||||||
io.Copy(hw, &buf)
|
|
||||||
}()
|
|
||||||
|
|
||||||
return wp, nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,362 +0,0 @@
|
||||||
package download
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/cli/commands"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/utils"
|
|
||||||
"github.com/crunchy-labs/crunchyroll-go/v3"
|
|
||||||
crunchyUtils "github.com/crunchy-labs/crunchyroll-go/v3/utils"
|
|
||||||
"github.com/grafov/m3u8"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
"math"
|
|
||||||
"os"
|
|
||||||
"os/signal"
|
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
|
||||||
"sort"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
downloadAudioFlag string
|
|
||||||
downloadSubtitleFlag string
|
|
||||||
|
|
||||||
downloadDirectoryFlag string
|
|
||||||
downloadOutputFlag string
|
|
||||||
downloadTempDirFlag string
|
|
||||||
|
|
||||||
downloadResolutionFlag string
|
|
||||||
|
|
||||||
downloadGoroutinesFlag int
|
|
||||||
)
|
|
||||||
|
|
||||||
var Cmd = &cobra.Command{
|
|
||||||
Use: "download",
|
|
||||||
Short: "Download a video",
|
|
||||||
Args: cobra.MinimumNArgs(1),
|
|
||||||
|
|
||||||
PreRunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
utils.Log.Debug("Validating arguments")
|
|
||||||
|
|
||||||
if filepath.Ext(downloadOutputFlag) != ".ts" {
|
|
||||||
if !utils.HasFFmpeg() {
|
|
||||||
return fmt.Errorf("the file ending for the output file (%s) is not `.ts`. "+
|
|
||||||
"Install ffmpeg (https://ffmpeg.org/download.html) to use other media file endings (e.g. `.mp4`)", downloadOutputFlag)
|
|
||||||
} else {
|
|
||||||
utils.Log.Debug("Custom file ending '%s' (ffmpeg is installed)", filepath.Ext(downloadOutputFlag))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if downloadAudioFlag != "" && !crunchyUtils.ValidateLocale(crunchyroll.LOCALE(downloadAudioFlag)) {
|
|
||||||
return fmt.Errorf("%s is not a valid audio locale. Choose from: %s", downloadAudioFlag, strings.Join(utils.LocalesAsStrings(), ", "))
|
|
||||||
} else if downloadSubtitleFlag != "" && !crunchyUtils.ValidateLocale(crunchyroll.LOCALE(downloadSubtitleFlag)) {
|
|
||||||
return fmt.Errorf("%s is not a valid subtitle locale. Choose from: %s", downloadSubtitleFlag, strings.Join(utils.LocalesAsStrings(), ", "))
|
|
||||||
}
|
|
||||||
utils.Log.Debug("Locales: audio: %s / subtitle: %s", downloadAudioFlag, downloadSubtitleFlag)
|
|
||||||
|
|
||||||
switch downloadResolutionFlag {
|
|
||||||
case "1080p", "720p", "480p", "360p":
|
|
||||||
intRes, _ := strconv.ParseFloat(strings.TrimSuffix(downloadResolutionFlag, "p"), 64)
|
|
||||||
downloadResolutionFlag = fmt.Sprintf("%.0fx%s", math.Ceil(intRes*(float64(16)/float64(9))), strings.TrimSuffix(downloadResolutionFlag, "p"))
|
|
||||||
case "240p":
|
|
||||||
// 240p would round up to 427x240 if used in the case statement above, so it has to be handled separately
|
|
||||||
downloadResolutionFlag = "428x240"
|
|
||||||
case "1920x1080", "1280x720", "640x480", "480x360", "428x240", "best", "worst":
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("'%s' is not a valid resolution", downloadResolutionFlag)
|
|
||||||
}
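// Illustrative note (not part of the original source): the abbreviation cases compute
// the width as ceil(height * 16/9), e.g. 720p -> ceil(720 * 16/9) = 1280 -> "1280x720";
// 240p is special-cased because 240 * 16/9 = 426.67 would become 427 instead of the
// 428x240 variant that is actually served.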
|
|
||||||
utils.Log.Debug("Using resolution '%s'", downloadResolutionFlag)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
if err := commands.LoadCrunchy(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return download(args)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
Cmd.Flags().StringVarP(&downloadAudioFlag, "audio",
|
|
||||||
"a",
|
|
||||||
"",
|
|
||||||
"The locale of the audio. Available locales: "+strings.Join(utils.LocalesAsStrings(), ", "))
|
|
||||||
Cmd.Flags().StringVarP(&downloadSubtitleFlag,
|
|
||||||
"subtitle",
|
|
||||||
"s",
|
|
||||||
"",
|
|
||||||
"The locale of the subtitle. Available locales: "+strings.Join(utils.LocalesAsStrings(), ", "))
|
|
||||||
|
|
||||||
cwd, _ := os.Getwd()
|
|
||||||
Cmd.Flags().StringVarP(&downloadDirectoryFlag,
|
|
||||||
"directory",
|
|
||||||
"d",
|
|
||||||
cwd,
|
|
||||||
"The directory to download the file(s) into")
|
|
||||||
Cmd.Flags().StringVarP(&downloadOutputFlag,
|
|
||||||
"output",
|
|
||||||
"o",
|
|
||||||
"{title}.ts",
|
|
||||||
"Name of the output file. "+
|
|
||||||
"If you use the following things in the name, the will get replaced:\n"+
|
|
||||||
"\t{title} » Title of the video\n"+
|
|
||||||
"\t{series_name} » Name of the series\n"+
|
|
||||||
"\t{season_name} » Name of the season\n"+
|
|
||||||
"\t{season_number} » Number of the season\n"+
|
|
||||||
"\t{episode_number} » Number of the episode\n"+
|
|
||||||
"\t{resolution} » Resolution of the video\n"+
|
|
||||||
"\t{fps} » Frame Rate of the video\n"+
|
|
||||||
"\t{audio} » Audio locale of the video\n"+
|
|
||||||
"\t{subtitle} » Subtitle locale of the video")
|
|
||||||
Cmd.Flags().StringVar(&downloadTempDirFlag,
|
|
||||||
"temp",
|
|
||||||
os.TempDir(),
|
|
||||||
"Directory to store temporary files in")
|
|
||||||
|
|
||||||
Cmd.Flags().StringVarP(&downloadResolutionFlag,
|
|
||||||
"resolution",
|
|
||||||
"r",
|
|
||||||
"best",
|
|
||||||
"The video resolution. Can either be specified via the pixels, the abbreviation for pixels, or 'common-use' words\n"+
|
|
||||||
"\tAvailable pixels: 1920x1080, 1280x720, 640x480, 480x360, 428x240\n"+
|
|
||||||
"\tAvailable abbreviations: 1080p, 720p, 480p, 360p, 240p\n"+
|
|
||||||
"\tAvailable common-use words: best (best available resolution), worst (worst available resolution)")
|
|
||||||
|
|
||||||
Cmd.Flags().IntVarP(&downloadGoroutinesFlag,
|
|
||||||
"goroutines",
|
|
||||||
"g",
|
|
||||||
runtime.NumCPU(),
|
|
||||||
"Sets how many parallel segment downloads should be used")
|
|
||||||
}
|
|
||||||
|
|
||||||
func download(urls []string) error {
|
|
||||||
for i, url := range urls {
|
|
||||||
utils.Log.SetProcess("Parsing url %d", i+1)
|
|
||||||
episodes, err := downloadExtractEpisodes(url)
|
|
||||||
if err != nil {
|
|
||||||
utils.Log.StopProcess("Failed to parse url %d", i+1)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
utils.Log.StopProcess("Parsed url %d", i+1)
|
|
||||||
|
|
||||||
for _, season := range episodes {
|
|
||||||
utils.Log.Info("%s Season %d", season[0].SeriesName, season[0].SeasonNumber)
|
|
||||||
|
|
||||||
for j, info := range season {
|
|
||||||
utils.Log.Info("\t%d. %s » %spx, %.2f FPS (S%02dE%02d)",
|
|
||||||
j+1,
|
|
||||||
info.Title,
|
|
||||||
info.Resolution,
|
|
||||||
info.FPS,
|
|
||||||
info.SeasonNumber,
|
|
||||||
info.EpisodeNumber)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
utils.Log.Empty()
|
|
||||||
|
|
||||||
for j, season := range episodes {
|
|
||||||
for k, info := range season {
|
|
||||||
dir := info.FormatString(downloadDirectoryFlag)
|
|
||||||
if _, err = os.Stat(dir); os.IsNotExist(err) {
|
|
||||||
if err = os.MkdirAll(dir, 0777); err != nil {
|
|
||||||
return fmt.Errorf("error while creating directory: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
file, err := os.Create(utils.GenerateFilename(info.FormatString(downloadOutputFlag), dir))
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to create output file: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = downloadInfo(info, file); err != nil {
|
|
||||||
file.Close()
|
|
||||||
os.Remove(file.Name())
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
file.Close()
|
|
||||||
|
|
||||||
if i != len(urls)-1 || j != len(episodes)-1 || k != len(season)-1 {
|
|
||||||
utils.Log.Empty()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func downloadInfo(info utils.FormatInformation, file *os.File) error {
|
|
||||||
utils.Log.Debug("Entering season %d, episode %d", info.SeasonNumber, info.EpisodeNumber)
|
|
||||||
|
|
||||||
if err := info.Format.InitVideo(); err != nil {
|
|
||||||
return fmt.Errorf("error while initializing the video: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
dp := &commands.DownloadProgress{
|
|
||||||
Prefix: utils.Log.(*commands.Logger).InfoLog.Prefix(),
|
|
||||||
Message: "Downloading video",
|
|
||||||
// number of segments a video has +2 is for merging and the success message
|
|
||||||
Total: int(info.Format.Video.Chunklist.Count()) + 2,
|
|
||||||
Dev: utils.Log.IsDev(),
|
|
||||||
Quiet: utils.Log.(*commands.Logger).IsQuiet(),
|
|
||||||
}
|
|
||||||
if utils.Log.IsDev() {
|
|
||||||
dp.Prefix = utils.Log.(*commands.Logger).DebugLog.Prefix()
|
|
||||||
}
|
|
||||||
defer func() {
|
|
||||||
if dp.Total != dp.Current {
|
|
||||||
fmt.Println()
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
ctx, cancel := context.WithCancel(context.Background())
|
|
||||||
defer cancel()
|
|
||||||
downloader := crunchyroll.NewDownloader(ctx, file, downloadGoroutinesFlag, func(segment *m3u8.MediaSegment, current, total int, file *os.File) error {
|
|
||||||
// check if the context was cancelled.
// this must be done so that no progress messages are printed after ctrl+c was pressed
|
|
||||||
if ctx.Err() != nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if utils.Log.IsDev() {
|
|
||||||
dp.UpdateMessage(fmt.Sprintf("Downloading %d/%d (%.2f%%) » %s", current, total, float32(current)/float32(total)*100, segment.URI), false)
|
|
||||||
} else {
|
|
||||||
dp.Update()
|
|
||||||
}
|
|
||||||
|
|
||||||
if current == total {
|
|
||||||
dp.UpdateMessage("Merging segments", false)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
tmp, _ := os.MkdirTemp(downloadTempDirFlag, "crunchy_")
|
|
||||||
downloader.TempDir = tmp
|
|
||||||
if utils.HasFFmpeg() {
|
|
||||||
downloader.FFmpegOpts = make([]string, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
sig := make(chan os.Signal, 1)
|
|
||||||
signal.Notify(sig, os.Interrupt)
|
|
||||||
go func() {
|
|
||||||
select {
|
|
||||||
case <-sig:
|
|
||||||
signal.Stop(sig)
|
|
||||||
utils.Log.Err("Exiting... (may take a few seconds)")
|
|
||||||
utils.Log.Err("To force exit press ctrl+c (again)")
|
|
||||||
cancel()
|
|
||||||
// os.Exit(1) is not called because an immediate exit after the cancel function would not give
// the download process enough time to stop gracefully. As a result, the temporary
// directory the segments are downloaded to would not be deleted
|
|
||||||
case <-ctx.Done():
|
|
||||||
// this is just here to end the goroutine and prevent it from running forever without a reason
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
utils.Log.Debug("Set up signal catcher")
|
|
||||||
|
|
||||||
utils.Log.Info("Downloading episode `%s` to `%s`", info.Title, filepath.Base(file.Name()))
|
|
||||||
utils.Log.Info("\tEpisode: S%02dE%02d", info.SeasonNumber, info.EpisodeNumber)
|
|
||||||
utils.Log.Info("\tAudio: %s", info.Audio)
|
|
||||||
utils.Log.Info("\tSubtitle: %s", info.Subtitle)
|
|
||||||
utils.Log.Info("\tResolution: %spx", info.Resolution)
|
|
||||||
utils.Log.Info("\tFPS: %.2f", info.FPS)
|
|
||||||
if err := info.Format.Download(downloader); err != nil {
|
|
||||||
return fmt.Errorf("error while downloading: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
dp.UpdateMessage("Download finished", false)
|
|
||||||
|
|
||||||
signal.Stop(sig)
|
|
||||||
utils.Log.Debug("Stopped signal catcher")
|
|
||||||
|
|
||||||
utils.Log.Empty()
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func downloadExtractEpisodes(url string) ([][]utils.FormatInformation, error) {
|
|
||||||
var episodes [][]*crunchyroll.Episode
|
|
||||||
var final []*crunchyroll.Episode
|
|
||||||
|
|
||||||
if downloadAudioFlag != "" {
|
|
||||||
if _, ok := crunchyroll.ParseEpisodeURL(url); ok {
|
|
||||||
return nil, fmt.Errorf("downloading episodes by url and specifying a language is no longer supported (thx crunchyroll). use the series url instead and filter after the given episode (https://github.com/crunchy-labs/crunchy-cli/wiki/Cli#filter)")
|
|
||||||
}
|
|
||||||
|
|
||||||
var err error
|
|
||||||
episodes, err = utils.ExtractEpisodes(url, crunchyroll.JP, crunchyroll.LOCALE(downloadAudioFlag))
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
japanese := episodes[0]
|
|
||||||
custom := episodes[1]
|
|
||||||
|
|
||||||
sort.Sort(crunchyUtils.EpisodesByNumber(japanese))
|
|
||||||
sort.Sort(crunchyUtils.EpisodesByNumber(custom))
|
|
||||||
|
|
||||||
var errMessages []string
|
|
||||||
|
|
||||||
if len(japanese) == 0 || len(japanese) == len(custom) {
|
|
||||||
final = custom
|
|
||||||
} else {
|
|
||||||
for _, jp := range japanese {
|
|
||||||
before := len(final)
|
|
||||||
for _, episode := range custom {
|
|
||||||
if jp.SeasonNumber == episode.SeasonNumber && jp.EpisodeNumber == episode.EpisodeNumber {
|
|
||||||
final = append(final, episode)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if before == len(final) {
|
|
||||||
errMessages = append(errMessages, fmt.Sprintf("%s has no %s audio, using %s as fallback", jp.Title, crunchyroll.LOCALE(downloadAudioFlag), crunchyroll.JP))
|
|
||||||
final = append(final, jp)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
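// Illustrative note (not part of the original source): when a specific audio locale is
// requested, the Japanese episode list acts as the reference; every episode that has no
// counterpart with the requested audio (matched by season and episode number) falls back
// to the Japanese version, and a note is recorded for each fallback (only the first 10
// notes are printed individually below).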
|
|
||||||
|
|
||||||
if len(errMessages) > 10 {
|
|
||||||
for _, msg := range errMessages[:10] {
|
|
||||||
utils.Log.SetProcess(msg)
|
|
||||||
}
|
|
||||||
utils.Log.SetProcess("... and %d more", len(errMessages)-10)
|
|
||||||
} else {
|
|
||||||
for _, msg := range errMessages {
|
|
||||||
utils.Log.SetProcess(msg)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
var err error
|
|
||||||
episodes, err = utils.ExtractEpisodes(url)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
} else if len(episodes) == 0 {
|
|
||||||
return nil, fmt.Errorf("cannot find any episode")
|
|
||||||
}
|
|
||||||
final = episodes[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
var infoFormat [][]utils.FormatInformation
|
|
||||||
for _, season := range crunchyUtils.SortEpisodesBySeason(final) {
|
|
||||||
tmpFormatInformation := make([]utils.FormatInformation, 0)
|
|
||||||
for _, episode := range season {
|
|
||||||
format, err := episode.GetFormat(downloadResolutionFlag, crunchyroll.LOCALE(downloadSubtitleFlag), true)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("error while receiving format for %s: %v", episode.Title, err)
|
|
||||||
}
|
|
||||||
tmpFormatInformation = append(tmpFormatInformation, utils.FormatInformation{
|
|
||||||
Format: format,
|
|
||||||
|
|
||||||
Title: episode.Title,
|
|
||||||
SeriesName: episode.SeriesTitle,
|
|
||||||
SeasonName: episode.SeasonTitle,
|
|
||||||
SeasonNumber: episode.SeasonNumber,
|
|
||||||
EpisodeNumber: episode.EpisodeNumber,
|
|
||||||
Resolution: format.Video.Resolution,
|
|
||||||
FPS: format.Video.FrameRate,
|
|
||||||
Audio: format.AudioLocale,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
infoFormat = append(infoFormat, tmpFormatInformation)
|
|
||||||
}
|
|
||||||
return infoFormat, nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,40 +0,0 @@
|
||||||
package info
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/cli/commands"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/utils"
|
|
||||||
crunchyUtils "github.com/crunchy-labs/crunchyroll-go/v3/utils"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
var Cmd = &cobra.Command{
|
|
||||||
Use: "info",
|
|
||||||
Short: "Shows information about the logged in user",
|
|
||||||
Args: cobra.MinimumNArgs(0),
|
|
||||||
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
if err := commands.LoadCrunchy(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return info()
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func info() error {
|
|
||||||
account, err := utils.Crunchy.Account()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
fmt.Println("Username: ", account.Username)
|
|
||||||
fmt.Println("Email: ", account.Email)
|
|
||||||
fmt.Println("Premium: ", utils.Crunchy.Config.Premium)
|
|
||||||
fmt.Println("Interface language: ", crunchyUtils.LocaleLanguage(account.PreferredCommunicationLanguage))
|
|
||||||
fmt.Println("Subtitle language: ", crunchyUtils.LocaleLanguage(account.PreferredContentSubtitleLanguage))
|
|
||||||
fmt.Println("Created: ", account.Created)
|
|
||||||
fmt.Println("Account ID: ", account.AccountID)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,196 +0,0 @@
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/utils"
|
|
||||||
"io"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"runtime"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
var prefix, progressDown, progressDownFinish string
|
|
||||||
|
|
||||||
func initPrefixBecauseWindowsSucksBallsHard() {
|
|
||||||
// dear windows user, please change to a good OS, linux in the best case.
|
|
||||||
// MICROSHIT DOES NOT GET IT DONE TO SHOW THE SYMBOLS IN THE ELSE CLAUSE
|
|
||||||
// CORRECTLY. NOT IN THE CMD NOR POWERSHELL. WHY TF, IT IS ONE OF THE MOST
|
|
||||||
// PROFITABLE COMPANIES ON THIS PLANET AND CANNOT SHOW A PROPER UTF-8 SYMBOL
|
|
||||||
// IN THEIR OWN PRODUCT WHICH GETS USED MILLION TIMES A DAY
|
|
||||||
if runtime.GOOS == "windows" {
|
|
||||||
prefix = "=>"
|
|
||||||
progressDown = "|"
|
|
||||||
progressDownFinish = "->"
|
|
||||||
} else {
|
|
||||||
prefix = "➞"
|
|
||||||
progressDown = "↓"
|
|
||||||
progressDownFinish = "↳"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type progress struct {
|
|
||||||
message string
|
|
||||||
stop bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewLogger(debug, info, err bool) *Logger {
|
|
||||||
initPrefixBecauseWindowsSucksBallsHard()
|
|
||||||
|
|
||||||
debugLog, infoLog, errLog := log.New(io.Discard, prefix+" ", 0), log.New(io.Discard, prefix+" ", 0), log.New(io.Discard, prefix+" ", 0)
|
|
||||||
|
|
||||||
if debug {
|
|
||||||
debugLog.SetOutput(os.Stdout)
|
|
||||||
}
|
|
||||||
if info {
|
|
||||||
infoLog.SetOutput(os.Stdout)
|
|
||||||
}
|
|
||||||
if err {
|
|
||||||
errLog.SetOutput(os.Stderr)
|
|
||||||
}
|
|
||||||
|
|
||||||
if debug {
|
|
||||||
debugLog = log.New(debugLog.Writer(), "[debug] ", 0)
|
|
||||||
infoLog = log.New(infoLog.Writer(), "[info] ", 0)
|
|
||||||
errLog = log.New(errLog.Writer(), "[err] ", 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
return &Logger{
|
|
||||||
DebugLog: debugLog,
|
|
||||||
InfoLog: infoLog,
|
|
||||||
ErrLog: errLog,
|
|
||||||
|
|
||||||
devView: debug,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type Logger struct {
|
|
||||||
utils.Logger
|
|
||||||
|
|
||||||
DebugLog *log.Logger
|
|
||||||
InfoLog *log.Logger
|
|
||||||
ErrLog *log.Logger
|
|
||||||
|
|
||||||
devView bool
|
|
||||||
|
|
||||||
progress chan progress
|
|
||||||
done chan interface{}
|
|
||||||
lock sync.Mutex
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *Logger) IsDev() bool {
|
|
||||||
return l.devView
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *Logger) IsQuiet() bool {
|
|
||||||
return l.DebugLog.Writer() == io.Discard && l.InfoLog.Writer() == io.Discard && l.ErrLog.Writer() == io.Discard
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *Logger) Debug(format string, v ...interface{}) {
|
|
||||||
l.DebugLog.Printf(format, v...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *Logger) Info(format string, v ...interface{}) {
|
|
||||||
l.InfoLog.Printf(format, v...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *Logger) Warn(format string, v ...interface{}) {
|
|
||||||
l.Err(format, v...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *Logger) Err(format string, v ...interface{}) {
|
|
||||||
l.ErrLog.Printf(format, v...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *Logger) Empty() {
|
|
||||||
if !l.devView && l.InfoLog.Writer() != io.Discard {
|
|
||||||
fmt.Println("")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *Logger) SetProcess(format string, v ...interface{}) {
|
|
||||||
if l.InfoLog.Writer() == io.Discard {
|
|
||||||
return
|
|
||||||
} else if l.devView {
|
|
||||||
l.Debug(format, v...)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
initialMessage := fmt.Sprintf(format, v...)
|
|
||||||
|
|
||||||
p := progress{
|
|
||||||
message: initialMessage,
|
|
||||||
}
|
|
||||||
|
|
||||||
l.lock.Lock()
|
|
||||||
if l.done != nil {
|
|
||||||
l.progress <- p
|
|
||||||
return
|
|
||||||
} else {
|
|
||||||
l.progress = make(chan progress, 1)
|
|
||||||
l.progress <- p
|
|
||||||
l.done = make(chan interface{})
|
|
||||||
}
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
states := []string{"-", "\\", "|", "/"}
|
|
||||||
|
|
||||||
var count int
|
|
||||||
|
|
||||||
for i := 0; ; i++ {
|
|
||||||
select {
|
|
||||||
case p := <-l.progress:
|
|
||||||
if p.stop {
|
|
||||||
fmt.Printf("\r" + strings.Repeat(" ", len(prefix)+len(initialMessage)))
|
|
||||||
if count > 1 {
|
|
||||||
fmt.Printf("\r%s %s\n", progressDownFinish, p.message)
|
|
||||||
} else {
|
|
||||||
fmt.Printf("\r%s %s\n", prefix, p.message)
|
|
||||||
}
|
|
||||||
|
|
||||||
if l.done != nil {
|
|
||||||
l.done <- nil
|
|
||||||
}
|
|
||||||
l.progress = nil
|
|
||||||
|
|
||||||
l.lock.Unlock()
|
|
||||||
return
|
|
||||||
} else {
|
|
||||||
if count > 0 {
|
|
||||||
fmt.Printf("\r%s %s\n", progressDown, p.message)
|
|
||||||
}
|
|
||||||
l.progress = make(chan progress, 1)
|
|
||||||
|
|
||||||
count++
|
|
||||||
|
|
||||||
fmt.Printf("\r%s %s", states[i/10%4], initialMessage)
|
|
||||||
l.lock.Unlock()
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
if i%10 == 0 {
|
|
||||||
fmt.Printf("\r%s %s", states[i/10%4], initialMessage)
|
|
||||||
}
|
|
||||||
time.Sleep(35 * time.Millisecond)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *Logger) StopProcess(format string, v ...interface{}) {
|
|
||||||
if l.InfoLog.Writer() == io.Discard {
|
|
||||||
return
|
|
||||||
} else if l.devView {
|
|
||||||
l.Debug(format, v...)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
l.lock.Lock()
|
|
||||||
l.progress <- progress{
|
|
||||||
message: fmt.Sprintf(format, v...),
|
|
||||||
stop: true,
|
|
||||||
}
|
|
||||||
<-l.done
|
|
||||||
l.done = nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,159 +0,0 @@
|
||||||
package login
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/cli/commands"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/utils"
|
|
||||||
"github.com/crunchy-labs/crunchyroll-go/v3"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
loginPersistentFlag bool
|
|
||||||
loginEncryptFlag bool
|
|
||||||
|
|
||||||
loginSessionIDFlag bool
|
|
||||||
loginRefreshTokenFlag bool
|
|
||||||
)
|
|
||||||
|
|
||||||
var Cmd = &cobra.Command{
|
|
||||||
Use: "login",
|
|
||||||
Short: "Login to crunchyroll",
|
|
||||||
Args: cobra.RangeArgs(1, 2),
|
|
||||||
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
if loginSessionIDFlag {
|
|
||||||
return loginSessionID(args[0])
|
|
||||||
} else if loginRefreshTokenFlag {
|
|
||||||
return loginRefreshToken(args[0])
|
|
||||||
} else {
|
|
||||||
return loginCredentials(args[0], args[1])
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
Cmd.Flags().BoolVar(&loginPersistentFlag,
|
|
||||||
"persistent",
|
|
||||||
false,
|
|
||||||
"If the given credential should be stored persistent")
|
|
||||||
Cmd.Flags().BoolVar(&loginEncryptFlag,
|
|
||||||
"encrypt",
|
|
||||||
false,
|
|
||||||
"Encrypt the given credentials (won't do anything if --session-id is given or --persistent is not given)")
|
|
||||||
|
|
||||||
Cmd.Flags().BoolVar(&loginSessionIDFlag,
|
|
||||||
"session-id",
|
|
||||||
false,
|
|
||||||
"Use a session id to login instead of username and password")
|
|
||||||
Cmd.Flags().BoolVar(&loginRefreshTokenFlag,
|
|
||||||
"refresh-token",
|
|
||||||
false,
|
|
||||||
"Use a refresh token to login instead of username and password. Can be obtained by copying the `etp-rt` cookie from crunchyroll.com")
|
|
||||||
|
|
||||||
Cmd.MarkFlagsMutuallyExclusive("session-id", "refresh-token")
|
|
||||||
}
|
|
||||||
|
|
||||||
func loginCredentials(user, password string) error {
|
|
||||||
utils.Log.Debug("Logging in via credentials")
|
|
||||||
c, err := crunchyroll.LoginWithCredentials(user, password, utils.SystemLocale(false), utils.Client)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if loginPersistentFlag {
|
|
||||||
var passwd []byte
|
|
||||||
if loginEncryptFlag {
|
|
||||||
for {
|
|
||||||
fmt.Print("Enter password: ")
|
|
||||||
passwd, err = commands.ReadLineSilent()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
fmt.Println()
|
|
||||||
|
|
||||||
fmt.Print("Enter password again: ")
|
|
||||||
repasswd, err := commands.ReadLineSilent()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
fmt.Println()
|
|
||||||
|
|
||||||
if bytes.Equal(passwd, repasswd) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
fmt.Println("Passwords does not match, try again")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err = utils.SaveCredentialsPersistent(user, password, passwd); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if !loginEncryptFlag {
|
|
||||||
utils.Log.Warn("The login information will be stored permanently UNENCRYPTED on your drive. " +
|
|
||||||
"To encrypt it, use the `--encrypt` flag")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err = utils.SaveSession(c); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if !loginPersistentFlag {
|
|
||||||
utils.Log.Info("Due to security reasons, you have to login again on the next reboot")
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func loginSessionID(sessionID string) error {
|
|
||||||
utils.Log.Debug("Logging in via session id")
|
|
||||||
utils.Log.Warn("Logging in with session id is deprecated and not very reliable. Consider choosing another option (if it fails)")
|
|
||||||
var c *crunchyroll.Crunchyroll
|
|
||||||
var err error
|
|
||||||
if c, err = crunchyroll.LoginWithSessionID(sessionID, utils.SystemLocale(false), utils.Client); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if loginPersistentFlag {
|
|
||||||
if err = utils.SaveSessionPersistent(c); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
utils.Log.Warn("The login information will be stored permanently UNENCRYPTED on your drive")
|
|
||||||
}
|
|
||||||
if err = utils.SaveSession(c); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if !loginPersistentFlag {
|
|
||||||
utils.Log.Info("Due to security reasons, you have to login again on the next reboot")
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func loginRefreshToken(refreshToken string) error {
|
|
||||||
utils.Log.Debug("Logging in via refresh token")
|
|
||||||
var c *crunchyroll.Crunchyroll
|
|
||||||
var err error
|
|
||||||
if c, err = crunchyroll.LoginWithRefreshToken(refreshToken, utils.SystemLocale(false), utils.Client); err != nil {
|
|
||||||
utils.Log.Err(err.Error())
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
if loginPersistentFlag {
|
|
||||||
if err = utils.SaveSessionPersistent(c); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
utils.Log.Warn("The login information will be stored permanently UNENCRYPTED on your drive")
|
|
||||||
}
|
|
||||||
if err = utils.SaveSession(c); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if !loginPersistentFlag {
|
|
||||||
utils.Log.Info("Due to security reasons, you have to login again on the next reboot")
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,48 +0,0 @@
|
||||||
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"syscall"
|
|
||||||
)
|
|
||||||
|
|
||||||
// https://github.com/bgentry/speakeasy/blob/master/speakeasy_unix.go
|
|
||||||
var stty string
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
var err error
|
|
||||||
if stty, err = exec.LookPath("stty"); err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func ReadLineSilent() ([]byte, error) {
|
|
||||||
pid, err := setEcho(false)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
defer setEcho(true)
|
|
||||||
|
|
||||||
syscall.Wait4(pid, nil, 0, nil)
|
|
||||||
|
|
||||||
l, _, err := bufio.NewReader(os.Stdin).ReadLine()
|
|
||||||
return l, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func setEcho(on bool) (pid int, err error) {
|
|
||||||
fds := []uintptr{os.Stdin.Fd(), os.Stdout.Fd(), os.Stderr.Fd()}
|
|
||||||
|
|
||||||
if on {
|
|
||||||
pid, err = syscall.ForkExec(stty, []string{"stty", "echo"}, &syscall.ProcAttr{Files: fds})
|
|
||||||
} else {
|
|
||||||
pid, err = syscall.ForkExec(stty, []string{"stty", "-echo"}, &syscall.ProcAttr{Files: fds})
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
@ -1,151 +0,0 @@
|
||||||
package update
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/utils"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path"
|
|
||||||
"runtime"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
updateInstallFlag bool
|
|
||||||
)
|
|
||||||
|
|
||||||
var Cmd = &cobra.Command{
|
|
||||||
Use: "update",
|
|
||||||
Short: "Check if updates are available",
|
|
||||||
Args: cobra.MaximumNArgs(0),
|
|
||||||
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return update()
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
Cmd.Flags().BoolVarP(&updateInstallFlag,
|
|
||||||
"install",
|
|
||||||
"i",
|
|
||||||
false,
|
|
||||||
"If set and a new version is available, the new version gets installed")
|
|
||||||
}
|
|
||||||
|
|
||||||
func update() error {
|
|
||||||
var release map[string]interface{}
|
|
||||||
|
|
||||||
resp, err := utils.Client.Get("https://api.github.com/repos/crunchy-labs/crunchy-cli/releases/latest")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
if err = json.NewDecoder(resp.Body).Decode(&release); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
releaseVersion := strings.TrimPrefix(release["tag_name"].(string), "v")
|
|
||||||
|
|
||||||
if utils.Version == "development" {
|
|
||||||
utils.Log.Info("Development version, update service not available")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
latestRelease := strings.SplitN(releaseVersion, ".", 4)
|
|
||||||
if len(latestRelease) != 3 {
|
|
||||||
return fmt.Errorf("latest tag name (%s) is not parsable", releaseVersion)
|
|
||||||
}
|
|
||||||
|
|
||||||
internalVersion := strings.SplitN(utils.Version, ".", 4)
|
|
||||||
if len(internalVersion) != 3 {
|
|
||||||
return fmt.Errorf("internal version (%s) is not parsable", utils.Version)
|
|
||||||
}
|
|
||||||
|
|
||||||
utils.Log.Info("Installed version is %s", utils.Version)
|
|
||||||
|
|
||||||
var hasUpdate bool
|
|
||||||
for i := 0; i < 3; i++ {
|
|
||||||
if latestRelease[i] < internalVersion[i] {
|
|
||||||
utils.Log.Info("Local version is newer than version in latest release (%s)", releaseVersion)
|
|
||||||
return nil
|
|
||||||
} else if latestRelease[i] > internalVersion[i] {
|
|
||||||
hasUpdate = true
|
|
||||||
}
|
|
||||||
}
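// Illustrative note (not part of the original source): the version components are
// compared as strings here, which only behaves like a numeric comparison as long as
// both versions use single-digit components (e.g. "10" compares as smaller than "9").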
|
|
||||||
|
|
||||||
if !hasUpdate {
|
|
||||||
utils.Log.Info("Version is up-to-date")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
utils.Log.Info("A new version is available (%s): https://github.com/crunchy-labs/crunchy-cli/releases/tag/v%s", releaseVersion, releaseVersion)
|
|
||||||
|
|
||||||
if updateInstallFlag {
|
|
||||||
if runtime.GOARCH != "amd64" {
|
|
||||||
return fmt.Errorf("invalid architecture found (%s), only amd64 is currently supported for automatic updating. "+
|
|
||||||
"You have to update manually (https://github.com/crunchy-labs/crunchy-cli)", runtime.GOARCH)
|
|
||||||
}
|
|
||||||
|
|
||||||
var downloadFile string
|
|
||||||
switch runtime.GOOS {
|
|
||||||
case "linux":
|
|
||||||
pacmanCommand := exec.Command("pacman", "-Q", "crunchy-cli")
|
|
||||||
if pacmanCommand.Run() == nil && pacmanCommand.ProcessState.Success() {
|
|
||||||
utils.Log.Info("crunchy-cli was probably installed via an Arch Linux AUR helper (like yay). Updating via this AUR helper is recommended")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
downloadFile = fmt.Sprintf("crunchy-v%s_linux", releaseVersion)
|
|
||||||
case "darwin":
|
|
||||||
downloadFile = fmt.Sprintf("crunchy-v%s_darwin", releaseVersion)
|
|
||||||
case "windows":
|
|
||||||
downloadFile = fmt.Sprintf("crunchy-v%s_windows.exe", releaseVersion)
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("invalid operation system found (%s), only linux, windows and darwin / macos are currently supported. "+
|
|
||||||
"You have to update manually (https://github.com/crunchy-labs/crunchy-cli", runtime.GOOS)
|
|
||||||
}
|
|
||||||
|
|
||||||
executePath := os.Args[0]
|
|
||||||
var perms os.FileInfo
|
|
||||||
// check if the path is relative, absolute or non (if so, the executable must be in PATH)
|
|
||||||
if strings.HasPrefix(executePath, "."+string(os.PathSeparator)) || path.IsAbs(executePath) {
|
|
||||||
if perms, err = os.Stat(os.Args[0]); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
executePath, err = exec.LookPath(os.Args[0])
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if perms, err = os.Stat(executePath); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
utils.Log.SetProcess("Updating executable %s", executePath)
|
|
||||||
|
|
||||||
if err = os.Remove(executePath); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
executeFile, err := os.OpenFile(executePath, os.O_CREATE|os.O_WRONLY, perms.Mode())
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer executeFile.Close()
|
|
||||||
|
|
||||||
resp, err := utils.Client.Get(fmt.Sprintf("https://github.com/crunchy-labs/crunchy-cli/releases/download/v%s/%s", releaseVersion, downloadFile))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
if _, err = io.Copy(executeFile, resp.Body); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
utils.Log.StopProcess("Updated executable %s", executePath)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,125 +0,0 @@
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/utils"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"runtime"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
)
|
|
||||||
|
|
||||||
type DownloadProgress struct {
|
|
||||||
Prefix string
|
|
||||||
Message string
|
|
||||||
|
|
||||||
Total int
|
|
||||||
Current int
|
|
||||||
|
|
||||||
Dev bool
|
|
||||||
Quiet bool
|
|
||||||
|
|
||||||
lock sync.Mutex
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dp *DownloadProgress) Update() {
|
|
||||||
dp.update("", false)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dp *DownloadProgress) UpdateMessage(msg string, permanent bool) {
|
|
||||||
dp.update(msg, permanent)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dp *DownloadProgress) update(msg string, permanent bool) {
|
|
||||||
if dp.Quiet {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if dp.Current >= dp.Total {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
dp.lock.Lock()
|
|
||||||
defer dp.lock.Unlock()
|
|
||||||
dp.Current++
|
|
||||||
|
|
||||||
if msg == "" {
|
|
||||||
msg = dp.Message
|
|
||||||
}
|
|
||||||
if permanent {
|
|
||||||
dp.Message = msg
|
|
||||||
}
|
|
||||||
|
|
||||||
if dp.Dev {
|
|
||||||
fmt.Printf("%s%s\n", dp.Prefix, msg)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
percentage := float32(dp.Current) / float32(dp.Total) * 100
|
|
||||||
|
|
||||||
pre := fmt.Sprintf("%s%s [", dp.Prefix, msg)
|
|
||||||
post := fmt.Sprintf("]%4d%% %8d/%d", int(percentage), dp.Current, dp.Total)
|
|
||||||
|
|
||||||
// I don't really know why +2 is needed here but without it the Printf below would not print to the line end
|
|
||||||
progressWidth := terminalWidth() - len(pre) - len(post) + 2
|
|
||||||
repeatCount := int(percentage / float32(100) * float32(progressWidth))
|
|
||||||
// it can be lower than zero when the terminal is very tiny
|
|
||||||
if repeatCount < 0 {
|
|
||||||
repeatCount = 0
|
|
||||||
}
|
|
||||||
progressPercentage := strings.Repeat("=", repeatCount)
|
|
||||||
if dp.Current != dp.Total {
|
|
||||||
progressPercentage += ">"
|
|
||||||
}
|
|
||||||
|
|
||||||
fmt.Printf("\r%s%-"+fmt.Sprint(progressWidth)+"s%s", pre, progressPercentage, post)
|
|
||||||
}
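// Illustrative note (not part of the original source): a rendered progress line looks
// roughly like
//
//   ➞ Downloading video [==============>           ]  54%      163/300
//
// where the bar width is whatever is left of the terminal width after the prefix, the
// message and the counter on the right.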
|
|
||||||
|
|
||||||
func terminalWidth() int {
|
|
||||||
if runtime.GOOS != "windows" {
|
|
||||||
cmd := exec.Command("stty", "size")
|
|
||||||
cmd.Stdin = os.Stdin
|
|
||||||
res, err := cmd.Output()
|
|
||||||
if err != nil {
|
|
||||||
return 60
|
|
||||||
}
|
|
||||||
// on alpine linux the command `stty size` does not report the terminal size
// but something like "stty: standard input". this may also apply to other systems
|
|
||||||
splitOutput := strings.SplitN(strings.ReplaceAll(string(res), "\n", ""), " ", 2)
|
|
||||||
if len(splitOutput) == 1 {
|
|
||||||
return 60
|
|
||||||
}
|
|
||||||
width, err := strconv.Atoi(splitOutput[1])
|
|
||||||
if err != nil {
|
|
||||||
return 60
|
|
||||||
}
|
|
||||||
return width
|
|
||||||
}
|
|
||||||
return 60
|
|
||||||
}
|
|
||||||
|
|
||||||
func LoadCrunchy() error {
|
|
||||||
var encryptionKey []byte
|
|
||||||
|
|
||||||
if utils.IsTempSession() {
|
|
||||||
encryptionKey = nil
|
|
||||||
} else {
|
|
||||||
if encrypted, err := utils.IsSavedSessionEncrypted(); err != nil {
|
|
||||||
if os.IsNotExist(err) {
|
|
||||||
return fmt.Errorf("to use this command, login first. Type `%s login -h` to get help", os.Args[0])
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
} else if encrypted {
|
|
||||||
encryptionKey, err = ReadLineSilent()
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to read password")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var err error
|
|
||||||
utils.Crunchy, err = utils.LoadSession(encryptionKey)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
@@ -1,41 +0,0 @@
//go:build windows

package commands

import (
	"bufio"
	"os"
	"syscall"
)

// https://github.com/bgentry/speakeasy/blob/master/speakeasy_windows.go
func ReadLineSilent() ([]byte, error) {
	var oldMode uint32

	if err := syscall.GetConsoleMode(syscall.Stdin, &oldMode); err != nil {
		return nil, err
	}

	newMode := oldMode &^ 0x0004

	err := setConsoleMode(syscall.Stdin, newMode)
	defer setConsoleMode(syscall.Stdin, oldMode)

	if err != nil {
		return nil, err
	}

	l, _, err := bufio.NewReader(os.Stdin).ReadLine()
	if err != nil {
		return nil, err
	}
	return l, err
}

func setConsoleMode(console syscall.Handle, mode uint32) error {
	dll := syscall.MustLoadDLL("kernel32")
	proc := dll.MustFindProc("SetConsoleMode")
	_, _, err := proc.Call(uintptr(console), uintptr(mode))

	return err
}
99 cli/root.go
@@ -1,99 +0,0 @@
package cli

import (
	"context"
	"fmt"
	"github.com/crunchy-labs/crunchy-cli/cli/commands"
	"github.com/crunchy-labs/crunchy-cli/cli/commands/archive"
	"github.com/crunchy-labs/crunchy-cli/cli/commands/download"
	"github.com/crunchy-labs/crunchy-cli/cli/commands/info"
	"github.com/crunchy-labs/crunchy-cli/cli/commands/login"
	"github.com/crunchy-labs/crunchy-cli/cli/commands/update"
	"github.com/crunchy-labs/crunchy-cli/utils"
	"github.com/crunchy-labs/crunchyroll-go/v3"
	crunchyUtils "github.com/crunchy-labs/crunchyroll-go/v3/utils"
	"github.com/spf13/cobra"
	"os"
	"runtime/debug"
	"strings"
)

var (
	quietFlag   bool
	verboseFlag bool

	proxyFlag string

	langFlag string

	useragentFlag string
)

var RootCmd = &cobra.Command{
	Use:     "crunchy-cli",
	Version: utils.Version,
	Short:   "Download crunchyroll videos with ease. See the wiki for details about the cli and library: https://github.com/crunchy-labs/crunchy-cli/wiki",

	SilenceErrors: true,
	SilenceUsage:  true,

	PersistentPreRunE: func(cmd *cobra.Command, args []string) (err error) {
		if verboseFlag {
			utils.Log = commands.NewLogger(true, true, true)
		} else if quietFlag {
			utils.Log = commands.NewLogger(false, false, false)
		}

		if langFlag != "" {
			if !crunchyUtils.ValidateLocale(crunchyroll.LOCALE(langFlag)) {
				return fmt.Errorf("'%s' is not a valid language. Choose from %s", langFlag, strings.Join(utils.LocalesAsStrings(), ", "))
			}

			os.Setenv("CRUNCHY_LANG", langFlag)
		}

		utils.Log.Debug("Executing `%s` command with %d arg(s)", cmd.Name(), len(args))

		utils.Client, err = utils.CreateOrDefaultClient(proxyFlag, useragentFlag)
		return
	},
}

func init() {
	RootCmd.PersistentFlags().BoolVarP(&quietFlag, "quiet", "q", false, "Disable all output")
	RootCmd.PersistentFlags().BoolVarP(&verboseFlag, "verbose", "v", false, "Adds debug messages to the normal output")

	RootCmd.PersistentFlags().StringVarP(&proxyFlag, "proxy", "p", "", "Proxy to use")

	RootCmd.PersistentFlags().StringVar(&langFlag, "lang", "", fmt.Sprintf("Set language to use. If not set, it's received from the system locale dynamically. Choose from: %s", strings.Join(utils.LocalesAsStrings(), ", ")))

	RootCmd.PersistentFlags().StringVar(&useragentFlag, "useragent", fmt.Sprintf("crunchy-cli/%s", utils.Version), "Useragent to do all request with")

	RootCmd.AddCommand(archive.Cmd)
	RootCmd.AddCommand(download.Cmd)
	RootCmd.AddCommand(info.Cmd)
	RootCmd.AddCommand(login.Cmd)
	RootCmd.AddCommand(update.Cmd)

	utils.Log = commands.NewLogger(false, true, true)
}

func Execute() {
	RootCmd.CompletionOptions.HiddenDefaultCmd = true
	defer func() {
		if r := recover(); r != nil {
			if utils.Log.IsDev() {
				utils.Log.Err("%v: %s", r, debug.Stack())
			} else {
				utils.Log.Err("Unexpected error: %v", r)
			}
			os.Exit(1)
		}
	}()
	if err := RootCmd.Execute(); err != nil {
		if !strings.HasSuffix(err.Error(), context.Canceled.Error()) {
			utils.Log.Err("An error occurred: %v", err)
		}
		os.Exit(1)
	}
}
29 crunchy-cli-core/Cargo.toml Normal file
@@ -0,0 +1,29 @@
[package]
name = "crunchy-cli-core"
version = "0.1.0"
edition = "2021"

[features]
# Embed a static curl library into the binary instead of just linking it.
static-curl = ["crunchyroll-rs/static-curl"]
# Embed a static openssl library into the binary instead of just linking it. If you want to compile this project against
# musl and have openssl issues, this might solve these issues.
static-ssl = ["crunchyroll-rs/static-ssl"]

[dependencies]
anyhow = "1.0"
async-trait = "0.1"
clap = { version = "4.0", features = ["derive"] }
chrono = "0.4"
crunchyroll-rs = { git = "https://github.com/crunchy-labs/crunchyroll-rs", default-features = false, features = ["stream", "parse"] }
ctrlc = "3.2"
dirs = "4.0"
isahc = { git = "https://github.com/sagebind/isahc", rev = "34f158ef" }
log = { version = "0.4", features = ["std"] }
num_cpus = "1.13"
regex = "1.6"
signal-hook = "0.3"
tempfile = "3.3"
terminal_size = "0.2"
tokio = { version = "1.21", features = ["macros", "rt-multi-thread", "time"] }
sys-locale = "0.2"
567 crunchy-cli-core/src/cli/archive.rs Normal file
@@ -0,0 +1,567 @@
use crate::cli::log::tab_info;
|
||||||
|
use crate::cli::utils::{download_segments, find_resolution};
|
||||||
|
use crate::utils::context::Context;
|
||||||
|
use crate::utils::format::{format_string, Format};
|
||||||
|
use crate::utils::log::progress;
|
||||||
|
use crate::utils::os::{free_file, tempfile};
|
||||||
|
use crate::utils::parse::{parse_url, UrlFilter};
|
||||||
|
use crate::utils::sort::{sort_formats_after_seasons, sort_seasons_after_number};
|
||||||
|
use crate::Execute;
|
||||||
|
use anyhow::{bail, Result};
|
||||||
|
use crunchyroll_rs::media::{Resolution, StreamSubtitle};
|
||||||
|
use crunchyroll_rs::{Locale, Media, MediaCollection, Series};
|
||||||
|
use log::{debug, error, info};
|
||||||
|
use regex::Regex;
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::io::Write;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::process::{Command, Stdio};
|
||||||
|
use tempfile::TempPath;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub enum MergeBehavior {
|
||||||
|
Auto,
|
||||||
|
Audio,
|
||||||
|
Video,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_merge_behavior(s: &str) -> Result<MergeBehavior, String> {
|
||||||
|
Ok(match s.to_lowercase().as_str() {
|
||||||
|
"auto" => MergeBehavior::Auto,
|
||||||
|
"audio" => MergeBehavior::Audio,
|
||||||
|
"video" => MergeBehavior::Video,
|
||||||
|
_ => return Err(format!("'{}' is not a valid merge behavior", s)),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, clap::Parser)]
|
||||||
|
#[clap(about = "Archive a video")]
|
||||||
|
#[command(arg_required_else_help(true))]
|
||||||
|
#[command()]
|
||||||
|
pub struct Archive {
|
||||||
|
#[arg(help = format!("Audio languages. Can be used multiple times. \
|
||||||
|
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
|
#[arg(long_help = format!("Audio languages. Can be used multiple times. \
|
||||||
|
Available languages are:\n{}", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
||||||
|
#[arg(short, long, default_values_t = vec![crate::utils::locale::system_locale(), Locale::ja_JP])]
|
||||||
|
locale: Vec<Locale>,
|
||||||
|
#[arg(help = format!("Subtitle languages. Can be used multiple times. \
|
||||||
|
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
|
#[arg(long_help = format!("Subtitle languages. Can be used multiple times. \
|
||||||
|
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
|
#[arg(short, long, default_values_t = Locale::all())]
|
||||||
|
subtitle: Vec<Locale>,
|
||||||
|
|
||||||
|
#[arg(help = "Name of the output file")]
|
||||||
|
#[arg(long_help = "Name of the output file.\
|
||||||
|
If you use one of the following pattern they will get replaced:\n \
|
||||||
|
{title} → Title of the video\n \
|
||||||
|
{series_name} → Name of the series\n \
|
||||||
|
{season_name} → Name of the season\n \
|
||||||
|
{audio} → Audio language of the video\n \
|
||||||
|
{resolution} → Resolution of the video\n \
|
||||||
|
{season_number} → Number of the season\n \
|
||||||
|
{episode_number} → Number of the episode\n \
|
||||||
|
{series_id} → ID of the series\n \
|
||||||
|
{season_id} → ID of the season\n \
|
||||||
|
{episode_id} → ID of the episode")]
|
||||||
|
#[arg(short, long, default_value = "{title}.mkv")]
|
||||||
|
output: String,
|
||||||
|
|
||||||
|
#[arg(help = "Video resolution")]
|
||||||
|
#[arg(long_help = "The video resolution.\
|
||||||
|
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
|
||||||
|
Specifying the exact pixels is not recommended, use one of the other options instead. \
|
||||||
|
Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \
|
||||||
|
The available common-use words are 'best' (choose the best resolution available) and 'worst' (worst resolution available)")]
|
||||||
|
#[arg(short, long, default_value = "best")]
|
||||||
|
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
|
||||||
|
resolution: Resolution,
|
||||||
|
|
||||||
|
#[arg(
|
||||||
|
help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio' and 'video'"
|
||||||
|
)]
|
||||||
|
#[arg(
|
||||||
|
long_help = "Because of local restrictions (or other reasons) some episodes with different languages does not have the same length (e.g. when some scenes were cut out). \
|
||||||
|
With this flag you can set the behavior when handling multiple language.
|
||||||
|
Valid options are 'audio' (stores one video and all other languages as audio only), 'video' (stores the video + audio for every language) and 'auto' (detects if videos differ in length: if so, behave like 'video' else like 'audio')"
|
||||||
|
)]
|
||||||
|
#[arg(short, long, default_value = "auto")]
|
||||||
|
#[arg(value_parser = parse_merge_behavior)]
|
||||||
|
merge: MergeBehavior,
|
||||||
|
|
||||||
|
#[arg(
|
||||||
|
help = "Set which subtitle language should be set as default / auto shown when starting a video"
|
||||||
|
)]
|
||||||
|
#[arg(long)]
|
||||||
|
default_subtitle: Option<Locale>,
|
||||||
|
#[arg(help = "Disable subtitle optimizations")]
|
||||||
|
#[arg(
|
||||||
|
long_help = "By default, Crunchyroll delivers subtitles in a format which may cause issues in some video players. \
|
||||||
|
These issues are fixed internally by setting a flag which is not part of the official specification of the subtitle format. \
|
||||||
|
If you do not want this fixes or they cause more trouble than they solve (for you), it can be disabled with this flag"
|
||||||
|
)]
|
||||||
|
#[arg(long)]
|
||||||
|
no_subtitle_optimizations: bool,
|
||||||
|
|
||||||
|
#[arg(help = "Crunchyroll series url(s)")]
|
||||||
|
urls: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
impl Execute for Archive {
|
||||||
|
async fn execute(self, ctx: Context) -> Result<()> {
|
||||||
|
let mut parsed_urls = vec![];
|
||||||
|
|
||||||
|
for (i, url) in self.urls.iter().enumerate() {
|
||||||
|
let _progress_handler = progress!("Parsing url {}", i + 1);
|
||||||
|
match parse_url(&ctx.crunchy, url.clone(), true).await {
|
||||||
|
Ok((media_collection, url_filter)) => {
|
||||||
|
parsed_urls.push((media_collection, url_filter));
|
||||||
|
info!("Parsed url {}", i + 1)
|
||||||
|
}
|
||||||
|
Err(e) => bail!("url {} could not be parsed: {}", url, e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
|
||||||
|
let archive_formats = match media_collection {
|
||||||
|
MediaCollection::Series(series) => {
|
||||||
|
let _progress_handler = progress!("Fetching series details");
|
||||||
|
formats_from_series(&self, series, &url_filter).await?
|
||||||
|
}
|
||||||
|
MediaCollection::Season(_) => bail!("Archiving a season is not supported"),
|
||||||
|
MediaCollection::Episode(episode) => bail!("Archiving a episode is not supported. Use url filtering instead to specify the episode (https://www.crunchyroll.com/series/{}/{}[S{}E{}])", episode.metadata.series_id, episode.metadata.series_slug_title, episode.metadata.season_number, episode.metadata.episode_number),
|
||||||
|
MediaCollection::MovieListing(_) => bail!("Archiving a movie listing is not supported"),
|
||||||
|
MediaCollection::Movie(_) => bail!("Archiving a movie is not supported")
|
||||||
|
};
|
||||||
|
|
||||||
|
if archive_formats.is_empty() {
|
||||||
|
info!("Skipping url {} (no matching episodes found)", i + 1);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
info!("Loaded series information for url {}", i + 1);
|
||||||
|
|
||||||
|
if log::max_level() == log::Level::Debug {
|
||||||
|
let seasons = sort_formats_after_seasons(
|
||||||
|
archive_formats
|
||||||
|
.clone()
|
||||||
|
.into_iter()
|
||||||
|
.map(|(a, _)| a.get(0).unwrap().clone())
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
debug!("Series has {} seasons", seasons.len());
|
||||||
|
for (i, season) in seasons.into_iter().enumerate() {
|
||||||
|
info!("Season {} ({})", i + 1, season.get(0).unwrap().season_title);
|
||||||
|
for format in season {
|
||||||
|
info!(
|
||||||
|
"{}: {}px, {:.02} FPS (S{:02}E{:02})",
|
||||||
|
format.title,
|
||||||
|
format.stream.resolution,
|
||||||
|
format.stream.fps,
|
||||||
|
format.season_number,
|
||||||
|
format.number,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for season in sort_formats_after_seasons(
|
||||||
|
archive_formats
|
||||||
|
.clone()
|
||||||
|
.into_iter()
|
||||||
|
.map(|(a, _)| a.get(0).unwrap().clone())
|
||||||
|
.collect(),
|
||||||
|
) {
|
||||||
|
let first = season.get(0).unwrap();
|
||||||
|
info!(
|
||||||
|
"{} Season {} ({})",
|
||||||
|
first.series_name, first.season_number, first.season_title
|
||||||
|
);
|
||||||
|
|
||||||
|
for (i, format) in season.into_iter().enumerate() {
|
||||||
|
tab_info!(
|
||||||
|
"{}. {} » {}px, {:.2} FPS (S{:02}E{:02})",
|
||||||
|
i + 1,
|
||||||
|
format.title,
|
||||||
|
format.stream.resolution,
|
||||||
|
format.stream.fps,
|
||||||
|
format.season_number,
|
||||||
|
format.number
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (formats, subtitles) in archive_formats {
|
||||||
|
let (primary, additionally) = formats.split_first().unwrap();
|
||||||
|
|
||||||
|
let mut path = PathBuf::from(&self.output);
|
||||||
|
path = free_file(
|
||||||
|
path.with_file_name(format_string(
|
||||||
|
if let Some(fname) = path.file_name() {
|
||||||
|
fname.to_str().unwrap()
|
||||||
|
} else {
|
||||||
|
"{title}.mkv"
|
||||||
|
}
|
||||||
|
.to_string(),
|
||||||
|
primary,
|
||||||
|
)),
|
||||||
|
)
|
||||||
|
.0;
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"Downloading {} to '{}'",
|
||||||
|
primary.title,
|
||||||
|
path.to_str().unwrap()
|
||||||
|
);
|
||||||
|
tab_info!(
|
||||||
|
"Episode: S{:02}E{:02}",
|
||||||
|
primary.season_number,
|
||||||
|
primary.number
|
||||||
|
);
|
||||||
|
tab_info!(
|
||||||
|
"Audio: {} (primary), {}",
|
||||||
|
primary.audio,
|
||||||
|
additionally
|
||||||
|
.iter()
|
||||||
|
.map(|a| a.audio.to_string())
|
||||||
|
.collect::<Vec<String>>()
|
||||||
|
.join(", ")
|
||||||
|
);
|
||||||
|
tab_info!(
|
||||||
|
"Subtitle: {}",
|
||||||
|
subtitles
|
||||||
|
.iter()
|
||||||
|
.map(|s| {
|
||||||
|
if let Some(default) = &self.default_subtitle {
|
||||||
|
if default == &s.locale {
|
||||||
|
return format!("{} (primary)", default);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.locale.to_string()
|
||||||
|
})
|
||||||
|
.collect::<Vec<String>>()
|
||||||
|
.join(", ")
|
||||||
|
);
|
||||||
|
tab_info!("Resolution: {}", primary.stream.resolution);
|
||||||
|
tab_info!("FPS: {:.2}", primary.stream.fps);
|
||||||
|
|
||||||
|
let mut video_paths = vec![];
|
||||||
|
let mut audio_paths = vec![];
|
||||||
|
let mut subtitle_paths = vec![];
|
||||||
|
|
||||||
|
video_paths.push((download_video(&ctx, primary, false).await?, primary));
|
||||||
|
for additional in additionally {
|
||||||
|
let only_audio = match self.merge {
|
||||||
|
MergeBehavior::Auto => additionally
|
||||||
|
.iter()
|
||||||
|
.all(|a| a.stream.bandwidth == primary.stream.bandwidth),
|
||||||
|
MergeBehavior::Audio => true,
|
||||||
|
MergeBehavior::Video => false,
|
||||||
|
};
|
||||||
|
let path = download_video(&ctx, additional, only_audio).await?;
|
||||||
|
if only_audio {
|
||||||
|
audio_paths.push((path, additional))
|
||||||
|
} else {
|
||||||
|
video_paths.push((path, additional))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for subtitle in subtitles {
|
||||||
|
subtitle_paths
|
||||||
|
.push((download_subtitle(&self, subtitle.clone()).await?, subtitle))
|
||||||
|
}
|
||||||
|
|
||||||
|
generate_mkv(&self, path, video_paths, audio_paths, subtitle_paths)?
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn formats_from_series(
|
||||||
|
archive: &Archive,
|
||||||
|
series: Media<Series>,
|
||||||
|
url_filter: &UrlFilter,
|
||||||
|
) -> Result<Vec<(Vec<Format>, Vec<StreamSubtitle>)>> {
|
||||||
|
let mut seasons = series.seasons().await?;
|
||||||
|
|
||||||
|
// filter any season out which does not contain the specified audio languages
|
||||||
|
for season in sort_seasons_after_number(seasons.clone()) {
|
||||||
|
// get all locales which are specified but not present in the current iterated season and
|
||||||
|
// print an error saying this
|
||||||
|
let not_present_audio = archive
|
||||||
|
.locale
|
||||||
|
.clone()
|
||||||
|
.into_iter()
|
||||||
|
.filter(|l| !season.iter().any(|s| &s.metadata.audio_locale == l))
|
||||||
|
.collect::<Vec<Locale>>();
|
||||||
|
for not_present in not_present_audio {
|
||||||
|
error!(
|
||||||
|
"Season {} of series {} is not available with {} audio",
|
||||||
|
season.first().unwrap().metadata.season_number,
|
||||||
|
series.title,
|
||||||
|
not_present
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove all seasons with the wrong audio for the current iterated season number
|
||||||
|
seasons.retain(|s| {
|
||||||
|
s.metadata.season_number != season.first().unwrap().metadata.season_number
|
||||||
|
|| archive.locale.contains(&s.metadata.audio_locale)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::type_complexity)]
|
||||||
|
let mut result: BTreeMap<u32, BTreeMap<u32, (Vec<Format>, Vec<StreamSubtitle>)>> =
|
||||||
|
BTreeMap::new();
|
||||||
|
for season in series.seasons().await? {
|
||||||
|
if !url_filter.is_season_valid(season.metadata.season_number)
|
||||||
|
|| !archive.locale.contains(&season.metadata.audio_locale)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
for episode in season.episodes().await? {
|
||||||
|
if !url_filter.is_episode_valid(
|
||||||
|
episode.metadata.episode_number,
|
||||||
|
episode.metadata.season_number,
|
||||||
|
) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let streams = episode.streams().await?;
|
||||||
|
let streaming_data = streams.streaming_data(None).await?;
|
||||||
|
let Some(stream) = find_resolution(streaming_data, &archive.resolution) else {
|
||||||
|
bail!(
|
||||||
|
"Resolution ({}x{}) is not available for episode {} ({}) of season {} ({}) of {}",
|
||||||
|
archive.resolution.width,
|
||||||
|
archive.resolution.height,
|
||||||
|
episode.metadata.episode_number,
|
||||||
|
episode.title,
|
||||||
|
episode.metadata.season_number,
|
||||||
|
episode.metadata.season_title,
|
||||||
|
episode.metadata.series_title
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
|
let (ref mut formats, _) = result
|
||||||
|
.entry(season.metadata.season_number)
|
||||||
|
.or_insert_with(BTreeMap::new)
|
||||||
|
.entry(episode.metadata.episode_number)
|
||||||
|
.or_insert_with(|| {
|
||||||
|
let subtitles: Vec<StreamSubtitle> = archive
|
||||||
|
.subtitle
|
||||||
|
.iter()
|
||||||
|
.filter_map(|l| streams.subtitles.get(l).cloned())
|
||||||
|
.collect();
|
||||||
|
(vec![], subtitles)
|
||||||
|
});
|
||||||
|
formats.push(Format::new_from_episode(episode, stream));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(result.into_values().flat_map(|v| v.into_values()).collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn download_video(ctx: &Context, format: &Format, only_audio: bool) -> Result<TempPath> {
|
||||||
|
let tempfile = if only_audio {
|
||||||
|
tempfile(".aac")?
|
||||||
|
} else {
|
||||||
|
tempfile(".ts")?
|
||||||
|
};
|
||||||
|
let (_, path) = tempfile.into_parts();
|
||||||
|
|
||||||
|
let ffmpeg = Command::new("ffmpeg")
|
||||||
|
.stdin(Stdio::piped())
|
||||||
|
.stdout(Stdio::null())
|
||||||
|
.stderr(Stdio::piped())
|
||||||
|
.arg("-y")
|
||||||
|
.args(["-f", "mpegts", "-i", "pipe:"])
|
||||||
|
.args(if only_audio { vec!["-vn"] } else { vec![] })
|
||||||
|
.arg(path.to_str().unwrap())
|
||||||
|
.spawn()?;
|
||||||
|
|
||||||
|
download_segments(
|
||||||
|
ctx,
|
||||||
|
&mut ffmpeg.stdin.unwrap(),
|
||||||
|
Some(format!("Download {}", format.audio)),
|
||||||
|
format.stream.segments().await?,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn download_subtitle(archive: &Archive, subtitle: StreamSubtitle) -> Result<TempPath> {
|
||||||
|
let tempfile = tempfile(".ass")?;
|
||||||
|
let (mut file, path) = tempfile.into_parts();
|
||||||
|
|
||||||
|
let mut buf = vec![];
|
||||||
|
subtitle.write_to(&mut buf).await?;
|
||||||
|
if !archive.no_subtitle_optimizations {
|
||||||
|
buf = fix_subtitle(buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
file.write_all(buf.as_slice())?;
|
||||||
|
|
||||||
|
Ok(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add `ScaledBorderAndShadows: yes` to subtitles; without it they look very messy on some video
|
||||||
|
/// players. See [crunchy-labs/crunchy-cli#66](https://github.com/crunchy-labs/crunchy-cli/issues/66)
|
||||||
|
/// for more information.
|
||||||
|
fn fix_subtitle(raw: Vec<u8>) -> Vec<u8> {
|
||||||
|
let mut script_info = false;
|
||||||
|
let mut new = String::new();
|
||||||
|
|
||||||
|
for line in String::from_utf8_lossy(raw.as_slice()).split('\n') {
|
||||||
|
if line.trim().starts_with('[') && script_info {
|
||||||
|
new.push_str("ScaledBorderAndShadows: yes\n");
|
||||||
|
script_info = false
|
||||||
|
} else if line.trim() == "[Script Info]" {
|
||||||
|
script_info = true
|
||||||
|
}
|
||||||
|
new.push_str(line);
|
||||||
|
new.push('\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
new.into_bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn generate_mkv(
|
||||||
|
archive: &Archive,
|
||||||
|
target: PathBuf,
|
||||||
|
video_paths: Vec<(TempPath, &Format)>,
|
||||||
|
audio_paths: Vec<(TempPath, &Format)>,
|
||||||
|
subtitle_paths: Vec<(TempPath, StreamSubtitle)>,
|
||||||
|
) -> Result<()> {
|
||||||
|
let mut input = vec![];
|
||||||
|
let mut maps = vec![];
|
||||||
|
let mut metadata = vec![];
|
||||||
|
|
||||||
|
let mut video_length = (0, 0, 0, 0);
|
||||||
|
|
||||||
|
for (i, (video_path, format)) in video_paths.iter().enumerate() {
|
||||||
|
input.extend(["-i".to_string(), video_path.to_string_lossy().to_string()]);
|
||||||
|
maps.extend(["-map".to_string(), i.to_string()]);
|
||||||
|
metadata.extend([
|
||||||
|
format!("-metadata:s:v:{}", i),
|
||||||
|
format!("language={}", format.audio),
|
||||||
|
]);
|
||||||
|
metadata.extend([
|
||||||
|
format!("-metadata:s:v:{}", i),
|
||||||
|
format!("title={}", format.audio.to_human_readable()),
|
||||||
|
]);
|
||||||
|
metadata.extend([
|
||||||
|
format!("-metadata:s:a:{}", i),
|
||||||
|
format!("language={}", format.audio),
|
||||||
|
]);
|
||||||
|
metadata.extend([
|
||||||
|
format!("-metadata:s:a:{}", i),
|
||||||
|
format!("title={}", format.audio.to_human_readable()),
|
||||||
|
]);
|
||||||
|
|
||||||
|
let vid_len = get_video_length(video_path.to_path_buf())?;
|
||||||
|
if vid_len > video_length {
|
||||||
|
video_length = vid_len
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (i, (audio_path, format)) in audio_paths.iter().enumerate() {
|
||||||
|
input.extend(["-i".to_string(), audio_path.to_string_lossy().to_string()]);
|
||||||
|
maps.extend(["-map".to_string(), (i + video_paths.len()).to_string()]);
|
||||||
|
metadata.extend([
|
||||||
|
format!("-metadata:s:a:{}", i + video_paths.len()),
|
||||||
|
format!("language={}", format.audio),
|
||||||
|
]);
|
||||||
|
metadata.extend([
|
||||||
|
format!("-metadata:s:a:{}", i + video_paths.len()),
|
||||||
|
format!("title={}", format.audio.to_human_readable()),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
for (i, (subtitle_path, subtitle)) in subtitle_paths.iter().enumerate() {
|
||||||
|
input.extend([
|
||||||
|
"-i".to_string(),
|
||||||
|
subtitle_path.to_string_lossy().to_string(),
|
||||||
|
]);
|
||||||
|
maps.extend([
|
||||||
|
"-map".to_string(),
|
||||||
|
(i + video_paths.len() + audio_paths.len()).to_string(),
|
||||||
|
]);
|
||||||
|
metadata.extend([
|
||||||
|
format!("-metadata:s:s:{}", i),
|
||||||
|
format!("language={}", subtitle.locale),
|
||||||
|
]);
|
||||||
|
metadata.extend([
|
||||||
|
format!("-metadata:s:s:{}", i),
|
||||||
|
format!("title={}", subtitle.locale.to_human_readable()),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut command_args = vec!["-y".to_string()];
|
||||||
|
command_args.extend(input);
|
||||||
|
command_args.extend(maps);
|
||||||
|
command_args.extend(metadata);
|
||||||
|
|
||||||
|
// set default subtitle
|
||||||
|
if let Some(default_subtitle) = &archive.default_subtitle {
|
||||||
|
// if `--default_subtitle <locale>` is given set the default subtitle to the given locale
|
||||||
|
if let Some(position) = subtitle_paths
|
||||||
|
.into_iter()
|
||||||
|
.position(|s| &s.1.locale == default_subtitle)
|
||||||
|
{
|
||||||
|
command_args.push(format!("-disposition:s:{}", position))
|
||||||
|
} else {
|
||||||
|
command_args.extend(["-disposition:s:0".to_string(), "0".to_string()])
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
command_args.extend(["-disposition:s:0".to_string(), "0".to_string()])
|
||||||
|
}
|
||||||
|
|
||||||
|
command_args.extend([
|
||||||
|
"-c".to_string(),
|
||||||
|
"copy".to_string(),
|
||||||
|
"-f".to_string(),
|
||||||
|
"matroska".to_string(),
|
||||||
|
target.to_string_lossy().to_string(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
debug!("ffmpeg {}", command_args.join(" "));
|
||||||
|
|
||||||
|
let ffmpeg = Command::new("ffmpeg")
|
||||||
|
.stdout(Stdio::null())
|
||||||
|
.stderr(Stdio::piped())
|
||||||
|
.args(command_args)
|
||||||
|
.output()?;
|
||||||
|
if !ffmpeg.status.success() {
|
||||||
|
bail!("{}", String::from_utf8_lossy(ffmpeg.stderr.as_slice()))
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the length of a video. This is required because sometimes subtitles have an unnecessary entry
|
||||||
|
/// long after the actual video ends with artificially extends the video length on some video players.
|
||||||
|
/// To prevent this, the video length must be hard set with ffmpeg. See
|
||||||
|
/// [crunchy-labs/crunchy-cli#32](https://github.com/crunchy-labs/crunchy-cli/issues/32) for more
|
||||||
|
/// information.
|
||||||
|
fn get_video_length(path: PathBuf) -> Result<(u32, u32, u32, u32)> {
|
||||||
|
let video_length = Regex::new(r"Duration:\s?(\d+):(\d+):(\d+).(\d+),")?;
|
||||||
|
|
||||||
|
let ffmpeg = Command::new("ffmpeg")
|
||||||
|
.stdout(Stdio::null())
|
||||||
|
.stderr(Stdio::piped())
|
||||||
|
.arg("-y")
|
||||||
|
.args(["-i", path.to_str().unwrap()])
|
||||||
|
.output()?;
|
||||||
|
let ffmpeg_output = String::from_utf8(ffmpeg.stderr)?;
|
||||||
|
let caps = video_length.captures(ffmpeg_output.as_str()).unwrap();
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
caps[1].parse()?,
|
||||||
|
caps[2].parse()?,
|
||||||
|
caps[3].parse()?,
|
||||||
|
caps[4].parse()?,
|
||||||
|
))
|
||||||
|
}
|
||||||
452 crunchy-cli-core/src/cli/download.rs Normal file
@@ -0,0 +1,452 @@
use crate::cli::log::tab_info;
|
||||||
|
use crate::cli::utils::{download_segments, find_resolution};
|
||||||
|
use crate::utils::context::Context;
|
||||||
|
use crate::utils::format::{format_string, Format};
|
||||||
|
use crate::utils::log::progress;
|
||||||
|
use crate::utils::os::{free_file, has_ffmpeg};
|
||||||
|
use crate::utils::parse::{parse_url, UrlFilter};
|
||||||
|
use crate::utils::sort::{sort_formats_after_seasons, sort_seasons_after_number};
|
||||||
|
use crate::Execute;
|
||||||
|
use anyhow::{bail, Result};
|
||||||
|
use crunchyroll_rs::media::{Resolution, VariantSegment};
|
||||||
|
use crunchyroll_rs::{
|
||||||
|
Episode, Locale, Media, MediaCollection, Movie, MovieListing, Season, Series,
|
||||||
|
};
|
||||||
|
use log::{debug, error, info};
|
||||||
|
use std::fs::File;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::process::{Command, Stdio};
|
||||||
|
|
||||||
|
#[derive(Debug, clap::Parser)]
|
||||||
|
#[clap(about = "Download a video")]
|
||||||
|
#[command(arg_required_else_help(true))]
|
||||||
|
pub struct Download {
|
||||||
|
#[arg(help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
|
||||||
|
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
|
#[arg(long_help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
|
||||||
|
Available languages are:\n{}", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
||||||
|
#[arg(short, long, default_value_t = crate::utils::locale::system_locale())]
|
||||||
|
audio: Locale,
|
||||||
|
#[arg(help = format!("Subtitle language. Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
|
#[arg(long_help = format!("Subtitle language. If set, the subtitle will be burned into the video and cannot be disabled. \
|
||||||
|
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||||
|
#[arg(short, long)]
|
||||||
|
subtitle: Option<Locale>,
|
||||||
|
|
||||||
|
#[arg(help = "Name of the output file")]
|
||||||
|
#[arg(long_help = "Name of the output file.\
|
||||||
|
If you use one of the following pattern they will get replaced:\n \
|
||||||
|
{title} → Title of the video\n \
|
||||||
|
{series_name} → Name of the series\n \
|
||||||
|
{season_name} → Name of the season\n \
|
||||||
|
{audio} → Audio language of the video\n \
|
||||||
|
{resolution} → Resolution of the video\n \
|
||||||
|
{season_number} → Number of the season\n \
|
||||||
|
{episode_number} → Number of the episode\n \
|
||||||
|
{series_id} → ID of the series\n \
|
||||||
|
{season_id} → ID of the season\n \
|
||||||
|
{episode_id} → ID of the episode")]
|
||||||
|
#[arg(short, long, default_value = "{title}.ts")]
|
||||||
|
output: String,
|
||||||
|
|
||||||
|
#[arg(help = "Video resolution")]
|
||||||
|
#[arg(long_help = "The video resolution.\
|
||||||
|
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
|
||||||
|
Specifying the exact pixels is not recommended, use one of the other options instead. \
|
||||||
|
Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \
|
||||||
|
The available common-use words are 'best' (choose the best resolution available) and 'worst' (worst resolution available)")]
|
||||||
|
#[arg(short, long, default_value = "best")]
|
||||||
|
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
|
||||||
|
resolution: Resolution,
|
||||||
|
|
||||||
|
#[arg(help = "Url(s) to Crunchyroll episodes or series")]
|
||||||
|
urls: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
impl Execute for Download {
|
||||||
|
async fn execute(self, ctx: Context) -> Result<()> {
|
||||||
|
let mut parsed_urls = vec![];
|
||||||
|
|
||||||
|
for (i, url) in self.urls.iter().enumerate() {
|
||||||
|
let _progress_handler = progress!("Parsing url {}", i + 1);
|
||||||
|
match parse_url(&ctx.crunchy, url.clone(), true).await {
|
||||||
|
Ok((media_collection, url_filter)) => {
|
||||||
|
parsed_urls.push((media_collection, url_filter));
|
||||||
|
info!("Parsed url {}", i + 1)
|
||||||
|
}
|
||||||
|
Err(e) => bail!("url {} could not be parsed: {}", url, e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
|
||||||
|
let _progress_handler = progress!("Fetching series details");
|
||||||
|
let formats = match media_collection {
|
||||||
|
MediaCollection::Series(series) => {
|
||||||
|
debug!("Url {} is series ({})", i + 1, series.title);
|
||||||
|
formats_from_series(&self, series, &url_filter).await?
|
||||||
|
}
|
||||||
|
MediaCollection::Season(season) => {
|
||||||
|
debug!(
|
||||||
|
"Url {} is season {} ({})",
|
||||||
|
i + 1,
|
||||||
|
season.metadata.season_number,
|
||||||
|
season.title
|
||||||
|
);
|
||||||
|
formats_from_season(&self, season, &url_filter).await?
|
||||||
|
}
|
||||||
|
MediaCollection::Episode(episode) => {
|
||||||
|
debug!(
|
||||||
|
"Url {} is episode {} ({}) of season {} ({}) of {}",
|
||||||
|
i + 1,
|
||||||
|
episode.metadata.episode_number,
|
||||||
|
episode.title,
|
||||||
|
episode.metadata.season_number,
|
||||||
|
episode.metadata.season_title,
|
||||||
|
episode.metadata.series_title
|
||||||
|
);
|
||||||
|
format_from_episode(&self, episode, &url_filter, false)
|
||||||
|
.await?
|
||||||
|
.map(|fmt| vec![fmt])
|
||||||
|
}
|
||||||
|
MediaCollection::MovieListing(movie_listing) => {
|
||||||
|
debug!("Url {} is movie listing ({})", i + 1, movie_listing.title);
|
||||||
|
format_from_movie_listing(&self, movie_listing, &url_filter).await?
|
||||||
|
}
|
||||||
|
MediaCollection::Movie(movie) => {
|
||||||
|
debug!("Url {} is movie ({})", i + 1, movie.title);
|
||||||
|
format_from_movie(&self, movie, &url_filter)
|
||||||
|
.await?
|
||||||
|
.map(|fmt| vec![fmt])
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(formats) = formats else {
|
||||||
|
info!("Skipping url {} (no matching episodes found)", i + 1);
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
info!("Loaded series information for url {}", i + 1);
|
||||||
|
drop(_progress_handler);
|
||||||
|
|
||||||
|
if log::max_level() == log::Level::Debug {
|
||||||
|
let seasons = sort_formats_after_seasons(formats.clone());
|
||||||
|
debug!("Series has {} seasons", seasons.len());
|
||||||
|
for (i, season) in seasons.into_iter().enumerate() {
|
||||||
|
info!("Season {} ({})", i + 1, season.get(0).unwrap().season_title);
|
||||||
|
for format in season {
|
||||||
|
info!(
|
||||||
|
"{}: {}px, {:.02} FPS (S{:02}E{:02})",
|
||||||
|
format.title,
|
||||||
|
format.stream.resolution,
|
||||||
|
format.stream.fps,
|
||||||
|
format.season_number,
|
||||||
|
format.number,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for season in sort_formats_after_seasons(formats.clone()) {
|
||||||
|
let first = season.get(0).unwrap();
|
||||||
|
info!(
|
||||||
|
"{} Season {} ({})",
|
||||||
|
first.series_name, first.season_number, first.season_title
|
||||||
|
);
|
||||||
|
|
||||||
|
for (i, format) in season.into_iter().enumerate() {
|
||||||
|
tab_info!(
|
||||||
|
"{}. {} » {}px, {:.2} FPS (S{:02}E{:02})",
|
||||||
|
i + 1,
|
||||||
|
format.title,
|
||||||
|
format.stream.resolution,
|
||||||
|
format.stream.fps,
|
||||||
|
format.season_number,
|
||||||
|
format.number
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for format in formats {
|
||||||
|
let mut path = PathBuf::from(&self.output);
|
||||||
|
path = free_file(
|
||||||
|
path.with_file_name(format_string(
|
||||||
|
if let Some(fname) = path.file_name() {
|
||||||
|
fname.to_str().unwrap()
|
||||||
|
} else {
|
||||||
|
"{title}.ts"
|
||||||
|
}
|
||||||
|
.to_string(),
|
||||||
|
&format,
|
||||||
|
)),
|
||||||
|
)
|
||||||
|
.0;
|
||||||
|
|
||||||
|
let use_ffmpeg = if let Some(extension) = path.extension() {
|
||||||
|
if extension != "ts" {
|
||||||
|
if !has_ffmpeg() {
|
||||||
|
bail!(
|
||||||
|
"File ending is not `.ts`, ffmpeg is required to convert the video"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
true
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
};
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"Downloading {} to '{}'",
|
||||||
|
format.title,
|
||||||
|
path.file_name().unwrap().to_str().unwrap()
|
||||||
|
);
|
||||||
|
tab_info!("Episode: S{:02}E{:02}", format.season_number, format.number);
|
||||||
|
tab_info!("Audio: {}", format.audio);
|
||||||
|
tab_info!(
|
||||||
|
"Subtitles: {}",
|
||||||
|
self.subtitle
|
||||||
|
.clone()
|
||||||
|
.map_or("None".to_string(), |l| l.to_string())
|
||||||
|
);
|
||||||
|
tab_info!("Resolution: {}", format.stream.resolution);
|
||||||
|
tab_info!("FPS: {:.2}", format.stream.fps);
|
||||||
|
|
||||||
|
let segments = format.stream.segments().await?;
|
||||||
|
|
||||||
|
if use_ffmpeg {
|
||||||
|
download_ffmpeg(&ctx, segments, path.as_path()).await?;
|
||||||
|
} else if path.to_str().unwrap() == "-" {
|
||||||
|
let mut stdout = std::io::stdout().lock();
|
||||||
|
download_segments(&ctx, &mut stdout, None, segments).await?;
|
||||||
|
} else {
|
||||||
|
let mut file = File::options().create(true).write(true).open(&path)?;
|
||||||
|
download_segments(&ctx, &mut file, None, segments).await?
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn download_ffmpeg(
|
||||||
|
ctx: &Context,
|
||||||
|
segments: Vec<VariantSegment>,
|
||||||
|
target: &Path,
|
||||||
|
) -> Result<()> {
|
||||||
|
let ffmpeg = Command::new("ffmpeg")
|
||||||
|
.stdin(Stdio::piped())
|
||||||
|
.stdout(Stdio::null())
|
||||||
|
.stderr(Stdio::piped())
|
||||||
|
.arg("-y")
|
||||||
|
.args(["-f", "mpegts", "-i", "pipe:"])
|
||||||
|
.args(["-safe", "0"])
|
||||||
|
.args(["-c", "copy"])
|
||||||
|
.arg(target.to_str().unwrap())
|
||||||
|
.spawn()?;
|
||||||
|
|
||||||
|
download_segments(ctx, &mut ffmpeg.stdin.unwrap(), None, segments).await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn formats_from_series(
|
||||||
|
download: &Download,
|
||||||
|
series: Media<Series>,
|
||||||
|
url_filter: &UrlFilter,
|
||||||
|
) -> Result<Option<Vec<Format>>> {
|
||||||
|
if !series.metadata.audio_locales.is_empty()
|
||||||
|
&& !series.metadata.audio_locales.contains(&download.audio)
|
||||||
|
{
|
||||||
|
error!(
|
||||||
|
"Series {} is not available with {} audio",
|
||||||
|
series.title, download.audio
|
||||||
|
);
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut seasons = series.seasons().await?;
|
||||||
|
|
||||||
|
// filter any season out which does not contain the specified audio language
|
||||||
|
for season in sort_seasons_after_number(seasons.clone()) {
|
||||||
|
// check if the current iterated season has the specified audio language
|
||||||
|
if !season
|
||||||
|
.iter()
|
||||||
|
.any(|s| s.metadata.audio_locale == download.audio)
|
||||||
|
{
|
||||||
|
error!(
|
||||||
|
"Season {} of series {} is not available with {} audio",
|
||||||
|
season.first().unwrap().metadata.season_number,
|
||||||
|
series.title,
|
||||||
|
download.audio
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove all seasons with the wrong audio for the current iterated season number
|
||||||
|
seasons.retain(|s| {
|
||||||
|
s.metadata.season_number != season.first().unwrap().metadata.season_number
|
||||||
|
|| s.metadata.audio_locale == download.audio
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut formats = vec![];
|
||||||
|
for season in seasons {
|
||||||
|
if let Some(fmts) = formats_from_season(download, season, url_filter).await? {
|
||||||
|
formats.extend(fmts)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(some_vec_or_none(formats))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn formats_from_season(
|
||||||
|
download: &Download,
|
||||||
|
season: Media<Season>,
|
||||||
|
url_filter: &UrlFilter,
|
||||||
|
) -> Result<Option<Vec<Format>>> {
|
||||||
|
if season.metadata.audio_locale != download.audio {
|
||||||
|
error!(
|
||||||
|
"Season {} ({}) is not available with {} audio",
|
||||||
|
season.metadata.season_number, season.title, download.audio
|
||||||
|
);
|
||||||
|
return Ok(None);
|
||||||
|
} else if !url_filter.is_season_valid(season.metadata.season_number) {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut formats = vec![];
|
||||||
|
|
||||||
|
for episode in season.episodes().await? {
|
||||||
|
if let Some(fmt) = format_from_episode(download, episode, url_filter, true).await? {
|
||||||
|
formats.push(fmt)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(some_vec_or_none(formats))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn format_from_episode(
|
||||||
|
download: &Download,
|
||||||
|
episode: Media<Episode>,
|
||||||
|
url_filter: &UrlFilter,
|
||||||
|
filter_audio: bool,
|
||||||
|
) -> Result<Option<Format>> {
|
||||||
|
if filter_audio && episode.metadata.audio_locale != download.audio {
|
||||||
|
error!(
|
||||||
|
"Episode {} ({}) of season {} ({}) of {} has no {} audio",
|
||||||
|
episode.metadata.episode_number,
|
||||||
|
episode.title,
|
||||||
|
episode.metadata.season_number,
|
||||||
|
episode.metadata.season_title,
|
||||||
|
episode.metadata.series_title,
|
||||||
|
download.audio
|
||||||
|
);
|
||||||
|
return Ok(None);
|
||||||
|
} else if !url_filter.is_episode_valid(
|
||||||
|
episode.metadata.episode_number,
|
||||||
|
episode.metadata.season_number,
|
||||||
|
) {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let streams = episode.streams().await?;
|
||||||
|
let streaming_data = if let Some(subtitle) = &download.subtitle {
|
||||||
|
if !streams.subtitles.keys().cloned().any(|x| &x == subtitle) {
|
||||||
|
error!(
|
||||||
|
"Episode {} ({}) of season {} ({}) of {} has no {} subtitles",
|
||||||
|
episode.metadata.episode_number,
|
||||||
|
episode.title,
|
||||||
|
episode.metadata.season_number,
|
||||||
|
episode.metadata.season_title,
|
||||||
|
episode.metadata.series_title,
|
||||||
|
subtitle
|
||||||
|
);
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
streams.streaming_data(Some(subtitle.clone())).await?
|
||||||
|
} else {
|
||||||
|
streams.streaming_data(None).await?
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(stream) = find_resolution(streaming_data, &download.resolution) else {
|
||||||
|
bail!(
|
||||||
|
"Resolution ({}x{}) is not available for episode {} ({}) of season {} ({}) of {}",
|
||||||
|
download.resolution.width,
|
||||||
|
download.resolution.height,
|
||||||
|
episode.metadata.episode_number,
|
||||||
|
episode.title,
|
||||||
|
episode.metadata.season_number,
|
||||||
|
episode.metadata.season_title,
|
||||||
|
episode.metadata.series_title
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Some(Format::new_from_episode(episode, stream)))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn format_from_movie_listing(
|
||||||
|
download: &Download,
|
||||||
|
movie_listing: Media<MovieListing>,
|
||||||
|
url_filter: &UrlFilter,
|
||||||
|
) -> Result<Option<Vec<Format>>> {
|
||||||
|
let mut formats = vec![];
|
||||||
|
|
||||||
|
for movie in movie_listing.movies().await? {
|
||||||
|
if let Some(fmt) = format_from_movie(download, movie, url_filter).await? {
|
||||||
|
formats.push(fmt)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(some_vec_or_none(formats))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn format_from_movie(
|
||||||
|
download: &Download,
|
||||||
|
movie: Media<Movie>,
|
||||||
|
_: &UrlFilter,
|
||||||
|
) -> Result<Option<Format>> {
|
||||||
|
let streams = movie.streams().await?;
|
||||||
|
let mut streaming_data = if let Some(subtitle) = &download.subtitle {
|
||||||
|
if !streams.subtitles.keys().cloned().any(|x| &x == subtitle) {
|
||||||
|
error!("Movie {} has no {} subtitles", movie.title, subtitle);
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
streams.streaming_data(Some(subtitle.clone())).await?
|
||||||
|
} else {
|
||||||
|
streams.streaming_data(None).await?
|
||||||
|
};
|
||||||
|
|
||||||
|
streaming_data.sort_by(|a, b| a.resolution.width.cmp(&b.resolution.width).reverse());
|
||||||
|
let stream = {
|
||||||
|
match download.resolution.height {
|
||||||
|
u64::MAX => streaming_data.into_iter().next().unwrap(),
|
||||||
|
u64::MIN => streaming_data.into_iter().last().unwrap(),
|
||||||
|
_ => {
|
||||||
|
if let Some(streaming_data) = streaming_data.into_iter().find(|v| {
|
||||||
|
download.resolution.height == u64::MAX
|
||||||
|
|| v.resolution.height == download.resolution.height
|
||||||
|
}) {
|
||||||
|
streaming_data
|
||||||
|
} else {
|
||||||
|
bail!(
|
||||||
|
"Resolution ({}x{}) is not available for movie {}",
|
||||||
|
download.resolution.width,
|
||||||
|
download.resolution.height,
|
||||||
|
movie.title
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Some(Format::new_from_movie(movie, stream)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn some_vec_or_none<T>(v: Vec<T>) -> Option<Vec<T>> {
|
||||||
|
if v.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
197 crunchy-cli-core/src/cli/log.rs Normal file
@@ -0,0 +1,197 @@
use log::{
    set_boxed_logger, set_max_level, Level, LevelFilter, Log, Metadata, Record, SetLoggerError,
};
use std::io::{stdout, Write};
use std::sync::{mpsc, Mutex};
use std::thread;
use std::thread::JoinHandle;
use std::time::Duration;

struct CliProgress {
    handler: JoinHandle<()>,
    sender: mpsc::SyncSender<(String, Level)>,
}

impl CliProgress {
    fn new(record: &Record) -> Self {
        let (tx, rx) = mpsc::sync_channel(1);

        let init_message = format!("{}", record.args());
        let init_level = record.level();
        let handler = thread::spawn(move || {
            let states = ["-", "\\", "|", "/"];

            let mut old_message = init_message.clone();
            let mut latest_info_message = init_message;
            let mut old_level = init_level;
            for i in 0.. {
                let (msg, level) = match rx.try_recv() {
                    Ok(payload) => payload,
                    Err(e) => match e {
                        mpsc::TryRecvError::Empty => (old_message.clone(), old_level),
                        mpsc::TryRecvError::Disconnected => break,
                    },
                };

                // clear last line
                // prefix (2), space (1), state (1), space (1), message(n)
                let _ = write!(stdout(), "\r     {}", " ".repeat(old_message.len()));

                if old_level != level || old_message != msg {
                    if old_level <= Level::Warn {
                        let _ = writeln!(stdout(), "\r:: • {}", old_message);
                    } else if old_level == Level::Info && level <= Level::Warn {
                        let _ = writeln!(stdout(), "\r:: → {}", old_message);
                    } else if level == Level::Info {
                        latest_info_message = msg.clone();
                    }
                }

                let _ = write!(
                    stdout(),
                    "\r:: {} {}",
                    states[i / 2 % states.len()],
                    if level == Level::Info {
                        &msg
                    } else {
                        &latest_info_message
                    }
                );
                let _ = stdout().flush();

                old_message = msg;
                old_level = level;

                thread::sleep(Duration::from_millis(100));
            }

            // clear last line
            // prefix (2), space (1), state (1), space (1), message(n)
            let _ = write!(stdout(), "\r     {}", " ".repeat(old_message.len()));
            let _ = writeln!(stdout(), "\r:: ✓ {}", old_message);
            let _ = stdout().flush();
        });

        Self {
            handler,
            sender: tx,
        }
    }

    fn send(&self, record: &Record) {
        let _ = self
            .sender
            .send((format!("{}", record.args()), record.level()));
    }

    fn stop(self) {
        drop(self.sender);
        let _ = self.handler.join();
    }
}

#[allow(clippy::type_complexity)]
pub struct CliLogger {
    level: LevelFilter,
    progress: Mutex<Option<CliProgress>>,
}

impl Log for CliLogger {
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= self.level
    }

    fn log(&self, record: &Record) {
        if !self.enabled(record.metadata())
            || (record.target() != "progress"
                && record.target() != "progress_end"
                && !record.target().starts_with("crunchy_cli"))
        {
            return;
        }

        if self.level >= LevelFilter::Debug {
            self.extended(record);
            return;
        }

        match record.target() {
            "progress" => self.progress(record, false),
            "progress_end" => self.progress(record, true),
            _ => {
                if self.progress.lock().unwrap().is_some() {
                    self.progress(record, false);
                } else if record.level() > Level::Warn {
                    self.normal(record)
                } else {
                    self.error(record)
                }
            }
        }
    }

    fn flush(&self) {
        let _ = stdout().flush();
    }
}

impl CliLogger {
    pub fn new(level: LevelFilter) -> Self {
        Self {
            level,
            progress: Mutex::new(None),
        }
    }

    pub fn init(level: LevelFilter) -> Result<(), SetLoggerError> {
        set_max_level(level);
        set_boxed_logger(Box::new(CliLogger::new(level)))
    }

    fn extended(&self, record: &Record) {
        println!(
            "[{}] {} {} ({}) {}",
            chrono::Utc::now().format("%Y-%m-%d %H:%M:%S"),
            record.level(),
            // replace the 'progress' prefix if this function is invoked via 'progress!'
            record
                .target()
                .replacen("progress", "crunchy_cli", 1)
                .replacen("progress_end", "crunchy_cli", 1),
            format!("{:?}", thread::current().id())
                .replace("ThreadId(", "")
                .replace(')', ""),
            record.args()
        )
    }

    fn normal(&self, record: &Record) {
        println!(":: {}", record.args())
    }

    fn error(&self, record: &Record) {
        eprintln!(":: {}", record.args())
    }

    fn progress(&self, record: &Record, stop: bool) {
        let mut progress_option = self.progress.lock().unwrap();
        if stop && progress_option.is_some() {
            progress_option.take().unwrap().stop()
        } else if let Some(p) = &*progress_option {
            p.send(record);
        } else {
            *progress_option = Some(CliProgress::new(record))
        }
    }
}

macro_rules! tab_info {
    ($($arg:tt)+) => {
        if log::max_level() == log::LevelFilter::Debug {
            info!($($arg)+)
        } else {
            info!("\t{}", format!($($arg)+))
        }
    }
}
pub(crate) use tab_info;
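For orientation, a minimal usage sketch of the logger above. It is not part of the commit: the `main` wrapper and the literal messages are made up for illustration, and it only relies on `CliLogger::init` plus the `progress`/`progress_end` targets defined in this file (the import path mirrors how archive.rs imports `tab_info`).

use crate::cli::log::CliLogger; // assumed path, matching `pub mod log;` in cli/mod.rs
use log::{info, LevelFilter};

fn main() {
    // install the logger defined above; Info keeps the spinner, Debug switches to the extended output
    CliLogger::init(LevelFilter::Info).expect("a logger may only be set once");

    // the logger only passes records whose target is "progress", "progress_end"
    // or starts with "crunchy_cli", so the targets are set explicitly here
    info!(target: "crunchy_cli", "plain message"); // printed as ':: plain message'
    info!(target: "progress", "downloading");      // spawns the spinner thread
    info!(target: "progress_end", "done");         // joins the spinner; its last shown message is finalized with a '✓'
}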
39 crunchy-cli-core/src/cli/login.rs Normal file
@@ -0,0 +1,39 @@
use crate::utils::context::Context;
use crate::Execute;
use anyhow::bail;
use anyhow::Result;
use crunchyroll_rs::crunchyroll::SessionToken;
use std::fs;
use std::path::PathBuf;

#[derive(Debug, clap::Parser)]
#[clap(about = "Save your login credentials persistent on disk")]
pub struct Login {
    #[arg(help = "Remove your stored credentials (instead of save them)")]
    #[arg(long)]
    pub remove: bool,
}

#[async_trait::async_trait(?Send)]
impl Execute for Login {
    async fn execute(self, ctx: Context) -> Result<()> {
        if let Some(login_file_path) = login_file_path() {
            match ctx.crunchy.session_token().await {
                SessionToken::RefreshToken(refresh_token) => Ok(fs::write(
                    login_file_path,
                    format!("refresh_token:{}", refresh_token),
                )?),
                SessionToken::EtpRt(etp_rt) => {
                    Ok(fs::write(login_file_path, format!("etp_rt:{}", etp_rt))?)
                }
                SessionToken::Anonymous => bail!("Anonymous login cannot be saved"),
            }
        } else {
            bail!("Cannot find config path")
        }
    }
}

pub fn login_file_path() -> Option<PathBuf> {
    dirs::config_dir().map(|config_dir| config_dir.join(".crunchy-cli-core"))
}
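The hunk above only writes the credentials; a hypothetical counterpart for reading them back could look like the sketch below. The file location and the `refresh_token:`/`etp_rt:` prefixes are taken from `Login::execute` and `login_file_path` above, while `read_stored_token` itself is not part of the commit.

use std::fs;

// Hypothetical helper (not in this commit): returns the token kind ("refresh_token" or "etp_rt") and its value.
fn read_stored_token() -> Option<(String, String)> {
    let path = login_file_path()?; // same location Login::execute writes to
    let content = fs::read_to_string(path).ok()?;
    let (kind, token) = content.trim().split_once(':')?;
    Some((kind.to_string(), token.to_string()))
}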
5 crunchy-cli-core/src/cli/mod.rs Normal file
@@ -0,0 +1,5 @@
pub mod archive;
pub mod download;
pub mod log;
pub mod login;
mod utils;
178
crunchy-cli-core/src/cli/utils.rs
Normal file
178
crunchy-cli-core/src/cli/utils.rs
Normal file
|
|
@ -0,0 +1,178 @@
|
||||||
|
use crate::utils::context::Context;
|
||||||
|
use anyhow::Result;
|
||||||
|
use crunchyroll_rs::media::{Resolution, VariantData, VariantSegment};
|
||||||
|
use isahc::AsyncReadResponseExt;
|
||||||
|
use log::{debug, LevelFilter};
|
||||||
|
use std::borrow::{Borrow, BorrowMut};
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::io;
|
||||||
|
use std::io::Write;
|
||||||
|
use std::sync::{mpsc, Arc, Mutex};
|
||||||
|
use std::time::Duration;
|
||||||
|
use tokio::task::JoinSet;
|
||||||
|
|
||||||
|
pub fn find_resolution(
|
||||||
|
mut streaming_data: Vec<VariantData>,
|
||||||
|
resolution: &Resolution,
|
||||||
|
) -> Option<VariantData> {
|
||||||
|
streaming_data.sort_by(|a, b| a.resolution.width.cmp(&b.resolution.width).reverse());
|
||||||
|
match resolution.height {
|
||||||
|
u64::MAX => Some(streaming_data.into_iter().next().unwrap()),
|
||||||
|
u64::MIN => Some(streaming_data.into_iter().last().unwrap()),
|
||||||
|
_ => streaming_data
|
||||||
|
.into_iter()
|
||||||
|
.find(|v| resolution.height == u64::MAX || v.resolution.height == resolution.height),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn download_segments(
|
||||||
|
ctx: &Context,
|
||||||
|
writer: &mut impl Write,
|
||||||
|
message: Option<String>,
|
||||||
|
segments: Vec<VariantSegment>,
|
||||||
|
) -> Result<()> {
|
||||||
|
let total_segments = segments.len();
|
||||||
|
|
||||||
|
let client = Arc::new(ctx.client.clone());
|
||||||
|
let count = Arc::new(Mutex::new(0));
|
||||||
|
let amount = Arc::new(Mutex::new(0));
|
||||||
|
|
||||||
|
// only print progress when log level is info
|
||||||
|
let output_handler = if log::max_level() == LevelFilter::Info {
|
||||||
|
let output_count = count.clone();
|
||||||
|
let output_amount = amount.clone();
|
||||||
|
Some(tokio::spawn(async move {
|
||||||
|
let sleep_time_ms = 100;
|
||||||
|
let iter_per_sec = 1000f64 / sleep_time_ms as f64;
|
||||||
|
|
||||||
|
let mut bytes_start = 0f64;
|
||||||
|
let mut speed = 0f64;
|
||||||
|
let mut percentage = 0f64;
|
||||||
|
|
||||||
|
while *output_count.lock().unwrap() < total_segments || percentage < 100f64 {
|
||||||
|
let tmp_amount = *output_amount.lock().unwrap() as f64;
|
||||||
|
|
||||||
|
let tmp_speed = (tmp_amount - bytes_start) / 1024f64 / 1024f64;
|
||||||
|
if *output_count.lock().unwrap() < 3 {
|
||||||
|
speed = tmp_speed;
|
||||||
|
} else {
|
||||||
|
let (old_speed_ratio, new_speed_ratio) = if iter_per_sec <= 1f64 {
|
||||||
|
(0f64, 1f64)
|
||||||
|
} else {
|
||||||
|
(1f64 - (1f64 / iter_per_sec), (1f64 / iter_per_sec))
|
||||||
|
};
|
||||||
|
|
||||||
|
// smooth the average download speed with an exponential moving average
|
||||||
|
speed = (speed * old_speed_ratio) + (tmp_speed * new_speed_ratio);
|
||||||
|
}
|
||||||
|
|
||||||
|
percentage =
|
||||||
|
(*output_count.lock().unwrap() as f64 / total_segments as f64) * 100f64;
|
||||||
|
|
||||||
|
let size = terminal_size::terminal_size()
|
||||||
|
.unwrap_or((terminal_size::Width(60), terminal_size::Height(0)))
|
||||||
|
.0
|
||||||
|
.0 as usize;
|
||||||
|
|
||||||
|
let progress_available = size
|
||||||
|
- if let Some(msg) = &message {
|
||||||
|
35 + msg.len()
|
||||||
|
} else {
|
||||||
|
33
|
||||||
|
};
|
||||||
|
let progress_done_count =
|
||||||
|
(progress_available as f64 * (percentage / 100f64)).ceil() as usize;
|
||||||
|
let progress_to_do_count = progress_available - progress_done_count;
|
||||||
|
|
||||||
|
let _ = write!(
|
||||||
|
io::stdout(),
|
||||||
|
"\r:: {}{:>5.1} MiB {:>5.2} MiB/s [{}{}] {:>3}%",
|
||||||
|
message.clone().map_or("".to_string(), |msg| msg + " "),
|
||||||
|
tmp_amount / 1024f64 / 1024f64,
|
||||||
|
speed * iter_per_sec,
|
||||||
|
"#".repeat(progress_done_count),
|
||||||
|
"-".repeat(progress_to_do_count),
|
||||||
|
percentage as usize
|
||||||
|
);
|
||||||
|
|
||||||
|
bytes_start = tmp_amount;
|
||||||
|
|
||||||
|
tokio::time::sleep(Duration::from_millis(sleep_time_ms)).await;
|
||||||
|
}
|
||||||
|
println!()
|
||||||
|
}))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let cpus = num_cpus::get();
|
||||||
|
let mut segs: Vec<Vec<VariantSegment>> = Vec::with_capacity(cpus);
|
||||||
|
for _ in 0..cpus {
|
||||||
|
segs.push(vec![])
|
||||||
|
}
|
||||||
|
for (i, segment) in segments.into_iter().enumerate() {
|
||||||
|
segs[i - ((i / cpus) * cpus)].push(segment);
|
||||||
|
}
|
||||||
|
|
||||||
|
let (sender, receiver) = mpsc::channel();
|
||||||
|
|
||||||
|
let mut join_set: JoinSet<Result<()>> = JoinSet::new();
|
||||||
|
for num in 0..cpus {
|
||||||
|
let thread_client = client.clone();
|
||||||
|
let thread_sender = sender.clone();
|
||||||
|
let thread_segments = segs.remove(0);
|
||||||
|
let thread_amount = amount.clone();
|
||||||
|
let thread_count = count.clone();
|
||||||
|
join_set.spawn(async move {
|
||||||
|
for (i, segment) in thread_segments.into_iter().enumerate() {
|
||||||
|
let mut response = thread_client.get_async(&segment.url).await?;
|
||||||
|
let mut buf = response.bytes().await?.to_vec();
|
||||||
|
|
||||||
|
*thread_amount.lock().unwrap() += buf.len();
|
||||||
|
|
||||||
|
buf = VariantSegment::decrypt(buf.borrow_mut(), segment.key)?.to_vec();
|
||||||
|
debug!(
|
||||||
|
"Downloaded and decrypted segment {} ({})",
|
||||||
|
num + (i * cpus),
|
||||||
|
segment.url
|
||||||
|
);
|
||||||
|
thread_sender.send((num + (i * cpus), buf))?;
|
||||||
|
|
||||||
|
*thread_count.lock().unwrap() += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut data_pos = 0usize;
|
||||||
|
let mut buf: BTreeMap<usize, Vec<u8>> = BTreeMap::new();
|
||||||
|
loop {
|
||||||
|
// always `Ok` because `sender` does not get dropped when all threads are finished
|
||||||
|
let data = receiver.recv().unwrap();
|
||||||
|
|
||||||
|
if data_pos == data.0 {
|
||||||
|
writer.write_all(data.1.borrow())?;
|
||||||
|
data_pos += 1;
|
||||||
|
} else {
|
||||||
|
buf.insert(data.0, data.1);
|
||||||
|
}
|
||||||
|
while let Some(b) = buf.remove(&data_pos) {
|
||||||
|
writer.write_all(b.borrow())?;
|
||||||
|
data_pos += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if *count.lock().unwrap() >= total_segments {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
while let Some(joined) = join_set.join_next().await {
|
||||||
|
joined??
|
||||||
|
}
|
||||||
|
if let Some(handler) = output_handler {
|
||||||
|
handler.await?
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
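Illustrative sketch (not part of this commit): the progress line above smooths the per-tick throughput with an exponential moving average whose weight depends on the tick rate. A minimal standalone version of that smoothing, assuming the same 100 ms tick as in the code above, could look like this.

// hypothetical helper, mirroring the smoothing logic in the progress loop above
fn smooth_speed(samples_mib_per_tick: &[f64], iter_per_sec: f64) -> f64 {
    let (old_ratio, new_ratio) = if iter_per_sec <= 1.0 {
        (0.0, 1.0)
    } else {
        (1.0 - 1.0 / iter_per_sec, 1.0 / iter_per_sec)
    };
    let mut speed = 0.0;
    for (i, sample) in samples_mib_per_tick.iter().enumerate() {
        if i < 3 {
            // the first few ticks are taken as-is, like the `count < 3` branch above
            speed = *sample;
        } else {
            speed = speed * old_ratio + *sample * new_ratio;
        }
    }
    // multiply by the tick rate to get MiB/s, as the progress line does
    speed * iter_per_sec
}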
|
||||||
196
crunchy-cli-core/src/lib.rs
Normal file
196
crunchy-cli-core/src/lib.rs
Normal file
|
|
@ -0,0 +1,196 @@
|
||||||
|
use crate::cli::log::CliLogger;
|
||||||
|
use crate::utils::context::Context;
|
||||||
|
use crate::utils::locale::system_locale;
|
||||||
|
use crate::utils::log::progress;
|
||||||
|
use anyhow::bail;
|
||||||
|
use anyhow::Result;
|
||||||
|
use clap::{Parser, Subcommand};
|
||||||
|
use crunchyroll_rs::{Crunchyroll, Locale};
|
||||||
|
use log::{debug, error, info, LevelFilter};
|
||||||
|
use std::{env, fs};
|
||||||
|
|
||||||
|
mod cli;
|
||||||
|
mod utils;
|
||||||
|
|
||||||
|
pub use cli::{archive::Archive, download::Download, login::Login};
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
trait Execute {
|
||||||
|
async fn execute(self, ctx: Context) -> Result<()>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Parser)]
|
||||||
|
#[clap(author, version, about)]
|
||||||
|
#[clap(name = "crunchy-cli")]
|
||||||
|
pub struct Cli {
|
||||||
|
#[clap(flatten)]
|
||||||
|
verbosity: Option<Verbosity>,
|
||||||
|
|
||||||
|
#[arg(help = "Overwrite the language in which results are returned. Default is your system language")]
|
||||||
|
#[arg(long)]
|
||||||
|
lang: Option<Locale>,
|
||||||
|
|
||||||
|
#[clap(flatten)]
|
||||||
|
login_method: LoginMethod,
|
||||||
|
|
||||||
|
#[clap(subcommand)]
|
||||||
|
command: Command,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Subcommand)]
|
||||||
|
enum Command {
|
||||||
|
Archive(Archive),
|
||||||
|
Download(Download),
|
||||||
|
Login(Login),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Parser)]
|
||||||
|
struct Verbosity {
|
||||||
|
#[arg(help = "Verbose output")]
|
||||||
|
#[arg(short)]
|
||||||
|
v: bool,
|
||||||
|
|
||||||
|
#[arg(help = "Quiet output. Does not print anything unless it's a error")]
|
||||||
|
#[arg(long_help = "Quiet output. Does not print anything unless it's a error. Can be helpful if you pipe the output to stdout")]
|
||||||
|
#[arg(short)]
|
||||||
|
q: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Parser)]
|
||||||
|
struct LoginMethod {
|
||||||
|
#[arg(help = "Login with credentials (username or email and password)")]
|
||||||
|
#[arg(long_help = "Login with credentials (username or email and password). Must be provided as user:password")]
|
||||||
|
#[arg(long)]
|
||||||
|
credentials: Option<String>,
|
||||||
|
#[arg(help = "Login with the etp-rt cookie")]
|
||||||
|
#[arg(long_help = "Login with the etp-rt cookie. This can be obtained when you login on crunchyroll.com and extract it from there")]
|
||||||
|
#[arg(long)]
|
||||||
|
etp_rt: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn cli_entrypoint() {
|
||||||
|
let cli: Cli = Cli::parse();
|
||||||
|
|
||||||
|
if let Some(verbosity) = &cli.verbosity {
|
||||||
|
if verbosity.v && verbosity.q {
|
||||||
|
eprintln!("Output cannot be verbose ('-v') and quiet ('-q') at the same time");
|
||||||
|
std::process::exit(1)
|
||||||
|
} else if verbosity.v {
|
||||||
|
CliLogger::init(LevelFilter::Debug).unwrap()
|
||||||
|
} else if verbosity.q {
|
||||||
|
CliLogger::init(LevelFilter::Error).unwrap()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
CliLogger::init(LevelFilter::Info).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
debug!("cli input: {:?}", cli);
|
||||||
|
|
||||||
|
let ctx = match create_ctx(&cli).await {
|
||||||
|
Ok(ctx) => ctx,
|
||||||
|
Err(e) => {
|
||||||
|
error!("{}", e);
|
||||||
|
std::process::exit(1)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
debug!("Created context");
|
||||||
|
|
||||||
|
ctrlc::set_handler(move || {
|
||||||
|
debug!("Ctrl-c detected");
|
||||||
|
if let Ok(dir) = fs::read_dir(&env::temp_dir()) {
|
||||||
|
for file in dir.flatten() {
|
||||||
|
if file
|
||||||
|
.path()
|
||||||
|
.file_name()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_str()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.starts_with(".crunchy-cli_")
|
||||||
|
{
|
||||||
|
let result = fs::remove_file(file.path());
|
||||||
|
debug!(
|
||||||
|
"Ctrl-c removed temporary file {} {}",
|
||||||
|
file.path().to_string_lossy(),
|
||||||
|
if result.is_ok() {
|
||||||
|
"successfully"
|
||||||
|
} else {
|
||||||
|
"not successfully"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
std::process::exit(1)
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
debug!("Created ctrl-c handler");
|
||||||
|
|
||||||
|
let result = match cli.command {
|
||||||
|
Command::Archive(archive) => archive.execute(ctx).await,
|
||||||
|
Command::Download(download) => download.execute(ctx).await,
|
||||||
|
Command::Login(login) => {
|
||||||
|
if login.remove {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
login.execute(ctx).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
if let Err(err) = result {
|
||||||
|
error!("{}", err);
|
||||||
|
std::process::exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn create_ctx(cli: &Cli) -> Result<Context> {
|
||||||
|
let crunchy = crunchyroll_session(cli).await?;
|
||||||
|
// TODO: Use crunchy.client() when it's possible
|
||||||
|
// currently crunchy.client() has a cloudflare bypass built-in to access crunchyroll. the servers
|
||||||
|
// where crunchy stores their videos can't handle this bypass and simply refuse to connect
|
||||||
|
let client = isahc::HttpClient::new().unwrap();
|
||||||
|
|
||||||
|
Ok(Context { crunchy, client })
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn crunchyroll_session(cli: &Cli) -> Result<Crunchyroll> {
|
||||||
|
let mut builder = Crunchyroll::builder();
|
||||||
|
builder.locale(cli.lang.clone().unwrap_or_else(system_locale));
|
||||||
|
|
||||||
|
let _progress_handler = progress!("Logging in");
|
||||||
|
if cli.login_method.credentials.is_none() && cli.login_method.etp_rt.is_none() {
|
||||||
|
if let Some(login_file_path) = cli::login::login_file_path() {
|
||||||
|
if login_file_path.exists() {
|
||||||
|
let session = fs::read_to_string(login_file_path)?;
|
||||||
|
if let Some((token_type, token)) = session.split_once(':') {
|
||||||
|
match token_type {
|
||||||
|
"refresh_token" => {
|
||||||
|
return Ok(builder.login_with_refresh_token(token).await?)
|
||||||
|
}
|
||||||
|
"etp_rt" => return Ok(builder.login_with_etp_rt(token).await?),
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
bail!("Could not read stored session ('{}')", session)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
bail!("Please use a login method ('--credentials' or '--etp_rt')")
|
||||||
|
} else if cli.login_method.credentials.is_some() && cli.login_method.etp_rt.is_some() {
|
||||||
|
bail!("Please use only one login method ('--credentials' or '--etp_rt')")
|
||||||
|
}
|
||||||
|
|
||||||
|
let crunchy = if let Some(credentials) = &cli.login_method.credentials {
|
||||||
|
if let Some((user, password)) = credentials.split_once(':') {
|
||||||
|
builder.login_with_credentials(user, password).await?
|
||||||
|
} else {
|
||||||
|
bail!("Invalid credentials format. Please provide your credentials as user:password")
|
||||||
|
}
|
||||||
|
} else if let Some(etp_rt) = &cli.login_method.etp_rt {
|
||||||
|
builder.login_with_etp_rt(etp_rt).await?
|
||||||
|
} else {
|
||||||
|
bail!("should never happen")
|
||||||
|
};
|
||||||
|
|
||||||
|
info!("Logged in");
|
||||||
|
|
||||||
|
Ok(crunchy)
|
||||||
|
}
|
||||||
6
crunchy-cli-core/src/utils/clap.rs
Normal file
6
crunchy-cli-core/src/utils/clap.rs
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
use crate::utils::parse::parse_resolution;
|
||||||
|
use crunchyroll_rs::media::Resolution;
|
||||||
|
|
||||||
|
pub fn clap_parse_resolution(s: &str) -> Result<Resolution, String> {
|
||||||
|
parse_resolution(s.to_string()).map_err(|e| e.to_string())
|
||||||
|
}
|
||||||
6
crunchy-cli-core/src/utils/context.rs
Normal file
6
crunchy-cli-core/src/utils/context.rs
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
use crunchyroll_rs::Crunchyroll;
|
||||||
|
|
||||||
|
pub struct Context {
|
||||||
|
pub crunchy: Crunchyroll,
|
||||||
|
pub client: isahc::HttpClient,
|
||||||
|
}
|
||||||
77
crunchy-cli-core/src/utils/format.rs
Normal file
77
crunchy-cli-core/src/utils/format.rs
Normal file
|
|
@ -0,0 +1,77 @@
|
||||||
|
use crunchyroll_rs::media::VariantData;
|
||||||
|
use crunchyroll_rs::{Episode, Locale, Media, Movie};
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct Format {
|
||||||
|
pub id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub description: String,
|
||||||
|
pub number: u32,
|
||||||
|
pub audio: Locale,
|
||||||
|
|
||||||
|
pub duration: Duration,
|
||||||
|
pub stream: VariantData,
|
||||||
|
|
||||||
|
pub series_id: String,
|
||||||
|
pub series_name: String,
|
||||||
|
|
||||||
|
pub season_id: String,
|
||||||
|
pub season_title: String,
|
||||||
|
pub season_number: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Format {
|
||||||
|
pub fn new_from_episode(episode: Media<Episode>, stream: VariantData) -> Self {
|
||||||
|
Self {
|
||||||
|
id: episode.id,
|
||||||
|
title: episode.title,
|
||||||
|
description: episode.description,
|
||||||
|
number: episode.metadata.episode_number,
|
||||||
|
audio: episode.metadata.audio_locale,
|
||||||
|
|
||||||
|
duration: episode.metadata.duration.to_std().unwrap(),
|
||||||
|
stream,
|
||||||
|
|
||||||
|
series_id: episode.metadata.series_id,
|
||||||
|
series_name: episode.metadata.series_title,
|
||||||
|
|
||||||
|
season_id: episode.metadata.season_id,
|
||||||
|
season_title: episode.metadata.season_title,
|
||||||
|
season_number: episode.metadata.season_number,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new_from_movie(movie: Media<Movie>, stream: VariantData) -> Self {
|
||||||
|
Self {
|
||||||
|
id: movie.id,
|
||||||
|
title: movie.title,
|
||||||
|
description: movie.description,
|
||||||
|
number: 1,
|
||||||
|
audio: Locale::ja_JP,
|
||||||
|
|
||||||
|
duration: movie.metadata.duration.to_std().unwrap(),
|
||||||
|
stream,
|
||||||
|
|
||||||
|
series_id: movie.metadata.movie_listing_id.clone(),
|
||||||
|
series_name: movie.metadata.movie_listing_title.clone(),
|
||||||
|
|
||||||
|
season_id: movie.metadata.movie_listing_id,
|
||||||
|
season_title: movie.metadata.movie_listing_title,
|
||||||
|
season_number: 1,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn format_string(s: String, format: &Format) -> String {
|
||||||
|
s.replace("{title}", &format.title)
|
||||||
|
.replace("{series_name}", &format.series_name)
|
||||||
|
.replace("{season_name}", &format.season_title)
|
||||||
|
.replace("{audio}", &format.audio.to_string())
|
||||||
|
.replace("{resolution}", &format.stream.resolution.to_string())
|
||||||
|
.replace("{season_number}", &format.season_number.to_string())
|
||||||
|
.replace("{episode_number}", &format.number.to_string())
|
||||||
|
.replace("{series_id}", &format.series_id)
|
||||||
|
.replace("{season_id}", &format.season_id)
|
||||||
|
.replace("{episode_id}", &format.id)
|
||||||
|
}
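A hedged usage sketch (not part of the commit): here `format` is assumed to be a `Format` built via `Format::new_from_episode`, and the template mirrors the placeholders accepted by `--output`.

// hypothetical: `format` is assumed to come from Format::new_from_episode(...)
let template = "{series_name} - S{season_number}E{episode_number} - {title}.ts".to_string();
let file_name = format_string(template, &format);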
|
||||||
15
crunchy-cli-core/src/utils/locale.rs
Normal file
15
crunchy-cli-core/src/utils/locale.rs
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
use crunchyroll_rs::Locale;
|
||||||
|
|
||||||
|
/// Return the locale of the system.
|
||||||
|
pub fn system_locale() -> Locale {
|
||||||
|
if let Some(system_locale) = sys_locale::get_locale() {
|
||||||
|
let locale = Locale::from(system_locale);
|
||||||
|
if let Locale::Custom(_) = locale {
|
||||||
|
Locale::en_US
|
||||||
|
} else {
|
||||||
|
locale
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Locale::en_US
|
||||||
|
}
|
||||||
|
}
|
||||||
19
crunchy-cli-core/src/utils/log.rs
Normal file
19
crunchy-cli-core/src/utils/log.rs
Normal file
|
|
@ -0,0 +1,19 @@
|
||||||
|
use log::info;
|
||||||
|
|
||||||
|
pub struct ProgressHandler;
|
||||||
|
|
||||||
|
impl Drop for ProgressHandler {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
info!(target: "progress_end", "")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! progress {
|
||||||
|
($($arg:tt)+) => {
|
||||||
|
{
|
||||||
|
log::info!(target: "progress", $($arg)+);
|
||||||
|
$crate::utils::log::ProgressHandler{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub(crate) use progress;
|
||||||
8
crunchy-cli-core/src/utils/mod.rs
Normal file
8
crunchy-cli-core/src/utils/mod.rs
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
pub mod clap;
|
||||||
|
pub mod context;
|
||||||
|
pub mod format;
|
||||||
|
pub mod locale;
|
||||||
|
pub mod log;
|
||||||
|
pub mod os;
|
||||||
|
pub mod parse;
|
||||||
|
pub mod sort;
|
||||||
52
crunchy-cli-core/src/utils/os.rs
Normal file
52
crunchy-cli-core/src/utils/os.rs
Normal file
|
|
@ -0,0 +1,52 @@
|
||||||
|
use log::debug;
|
||||||
|
use std::io::ErrorKind;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::process::Command;
|
||||||
|
use std::{env, io};
|
||||||
|
use tempfile::{Builder, NamedTempFile};
|
||||||
|
|
||||||
|
pub fn has_ffmpeg() -> bool {
|
||||||
|
if let Err(e) = Command::new("ffmpeg").spawn() {
|
||||||
|
if ErrorKind::NotFound != e.kind() {
|
||||||
|
debug!(
|
||||||
|
"unknown error occurred while checking if ffmpeg exists: {}",
|
||||||
|
e.kind()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
false
|
||||||
|
} else {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Any tempfiles should be created with this function. The prefix and directory of every file
|
||||||
|
/// created with this method stay the same, which is helpful to query all existing tempfiles and
|
||||||
|
/// e.g. remove them in case of ctrl-c. Having one function is also good to prevent mistakes like
|
||||||
|
/// setting the wrong prefix if done manually.
|
||||||
|
pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
|
||||||
|
let tempfile = Builder::default()
|
||||||
|
.prefix(".crunchy-cli_")
|
||||||
|
.suffix(suffix.as_ref())
|
||||||
|
.tempfile_in(&env::temp_dir())?;
|
||||||
|
debug!(
|
||||||
|
"Created temporary file: {}",
|
||||||
|
tempfile.path().to_string_lossy()
|
||||||
|
);
|
||||||
|
Ok(tempfile)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if the given path exists and rename it until the new (renamed) file does not exist.
|
||||||
|
pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
|
||||||
|
let mut i = 0;
|
||||||
|
while path.exists() {
|
||||||
|
i += 1;
|
||||||
|
|
||||||
|
let ext = path.extension().unwrap().to_str().unwrap();
|
||||||
|
let mut filename = path.file_name().unwrap().to_str().unwrap();
|
||||||
|
|
||||||
|
filename = &filename[0..filename.len() - ext.len() - 1];
|
||||||
|
|
||||||
|
path.set_file_name(format!("{} ({}).{}", filename, i, ext))
|
||||||
|
}
|
||||||
|
(path, i != 0)
|
||||||
|
}
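A small hedged usage sketch (not part of the commit), showing the renaming behavior described above; the file name is only an example.

// hypothetical: if "video.mkv" already exists, the path becomes "video (1).mkv", then "video (2).mkv", ...
let (path, renamed) = free_file(PathBuf::from("video.mkv"));
if renamed {
    debug!("video.mkv already existed, writing to {} instead", path.to_string_lossy());
}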
|
||||||
170
crunchy-cli-core/src/utils/parse.rs
Normal file
170
crunchy-cli-core/src/utils/parse.rs
Normal file
|
|
@ -0,0 +1,170 @@
|
||||||
|
use anyhow::{anyhow, bail, Result};
|
||||||
|
use crunchyroll_rs::media::Resolution;
|
||||||
|
use crunchyroll_rs::{Crunchyroll, MediaCollection, UrlType};
|
||||||
|
use log::debug;
|
||||||
|
use regex::Regex;
|
||||||
|
|
||||||
|
/// Define a filter, based on season and episode number to filter episodes / movies.
|
||||||
|
/// If a struct instance equals [`Default::default()`], it's considered that no filter is applied.
|
||||||
|
/// If `from_*` is [`None`] they're set to [`u32::MIN`].
|
||||||
|
/// If `to_*` is [`None`] they're set to [`u32::MAX`].
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct InnerUrlFilter {
|
||||||
|
from_episode: Option<u32>,
|
||||||
|
to_episode: Option<u32>,
|
||||||
|
from_season: Option<u32>,
|
||||||
|
to_season: Option<u32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Default)]
|
||||||
|
pub struct UrlFilter {
|
||||||
|
inner: Vec<InnerUrlFilter>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UrlFilter {
|
||||||
|
pub fn is_season_valid(&self, season: u32) -> bool {
|
||||||
|
self.inner.iter().any(|f| {
|
||||||
|
let from_season = f.from_season.unwrap_or(u32::MIN);
|
||||||
|
let to_season = f.to_season.unwrap_or(u32::MAX);
|
||||||
|
|
||||||
|
season >= from_season && season <= to_season
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_episode_valid(&self, episode: u32, season: u32) -> bool {
|
||||||
|
self.inner.iter().any(|f| {
|
||||||
|
let from_episode = f.from_episode.unwrap_or(u32::MIN);
|
||||||
|
let to_episode = f.to_episode.unwrap_or(u32::MAX);
|
||||||
|
let from_season = f.from_season.unwrap_or(u32::MIN);
|
||||||
|
let to_season = f.to_season.unwrap_or(u32::MAX);
|
||||||
|
|
||||||
|
episode >= from_episode
|
||||||
|
&& episode <= to_episode
|
||||||
|
&& season >= from_season
|
||||||
|
&& season <= to_season
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a url and return all [`crunchyroll_rs::Media<crunchyroll_rs::Episode>`] &
|
||||||
|
/// [`crunchyroll_rs::Media<crunchyroll_rs::Movie>`] which could be related to it.
|
||||||
|
///
|
||||||
|
/// The `with_filter` argument says whether filtering should be enabled for the url. Filtering is a
|
||||||
|
/// specific pattern at the end of the url which declares which parts of the url content should be
|
||||||
|
/// returned / filtered (out). _This only works if the url points to a series_.
|
||||||
|
///
|
||||||
|
/// Examples how filtering works:
|
||||||
|
/// - `...[E5]` - Download the fifth episode.
|
||||||
|
/// - `...[S1]` - Download the full first season.
|
||||||
|
/// - `...[-S2]` - Download all seasons up to and including season 2.
|
||||||
|
/// - `...[S3E4-]` - Download all episodes from and including season 3, episode 4.
|
||||||
|
/// - `...[S1E4-S3]` - Download all episodes from and including season 1, episode 4, until and including season 3.
|
||||||
|
/// - `...[S3,S5]` - Download season 3 and 5.
|
||||||
|
/// - `...[S1-S3,S4E2-S4E6]` - Download season 1 to 3 and episode 2 to episode 6 of season 4.
|
||||||
|
|
||||||
|
/// In practice, it would look like this: `https://beta.crunchyroll.com/series/12345678/example[S1E5-S3E2]`.
|
||||||
|
pub async fn parse_url(
|
||||||
|
crunchy: &Crunchyroll,
|
||||||
|
mut url: String,
|
||||||
|
with_filter: bool,
|
||||||
|
) -> Result<(MediaCollection, UrlFilter)> {
|
||||||
|
let url_filter = if with_filter {
|
||||||
|
debug!("Url may contain filters");
|
||||||
|
|
||||||
|
let open_index = url.rfind('[').unwrap_or(0);
|
||||||
|
let close_index = url.rfind(']').unwrap_or(0);
|
||||||
|
|
||||||
|
let filter = if open_index < close_index {
|
||||||
|
let filter = url.as_str()[open_index + 1..close_index].to_string();
|
||||||
|
url = url.as_str()[0..open_index].to_string();
|
||||||
|
filter
|
||||||
|
} else {
|
||||||
|
"".to_string()
|
||||||
|
};
|
||||||
|
|
||||||
|
let filter_regex = Regex::new(r"((S(?P<from_season>\d+))?(E(?P<from_episode>\d+))?)(((?P<dash>-)((S(?P<to_season>\d+))?(E(?P<to_episode>\d+))?))?)(,|$)").unwrap();
|
||||||
|
|
||||||
|
let mut filters = vec![];
|
||||||
|
|
||||||
|
for capture in filter_regex.captures_iter(&filter) {
|
||||||
|
let dash = capture.name("dash").is_some();
|
||||||
|
let from_episode = capture
|
||||||
|
.name("from_episode")
|
||||||
|
.map_or(anyhow::Ok(None), |fe| Ok(Some(fe.as_str().parse()?)))?;
|
||||||
|
let to_episode = capture
|
||||||
|
.name("to_episode")
|
||||||
|
.map_or(anyhow::Ok(if dash { None } else { from_episode }), |te| {
|
||||||
|
Ok(Some(te.as_str().parse()?))
|
||||||
|
})?;
|
||||||
|
let from_season = capture
|
||||||
|
.name("from_season")
|
||||||
|
.map_or(anyhow::Ok(None), |fs| Ok(Some(fs.as_str().parse()?)))?;
|
||||||
|
let to_season = capture
|
||||||
|
.name("to_season")
|
||||||
|
.map_or(anyhow::Ok(if dash { None } else { from_season }), |ts| {
|
||||||
|
Ok(Some(ts.as_str().parse()?))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
filters.push(InnerUrlFilter {
|
||||||
|
from_episode,
|
||||||
|
to_episode,
|
||||||
|
from_season,
|
||||||
|
to_season,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
let url_filter = UrlFilter { inner: filters };
|
||||||
|
|
||||||
|
debug!("Url filter: {:?}", url_filter);
|
||||||
|
|
||||||
|
url_filter
|
||||||
|
} else {
|
||||||
|
UrlFilter::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let parsed_url = crunchyroll_rs::parse_url(url).map_or(Err(anyhow!("Invalid url")), Ok)?;
|
||||||
|
debug!("Url type: {:?}", parsed_url);
|
||||||
|
let media_collection = match parsed_url {
|
||||||
|
UrlType::Series(id) | UrlType::MovieListing(id) | UrlType::EpisodeOrMovie(id) => {
|
||||||
|
crunchy.media_collection_from_id(id).await?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok((media_collection, url_filter))
|
||||||
|
}
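A hedged usage sketch (not part of the commit): it is assumed to run inside an async fn returning anyhow::Result<()>, with `crunchy` being an already logged-in Crunchyroll session; the url is the hypothetical example from the doc comment above.

// hypothetical usage of parse_url with a bracket filter
let url = "https://beta.crunchyroll.com/series/12345678/example[S1E4-S3]".to_string();
let (_media_collection, url_filter) = parse_url(&crunchy, url, true).await?;
assert!(url_filter.is_episode_valid(4, 1)); // S1E4 is the start of the range
assert!(!url_filter.is_episode_valid(3, 1)); // S1E3 lies before it
assert!(!url_filter.is_season_valid(4)); // season 4 is past the end of the range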
|
||||||
|
|
||||||
|
/// Parse a resolution given as a [`String`] to a [`crunchyroll_rs::media::Resolution`].
|
||||||
|
pub fn parse_resolution(mut resolution: String) -> Result<Resolution> {
|
||||||
|
resolution = resolution.to_lowercase();
|
||||||
|
|
||||||
|
if resolution == "best" {
|
||||||
|
Ok(Resolution {
|
||||||
|
width: u64::MAX,
|
||||||
|
height: u64::MAX,
|
||||||
|
})
|
||||||
|
} else if resolution == "worst" {
|
||||||
|
Ok(Resolution {
|
||||||
|
width: u64::MIN,
|
||||||
|
height: u64::MIN,
|
||||||
|
})
|
||||||
|
} else if resolution.ends_with('p') {
|
||||||
|
let without_p = resolution.as_str()[0..resolution.len() - 1]
|
||||||
|
.parse()
|
||||||
|
.map_err(|_| anyhow!("Could not parse resolution"))?;
|
||||||
|
Ok(Resolution {
|
||||||
|
width: without_p * 16 / 9,
|
||||||
|
height: without_p,
|
||||||
|
})
|
||||||
|
} else if let Some((w, h)) = resolution.split_once('x') {
|
||||||
|
Ok(Resolution {
|
||||||
|
width: w
|
||||||
|
.parse()
|
||||||
|
.map_err(|_| anyhow!("Could not parse resolution"))?,
|
||||||
|
height: h
|
||||||
|
.parse()
|
||||||
|
.map_err(|_| anyhow!("Could not parse resolution"))?,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
bail!("Could not parse resolution")
|
||||||
|
}
|
||||||
|
}
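A small hedged check of the parser above (not part of the commit), e.g. as it could appear in a unit test: "best"/"worst" become u64::MAX/u64::MIN markers that find_resolution() in cli/utils.rs treats as the highest/lowest available stream.

// hypothetical unit-test style example
fn parse_resolution_example() -> anyhow::Result<()> {
    let exact = parse_resolution("1920x1080".to_string())?;
    assert_eq!((exact.width, exact.height), (1920, 1080));
    let best = parse_resolution("best".to_string())?;
    assert_eq!((best.width, best.height), (u64::MAX, u64::MAX));
    Ok(())
}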
|
||||||
47
crunchy-cli-core/src/utils/sort.rs
Normal file
47
crunchy-cli-core/src/utils/sort.rs
Normal file
|
|
@ -0,0 +1,47 @@
|
||||||
|
use crate::utils::format::Format;
|
||||||
|
use crunchyroll_rs::{Media, Season};
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
|
||||||
|
/// Sort seasons by their season number. Crunchyroll may have multiple seasons for one season
|
||||||
|
/// number. They generally store different languages in individual seasons with the same season number.
|
||||||
|
/// E.g. series X has one official season but crunchy has translations for it in 3 different languages
|
||||||
|
/// so there exist 3 different "seasons" on Crunchyroll which are actually the same season but with
|
||||||
|
/// different audio.
|
||||||
|
pub fn sort_seasons_after_number(seasons: Vec<Media<Season>>) -> Vec<Vec<Media<Season>>> {
|
||||||
|
let mut as_map = BTreeMap::new();
|
||||||
|
|
||||||
|
for season in seasons {
|
||||||
|
as_map
|
||||||
|
.entry(season.metadata.season_number)
|
||||||
|
.or_insert_with(Vec::new);
|
||||||
|
as_map
|
||||||
|
.get_mut(&season.metadata.season_number)
|
||||||
|
.unwrap()
|
||||||
|
.push(season)
|
||||||
|
}
|
||||||
|
|
||||||
|
as_map.into_values().collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sort formats by their seasons and the episodes inside them, ascending. Make sure to pass only
|
||||||
|
/// episodes from one series and in one language, since the function does not handle those
|
||||||
|
/// differences, which could otherwise lead to a partially mixed-up result.
|
||||||
|
pub fn sort_formats_after_seasons(formats: Vec<Format>) -> Vec<Vec<Format>> {
|
||||||
|
let mut as_map = BTreeMap::new();
|
||||||
|
|
||||||
|
for format in formats {
|
||||||
|
as_map.entry(format.season_number).or_insert_with(Vec::new);
|
||||||
|
as_map.get_mut(&format.season_number).unwrap().push(format);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut sorted = as_map
|
||||||
|
.into_iter()
|
||||||
|
.map(|(_, mut values)| {
|
||||||
|
values.sort_by(|a, b| a.number.cmp(&b.number));
|
||||||
|
values
|
||||||
|
})
|
||||||
|
.collect::<Vec<Vec<Format>>>();
|
||||||
|
sorted.sort_by(|a, b| a[0].series_id.cmp(&b[0].series_id));
|
||||||
|
|
||||||
|
sorted
|
||||||
|
}
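A hedged usage sketch (not part of the commit): `formats` is assumed to already hold all collected Formats of one series in one audio language, as the doc comment above requires.

// hypothetical: print how many episodes each season contains
for season in sort_formats_after_seasons(formats) {
    log::info!("Season {} has {} episode(s)", season[0].season_number, season.len());
}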
|
||||||
231
crunchy-cli.1
231
crunchy-cli.1
|
|
@ -1,231 +0,0 @@
|
||||||
.TH crunchy-cli 1 "27 June 2022" "crunchy-cli" "Crunchyroll Cli Client"
|
|
||||||
|
|
||||||
.SH NAME
|
|
||||||
crunchy-cli - A cli for downloading videos and entire series from crunchyroll.
|
|
||||||
|
|
||||||
.SH SYNOPSIS
|
|
||||||
crunchy-cli [\fB-h\fR] [\fB-p\fR \fIPROXY\fR] [\fB-q\fR] [\fB-v\fR]
|
|
||||||
.br
|
|
||||||
crunchy-cli help
|
|
||||||
.br
|
|
||||||
crunchy-cli login [\fB--persistent\fR] [\fB--session-id\fR \fISESSION_ID\fR] [\fIusername\fR, \fIpassword\fR]
|
|
||||||
.br
|
|
||||||
crunchy-cli download [\fB-a\fR \fIAUDIO\fR] [\fB-s\fR \fISUBTITLE\fR] [\fB-d\fR \fIDIRECTORY\fR] [\fB-o\fR \fIOUTPUT\fR] [\fB-r\fR \fIRESOLUTION\fR] [\fB-g\fR \fIGOROUTINES\fR] \fIURLs...\fR
|
|
||||||
.br
|
|
||||||
crunchy-cli archive [\fB-l\fR \fILANGUAGE\fR] [\fB-d\fR \fIDIRECTORY\fR] [\fB-o\fR \fIOUTPUT\fR] [\fB-m\fR \fIMERGE BEHAVIOR\fR] [\fB-c\fR \fICOMPRESS\fR] [\fB-r\fR \fIRESOLUTION\fR] [\fB-g\fR \fIGOROUTINES\fR] \fIURLs...\fR
|
|
||||||
.br
|
|
||||||
crunchy-cli update [\fB-i\fR \fIINSTALL\fR]
|
|
||||||
|
|
||||||
.SH DESCRIPTION
|
|
||||||
.TP
|
|
||||||
With \fBcrunchy-cli\fR you can easily download videos and series from crunchyroll.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
Note that you need a \fBcrunchyroll premium\fR account in order to use this tool!
|
|
||||||
|
|
||||||
.SH GENERAL OPTIONS
|
|
||||||
.TP
|
|
||||||
These options can be passed to every action.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-h, --help\fR
|
|
||||||
Shows help.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-p, --proxy PROXY\fR
|
|
||||||
Sets a proxy through which all traffic will be routed.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-q, --quiet\fR
|
|
||||||
Disables all output.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-v, --verbose\fR
|
|
||||||
Shows verbose output.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB--lang\fR
|
|
||||||
Set language to use. If not set, it's received from the system locale dynamically. Choose from: ar-ME, ar-SA, de-DE, en-US, es-419, es-ES, es-LA, fr-FR, it-IT, ja-JP, pt-BR, pt-PT, ru-RU, zh-CN.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB--useragent\fR
|
|
||||||
Useragent to do all requests with.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB--version\fR
|
|
||||||
Shows the current cli version.
|
|
||||||
|
|
||||||
.SH LOGIN COMMAND
|
|
||||||
This command logs in to crunchyroll and stores the session id or credentials on the drive. This needs to be done before calling other commands since they need a valid login to operate.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB--persistent\fR
|
|
||||||
Stores the given credentials permanently on the drive. The *nix path for it is $HOME/.config/crunchy.
|
|
||||||
.br
|
|
||||||
NOTE: The credentials are stored in plain text, and if you do not use \fB--session-id\fR your credentials are used (if you do not use the \fB--persistent\fR flag, only a session id gets stored, regardless of whether you log in with username/password or a session id).
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB--session-id SESSION_ID\fR
|
|
||||||
Login via a session id (which can be extracted from a crunchyroll browser cookie) instead of using username and password.
|
|
||||||
|
|
||||||
.SH DOWNLOAD COMMAND
|
|
||||||
A command to simply download videos. The output file is stored as a \fI.ts\fR file. \fIffmpeg\fR has to be installed if you want to change the format the videos are stored in.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-a, --audio AUDIO\fR
|
|
||||||
Forces videos to be downloaded with the given audio locale. If no video with this audio locale is available, nothing will be downloaded. Available locales are: ja-JP, en-US, es-419, es-LA, es-ES, fr-FR, pt-PT, pt-BR, it-IT, de-DE, ru-RU, ar-SA, ar-ME.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-s, --subtitle SUBTITLE\fR
|
|
||||||
Forces videos to be downloaded with subtitles in the given locale / language. If no video with this subtitle locale is available, nothing will be downloaded. Available locales are: ja-JP, en-US, es-419, es-LA, es-ES, fr-FR, pt-PT, pt-BR, it-IT, de-DE, ru-RU, ar-SA, ar-ME.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-d, --directory DIRECTORY\fR
|
|
||||||
The directory to download all files to.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-o, --output OUTPUT\fR
|
|
||||||
Name of the output file. Formatting is also supported, so if the name contains one or more of the following things, they will get replaced.
|
|
||||||
{title} » Title of the video.
|
|
||||||
{series_name} » Name of the series.
|
|
||||||
{season_name} » Name of the season.
|
|
||||||
{season_number} » Number of the season.
|
|
||||||
{episode_number} » Number of the episode.
|
|
||||||
{resolution} » Resolution of the video.
|
|
||||||
{fps} » Frame Rate of the video.
|
|
||||||
{audio} » Audio locale of the video.
|
|
||||||
{subtitle} » Subtitle locale of the video.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-r, --resolution RESOLUTION\fR
|
|
||||||
The video resolution. Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or "common-use" words (e.g. best).
|
|
||||||
Available pixels: 1920x1080, 1280x720, 640x480, 480x360, 426x240.
|
|
||||||
Available abbreviations: 1080p, 720p, 480p, 360p, 240p.
|
|
||||||
Available common-use words: best (best available resolution), worst (worst available resolution).
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-g, --goroutines GOROUTINES\fR
|
|
||||||
Sets the number of parallel downloads for the segments the final video is made of. Default is the number of cores the computer has.
|
|
||||||
|
|
||||||
.SH ARCHIVE COMMAND
|
|
||||||
This command behaves like \fBdownload\fR except that it requires \fIffmpeg\fR and stores the output only in .mkv files.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-l, --language LANGUAGE\fR
|
|
||||||
Audio locales which should be downloaded. Can be used multiple times. Available locales are: ja-JP, en-US, es-419, es-LA, es-ES, fr-FR, pt-PT, pt-BR, it-IT, de-DE, ru-RU, ar-SA, ar-ME.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-s, --sublang LANGUAGE\fR
|
|
||||||
Subtitle languages to use, by default all are included. Can be used multiple times. Available locales are: ja-JP, en-US, es-419, es-LA, es-ES, fr-FR, pt-PT, pt-BR, it-IT, de-DE, ru-RU, ar-SA, ar-ME.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-d, --directory DIRECTORY\fR
|
|
||||||
The directory to download all files to.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-o, --output OUTPUT\fR
|
|
||||||
Name of the output file. Formatting is also supported, so if the name contains one or more of the following things, they will get replaced.
|
|
||||||
{title} » Title of the video.
|
|
||||||
{series_name} » Name of the series.
|
|
||||||
{season_name} » Name of the season.
|
|
||||||
{season_number} » Number of the season.
|
|
||||||
{episode_number} » Number of the episode.
|
|
||||||
{resolution} » Resolution of the video.
|
|
||||||
{fps} » Frame Rate of the video.
|
|
||||||
{audio} » Audio locale of the video.
|
|
||||||
{subtitle} » Subtitle locale of the video.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-m, --merge MERGE BEHAVIOR\fR
|
|
||||||
Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio', 'video'. \fBaudio\fR stores one video and only the audio of all other languages, \fBvideo\fR stores all videos of the given languages and their audio, \fBauto\fR (which is the default) only behaves like \fBvideo\fR if the lengths of two videos differ (and only for those two videos), else like \fBaudio\fR.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-c, --compress COMPRESS\fR
|
|
||||||
If set, all output will be compressed into an archive (every url generates a new one). This flag sets the name of the compressed output file. The file ending specifies the compression algorithm. The following algorithms are supported: gzip, tar, zip.
|
|
||||||
Just like \fB--output\fR the name can be formatted. But the only option available here is \fI{series_name}\fR.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-r, --resolution RESOLUTION\fR
|
|
||||||
The video resolution. Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or "common-use" words (e.g. best).
|
|
||||||
Available pixels: 1920x1080, 1280x720, 640x480, 480x360, 426x240.
|
|
||||||
Available abbreviations: 1080p, 720p, 480p, 360p, 240p.
|
|
||||||
Available common-use words: best (best available resolution), worst (worst available resolution).
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-g, --goroutines GOROUTINES\fR
|
|
||||||
Sets the number of parallel downloads for the segments the final video is made of. Default is the number of cores the computer has.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB--no-subtitle-optimizations DISABLE\fR
|
|
||||||
Disable subtitle optimizations which caused subtitle sizing and layout issues (https://github.com/crunchy-labs/crunchy-cli/issues/66).
|
|
||||||
|
|
||||||
.SH UPDATE COMMAND
|
|
||||||
Checks if a newer version is available.
|
|
||||||
.TP
|
|
||||||
|
|
||||||
\fB-i, --install INSTALL\fR
|
|
||||||
If given, the command tries to update the executable with the newer version (if a newer one is available).
|
|
||||||
|
|
||||||
.SH URL OPTIONS
|
|
||||||
If you want to download only specific episodes of a series, you could either pass every single episode url to the downloader (which is fine for 1 - 3 episodes) or use filtering.
|
|
||||||
It works quite simply: just put a specific pattern, surrounded by square brackets, at the end of the url of the anime you want to download. A season and / or episode, as well as a range of episodes to download, can be specified.
|
|
||||||
Use the list below to get a better overview of what is possible.
|
|
||||||
...[E5] - Download the fifth episode.
|
|
||||||
...[S1] - Download the full first season.
|
|
||||||
...[-S2] - Download all seasons up to and including season 2.
|
|
||||||
...[S3E4-] - Download all episodes from and including season 3, episode 4.
|
|
||||||
...[S1E4-S3] - Download all episodes from and including season 1, episode 4, until and including season 3.
|
|
||||||
|
|
||||||
In practice, it would look like this: \fIhttps://www.crunchyroll.com/series/12345678/example[S1E5-S3E2]\fR.
|
|
||||||
|
|
||||||
An \fBS\fR followed by a number indicates the season number, an \fBE\fR followed by a number indicates an episode number. It doesn't matter if \fBS\fR, \fBE\fR or both are missing. Theoretically \fB[-]\fR is a valid pattern too. Note that \fBS\fR must always stay before \fBE\fR when used.
|
|
||||||
|
|
||||||
.SH EXAMPLES
|
|
||||||
Login via crunchyroll account email and password.
|
|
||||||
.br
|
|
||||||
$ crunchy-cli login user@example.com 12345678
|
|
||||||
|
|
||||||
Download an episode normally. Your system locale will be used for the video's audio.
|
|
||||||
.br
|
|
||||||
$ crunchy-cli download https://www.crunchyroll.com/watch/GRDKJZ81Y/alone-and-lonesome
|
|
||||||
|
|
||||||
Download an episode in 720p and name it 'darling.mp4'. Note that you need \fBffmpeg\fR to save files which do not have '.ts' as file extension.
|
|
||||||
.br
|
|
||||||
$ crunchy-cli download -o "darling.mp4" -r 720p https://www.crunchyroll.com/watch/GRDKJZ81Y/alone-and-lonesome
|
|
||||||
|
|
||||||
Download an episode with Japanese audio and American subtitles.
|
|
||||||
.br
|
|
||||||
$ crunchy-cli download -a ja-JP -s en-US https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx[E3-E5]
|
|
||||||
|
|
||||||
Stores the episode in a .mkv file.
|
|
||||||
.br
|
|
||||||
$ crunchy-cli archive https://www.crunchyroll.com/watch/GRDKJZ81Y/alone-and-lonesome
|
|
||||||
|
|
||||||
Downloads the first two episodes of Darling in the FranXX and stores them compressed in a file.
|
|
||||||
.br
|
|
||||||
$ crunchy-cli archive -c "ditf.tar.gz" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx[E1-E2]
|
|
||||||
|
|
||||||
.SH BUGS
|
|
||||||
If you notice any bug or want an enhancement, feel free to create a new issue or pull request in the GitHub repository.
|
|
||||||
|
|
||||||
.SH AUTHOR
|
|
||||||
Crunchy Labs Maintainers
|
|
||||||
.br
|
|
||||||
Source: https://github.com/crunchy-labs/crunchy-cli
|
|
||||||
|
|
||||||
.SH COPYRIGHT
|
|
||||||
Copyright (C) 2022 Crunchy Labs Maintainers
|
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or
|
|
||||||
modify it under the terms of the GNU General Public License
|
|
||||||
as published by the Free Software Foundation, either version 3
|
|
||||||
of the License, or (at your option) any later version.
|
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
GNU General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU General Public License
|
|
||||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
14
go.mod
14
go.mod
|
|
@ -1,14 +0,0 @@
|
||||||
module github.com/crunchy-labs/crunchy-cli
|
|
||||||
|
|
||||||
go 1.19
|
|
||||||
|
|
||||||
require (
|
|
||||||
github.com/crunchy-labs/crunchyroll-go/v3 v3.0.4
|
|
||||||
github.com/grafov/m3u8 v0.11.1
|
|
||||||
github.com/spf13/cobra v1.6.1
|
|
||||||
)
|
|
||||||
|
|
||||||
require (
|
|
||||||
github.com/inconshreveable/mousetrap v1.0.1 // indirect
|
|
||||||
github.com/spf13/pflag v1.0.5 // indirect
|
|
||||||
)
|
|
||||||
14
go.sum
14
go.sum
|
|
@ -1,14 +0,0 @@
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
|
||||||
github.com/crunchy-labs/crunchyroll-go/v3 v3.0.4 h1:QCHlk0PEfrm7uPgLm2RNtwXED3ACKlhD9xlrlGsPhDI=
|
|
||||||
github.com/crunchy-labs/crunchyroll-go/v3 v3.0.4/go.mod h1:SjTQD3IX7Z+MLsMSd2fP5ttsJ4KtpXY6r08bHLwrOLM=
|
|
||||||
github.com/grafov/m3u8 v0.11.1 h1:igZ7EBIB2IAsPPazKwRKdbhxcoBKO3lO1UY57PZDeNA=
|
|
||||||
github.com/grafov/m3u8 v0.11.1/go.mod h1:nqzOkfBiZJENr52zTVd/Dcl03yzphIMbJqkXGu+u080=
|
|
||||||
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
|
|
||||||
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
|
||||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
|
||||||
github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
|
|
||||||
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
|
|
||||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
|
||||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
|
||||||
9
main.go
9
main.go
|
|
@ -1,9 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/crunchy-labs/crunchy-cli/cli"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
cli.Execute()
|
|
||||||
}
|
|
||||||
4
src/main.rs
Normal file
4
src/main.rs
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() {
|
||||||
|
crunchy_cli_core::cli_entrypoint().await
|
||||||
|
}
|
||||||
|
|
@ -1,99 +0,0 @@
|
||||||
package utils
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchyroll-go/v3"
|
|
||||||
"github.com/crunchy-labs/crunchyroll-go/v3/utils"
|
|
||||||
"regexp"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
var urlFilter = regexp.MustCompile(`(S(\d+))?(E(\d+))?((-)(S(\d+))?(E(\d+))?)?(,|$)`)
|
|
||||||
|
|
||||||
func ExtractEpisodes(url string, locales ...crunchyroll.LOCALE) ([][]*crunchyroll.Episode, error) {
|
|
||||||
var matches [][]string
|
|
||||||
|
|
||||||
lastOpen := strings.LastIndex(url, "[")
|
|
||||||
if strings.HasSuffix(url, "]") && lastOpen != -1 && lastOpen < len(url) {
|
|
||||||
matches = urlFilter.FindAllStringSubmatch(url[lastOpen+1:len(url)-1], -1)
|
|
||||||
|
|
||||||
var all string
|
|
||||||
for _, match := range matches {
|
|
||||||
all += match[0]
|
|
||||||
}
|
|
||||||
if all != url[lastOpen+1:len(url)-1] {
|
|
||||||
return nil, fmt.Errorf("invalid episode filter")
|
|
||||||
}
|
|
||||||
url = url[:lastOpen]
|
|
||||||
}
|
|
||||||
|
|
||||||
episodes, err := Crunchy.ExtractEpisodesFromUrl(url, locales...)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("failed to get episodes: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(episodes) == 0 {
|
|
||||||
return nil, fmt.Errorf("no episodes found")
|
|
||||||
}
|
|
||||||
|
|
||||||
if matches != nil {
|
|
||||||
for _, match := range matches {
|
|
||||||
fromSeason, fromEpisode, toSeason, toEpisode := -1, -1, -1, -1
|
|
||||||
if match[2] != "" {
|
|
||||||
fromSeason, _ = strconv.Atoi(match[2])
|
|
||||||
}
|
|
||||||
if match[4] != "" {
|
|
||||||
fromEpisode, _ = strconv.Atoi(match[4])
|
|
||||||
}
|
|
||||||
if match[8] != "" {
|
|
||||||
toSeason, _ = strconv.Atoi(match[8])
|
|
||||||
}
|
|
||||||
if match[10] != "" {
|
|
||||||
toEpisode, _ = strconv.Atoi(match[10])
|
|
||||||
}
|
|
||||||
|
|
||||||
if match[6] != "-" {
|
|
||||||
toSeason = fromSeason
|
|
||||||
toEpisode = fromEpisode
|
|
||||||
}
|
|
||||||
|
|
||||||
tmpEps := make([]*crunchyroll.Episode, 0)
|
|
||||||
for _, episode := range episodes {
|
|
||||||
if fromSeason != -1 && (episode.SeasonNumber < fromSeason || (fromEpisode != -1 && episode.EpisodeNumber < fromEpisode)) {
|
|
||||||
continue
|
|
||||||
} else if fromSeason == -1 && fromEpisode != -1 && episode.EpisodeNumber < fromEpisode {
|
|
||||||
continue
|
|
||||||
} else if toSeason != -1 && (episode.SeasonNumber > toSeason || (toEpisode != -1 && episode.EpisodeNumber > toEpisode)) {
|
|
||||||
continue
|
|
||||||
} else if toSeason == -1 && toEpisode != -1 && episode.EpisodeNumber > toEpisode {
|
|
||||||
continue
|
|
||||||
} else {
|
|
||||||
tmpEps = append(tmpEps, episode)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(tmpEps) == 0 {
|
|
||||||
return nil, fmt.Errorf("no episodes are matching the given filter")
|
|
||||||
}
|
|
||||||
|
|
||||||
episodes = tmpEps
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var final [][]*crunchyroll.Episode
|
|
||||||
if len(locales) > 0 {
|
|
||||||
final = make([][]*crunchyroll.Episode, len(locales))
|
|
||||||
localeSorted, err := utils.SortEpisodesByAudio(episodes)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("failed to get audio locale: %v", err)
|
|
||||||
}
|
|
||||||
for i, locale := range locales {
|
|
||||||
final[i] = append(final[i], localeSorted[locale]...)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
final = [][]*crunchyroll.Episode{episodes}
|
|
||||||
}
|
|
||||||
|
|
||||||
return final, nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,49 +0,0 @@
|
||||||
package utils
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
func FreeFileName(filename string) (string, bool) {
|
|
||||||
ext := filepath.Ext(filename)
|
|
||||||
base := strings.TrimSuffix(filename, ext)
|
|
||||||
// checks if a .tar stands before the "actual" file ending
|
|
||||||
if extraExt := filepath.Ext(base); extraExt == ".tar" {
|
|
||||||
ext = extraExt + ext
|
|
||||||
base = strings.TrimSuffix(base, extraExt)
|
|
||||||
}
|
|
||||||
j := 0
|
|
||||||
for ; ; j++ {
|
|
||||||
if _, stat := os.Stat(filename); stat != nil && !os.IsExist(stat) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
filename = fmt.Sprintf("%s (%d)%s", base, j+1, ext)
|
|
||||||
}
|
|
||||||
return filename, j != 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func GenerateFilename(name, directory string) string {
|
|
||||||
if runtime.GOOS != "windows" {
|
|
||||||
for _, char := range []string{"/"} {
|
|
||||||
name = strings.ReplaceAll(name, char, "")
|
|
||||||
}
|
|
||||||
Log.Debug("Replaced invalid characters (not windows)")
|
|
||||||
} else {
|
|
||||||
// ahh i love windows :)))
|
|
||||||
for _, char := range []string{"\\", "<", ">", ":", "\"", "/", "|", "?", "*"} {
|
|
||||||
name = strings.ReplaceAll(name, char, "")
|
|
||||||
}
|
|
||||||
Log.Debug("Replaced invalid characters (windows)")
|
|
||||||
}
|
|
||||||
|
|
||||||
filename, changed := FreeFileName(filepath.Join(directory, name))
|
|
||||||
if changed {
|
|
||||||
Log.Debug("File `%s` already exists, changing name to `%s`", filepath.Base(name), filepath.Base(filename))
|
|
||||||
}
|
|
||||||
|
|
||||||
return filename
|
|
||||||
}
|
|
||||||
|
|
@ -1,63 +0,0 @@
|
||||||
package utils
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchyroll-go/v3"
|
|
||||||
"reflect"
|
|
||||||
"runtime"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
type FormatInformation struct {
|
|
||||||
// the Format to download
|
|
||||||
Format *crunchyroll.Format
|
|
||||||
|
|
||||||
// additional formats which are only used by archive.go
|
|
||||||
AdditionalFormats []*crunchyroll.Format
|
|
||||||
|
|
||||||
Title string `json:"title"`
|
|
||||||
SeriesName string `json:"series_name"`
|
|
||||||
SeasonName string `json:"season_name"`
|
|
||||||
SeasonNumber int `json:"season_number"`
|
|
||||||
EpisodeNumber int `json:"episode_number"`
|
|
||||||
Resolution string `json:"resolution"`
|
|
||||||
FPS float64 `json:"fps"`
|
|
||||||
Audio crunchyroll.LOCALE `json:"audio"`
|
|
||||||
Subtitle crunchyroll.LOCALE `json:"subtitle"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func (fi FormatInformation) FormatString(source string) string {
|
|
||||||
fields := reflect.TypeOf(fi)
|
|
||||||
values := reflect.ValueOf(fi)
|
|
||||||
|
|
||||||
for i := 0; i < fields.NumField(); i++ {
|
|
||||||
var valueAsString string
|
|
||||||
switch value := values.Field(i); value.Kind() {
|
|
||||||
case reflect.String:
|
|
||||||
valueAsString = value.String()
|
|
||||||
case reflect.Int:
|
|
||||||
valueAsString = fmt.Sprintf("%02d", value.Int())
|
|
||||||
case reflect.Float64:
|
|
||||||
valueAsString = fmt.Sprintf("%.2f", value.Float())
|
|
||||||
case reflect.Bool:
|
|
||||||
valueAsString = fields.Field(i).Tag.Get("json")
|
|
||||||
if !value.Bool() {
|
|
||||||
valueAsString = "no " + valueAsString
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if runtime.GOOS != "windows" {
|
|
||||||
for _, char := range []string{"/"} {
|
|
||||||
valueAsString = strings.ReplaceAll(valueAsString, char, "")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
for _, char := range []string{"\\", "<", ">", ":", "\"", "/", "|", "?", "*"} {
|
|
||||||
valueAsString = strings.ReplaceAll(valueAsString, char, "")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
source = strings.ReplaceAll(source, "{"+fields.Field(i).Tag.Get("json")+"}", valueAsString)
|
|
||||||
}
|
|
||||||
|
|
||||||
return source
|
|
||||||
}
|
|
||||||
|
|
@ -1,51 +0,0 @@
|
||||||
package utils
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
type headerRoundTripper struct {
|
|
||||||
http.RoundTripper
|
|
||||||
header map[string]string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (rht headerRoundTripper) RoundTrip(r *http.Request) (*http.Response, error) {
|
|
||||||
resp, err := rht.RoundTripper.RoundTrip(r)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
for k, v := range rht.header {
|
|
||||||
resp.Header.Set(k, v)
|
|
||||||
}
|
|
||||||
return resp, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func CreateOrDefaultClient(proxy, useragent string) (*http.Client, error) {
|
|
||||||
if proxy == "" {
|
|
||||||
return http.DefaultClient, nil
|
|
||||||
} else {
|
|
||||||
proxyURL, err := url.Parse(proxy)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
var rt http.RoundTripper = &http.Transport{
|
|
||||||
DisableCompression: true,
|
|
||||||
Proxy: http.ProxyURL(proxyURL),
|
|
||||||
}
|
|
||||||
if useragent != "" {
|
|
||||||
rt = headerRoundTripper{
|
|
||||||
RoundTripper: rt,
|
|
||||||
header: map[string]string{"User-Agent": useragent},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
client := &http.Client{
|
|
||||||
Transport: rt,
|
|
||||||
Timeout: 30 * time.Second,
|
|
||||||
}
|
|
||||||
return client, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,63 +0,0 @@
|
||||||
package utils
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"github.com/crunchy-labs/crunchyroll-go/v3"
|
|
||||||
"github.com/crunchy-labs/crunchyroll-go/v3/utils"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"runtime"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SystemLocale receives the system locale
|
|
||||||
// https://stackoverflow.com/questions/51829386/golang-get-system-language/51831590#51831590
|
|
||||||
func SystemLocale(verbose bool) crunchyroll.LOCALE {
|
|
||||||
if lang, ok := os.LookupEnv("CRUNCHY_LANG"); ok {
|
|
||||||
return crunchyroll.LOCALE(lang)
|
|
||||||
}
|
|
||||||
|
|
||||||
if runtime.GOOS != "windows" {
|
|
||||||
if lang, ok := os.LookupEnv("LANG"); ok {
|
|
||||||
var l crunchyroll.LOCALE
|
|
||||||
if preSuffix := strings.Split(strings.Split(lang, ".")[0], "_"); len(preSuffix) == 1 {
|
|
||||||
l = crunchyroll.LOCALE(preSuffix[0])
|
|
||||||
} else {
|
|
||||||
prefix := strings.Split(lang, "_")[0]
|
|
||||||
l = crunchyroll.LOCALE(fmt.Sprintf("%s-%s", prefix, preSuffix[1]))
|
|
||||||
}
|
|
||||||
if !utils.ValidateLocale(l) {
|
|
||||||
if verbose {
|
|
||||||
Log.Err("%s is not a supported locale, using %s as fallback", l, crunchyroll.US)
|
|
||||||
}
|
|
||||||
l = crunchyroll.US
|
|
||||||
}
|
|
||||||
return l
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
cmd := exec.Command("powershell", "Get-Culture | select -exp Name")
|
|
||||||
if output, err := cmd.Output(); err == nil {
|
|
||||||
l := crunchyroll.LOCALE(strings.Trim(string(output), "\r\n"))
|
|
||||||
if !utils.ValidateLocale(l) {
|
|
||||||
if verbose {
|
|
||||||
Log.Err("%s is not a supported locale, using %s as fallback", l, crunchyroll.US)
|
|
||||||
}
|
|
||||||
l = crunchyroll.US
|
|
||||||
}
|
|
||||||
return l
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if verbose {
|
|
||||||
Log.Err("Failed to get locale, using %s", crunchyroll.US)
|
|
||||||
}
|
|
||||||
return crunchyroll.US
|
|
||||||
}
|
|
||||||
|
|
||||||
func LocalesAsStrings() (locales []string) {
|
|
||||||
for _, locale := range utils.AllLocales {
|
|
||||||
locales = append(locales, string(locale))
|
|
||||||
}
|
|
||||||
sort.Strings(locales)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|

@@ -1,12 +0,0 @@
package utils

type Logger interface {
    IsDev() bool
    Debug(format string, v ...any)
    Info(format string, v ...any)
    Warn(format string, v ...any)
    Err(format string, v ...any)
    Empty()
    SetProcess(format string, v ...any)
    StopProcess(format string, v ...any)
}
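
To illustrate the contract (not part of the repository), a bare-bones Logger implementation backed by the standard log package; it assumes "log" is imported and ignores whatever progress semantics the real logger attaches to SetProcess/StopProcess.

    // Sketch only: a minimal Logger implementation for illustration.
    type stdLogger struct{ dev bool }

    func (l stdLogger) IsDev() bool                         { return l.dev }
    func (l stdLogger) Debug(format string, v ...any)       { log.Printf("[debug] "+format, v...) }
    func (l stdLogger) Info(format string, v ...any)        { log.Printf("[info] "+format, v...) }
    func (l stdLogger) Warn(format string, v ...any)        { log.Printf("[warn] "+format, v...) }
    func (l stdLogger) Err(format string, v ...any)         { log.Printf("[error] "+format, v...) }
    func (l stdLogger) Empty()                              { log.Println() }
    func (l stdLogger) SetProcess(format string, v ...any)  { log.Printf("[process] "+format, v...) }
    func (l stdLogger) StopProcess(format string, v ...any) { log.Printf("[done] "+format, v...) }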

177 utils/save.go
@@ -1,177 +0,0 @@
package utils

import (
    "crypto/aes"
    "crypto/cipher"
    "crypto/rand"
    "crypto/sha256"
    "fmt"
    "github.com/crunchy-labs/crunchyroll-go/v3"
    "io"
    "os"
    "path/filepath"
    "strings"
)

func SaveSession(crunchy *crunchyroll.Crunchyroll) error {
    file := filepath.Join(os.TempDir(), ".crunchy")
    return os.WriteFile(file, []byte(crunchy.RefreshToken), 0600)
}
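
A usage sketch (illustrative only): SaveSession keeps the refresh token in a .crunchy file under the OS temp directory, so the session survives between invocations until the temp directory is cleaned. Crunchy and Log are the package-level variables declared at the end of this diff.

    // Sketch only: remember the current session for subsequent runs.
    func rememberSession() {
        if err := SaveSession(Crunchy); err != nil {
            Log.Err("Failed to save session: %v", err)
        }
    }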

func SaveCredentialsPersistent(user, password string, encryptionKey []byte) error {
    configDir, err := os.UserConfigDir()
    if err != nil {
        return err
    }
    file := filepath.Join(configDir, "crunchy-cli", "crunchy")

    var credentials []byte
    if encryptionKey != nil {
        hashedEncryptionKey := sha256.Sum256(encryptionKey)
        block, err := aes.NewCipher(hashedEncryptionKey[:])
        if err != nil {
            return err
        }
        gcm, err := cipher.NewGCM(block)
        if err != nil {
            return err
        }
        nonce := make([]byte, gcm.NonceSize())
        if _, err = io.ReadFull(rand.Reader, nonce); err != nil {
            return err
        }
        b := gcm.Seal(nonce, nonce, []byte(fmt.Sprintf("%s\n%s", user, password)), nil)
        credentials = append([]byte("aes:"), b...)
    } else {
        credentials = []byte(fmt.Sprintf("%s\n%s", user, password))
    }

    if err = os.MkdirAll(filepath.Join(configDir, "crunchy-cli"), 0755); err != nil {
        return err
    }
    return os.WriteFile(file, credentials, 0600)
}
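
For orientation (not from the repository): with an encryption key, the stored file is the literal prefix "aes:" followed by the GCM nonce and ciphertext, with the AES key being the SHA-256 of whatever passphrase bytes the caller supplies; without a key it is the plain "user\npassword" pair. A minimal calling sketch:

    // Sketch only: persist the login, encrypted when a passphrase was supplied.
    func rememberLogin(user, password, passphrase string) error {
        var key []byte
        if passphrase != "" {
            key = []byte(passphrase)
        }
        return SaveCredentialsPersistent(user, password, key)
    }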

func SaveSessionPersistent(crunchy *crunchyroll.Crunchyroll) error {
    configDir, err := os.UserConfigDir()
    if err != nil {
        return err
    }
    file := filepath.Join(configDir, "crunchy-cli", "crunchy")

    if err = os.MkdirAll(filepath.Join(configDir, "crunchy-cli"), 0755); err != nil {
        return err
    }
    return os.WriteFile(file, []byte(crunchy.RefreshToken), 0600)
}

func IsTempSession() bool {
    file := filepath.Join(os.TempDir(), ".crunchy")
    if _, err := os.Stat(file); !os.IsNotExist(err) {
        return true
    }
    return false
}

func IsSavedSessionEncrypted() (bool, error) {
    configDir, err := os.UserConfigDir()
    if err != nil {
        return false, err
    }
    file := filepath.Join(configDir, "crunchy-cli", "crunchy")
    body, err := os.ReadFile(file)
    if err != nil {
        return false, err
    }
    return strings.HasPrefix(string(body), "aes:"), nil
}
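
A decision sketch (illustrative; promptPassphrase is a hypothetical helper, not part of this package): a passphrase only needs to be requested when no temp session exists and the persistent file is encrypted.

    // Sketch only: determine whether the stored credentials need a passphrase.
    func sessionEncryptionKey() []byte {
        if IsTempSession() {
            return nil
        }
        if encrypted, err := IsSavedSessionEncrypted(); err == nil && encrypted {
            return promptPassphrase() // hypothetical prompt helper
        }
        return nil
    }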

func LoadSession(encryptionKey []byte) (*crunchyroll.Crunchyroll, error) {
    file := filepath.Join(os.TempDir(), ".crunchy")
    crunchy, err := loadTempSession(file)
    if err != nil {
        return nil, err
    }
    if crunchy != nil {
        return crunchy, nil
    }

    configDir, err := os.UserConfigDir()
    if err != nil {
        return nil, err
    }
    file = filepath.Join(configDir, "crunchy-cli", "crunchy")
    crunchy, err = loadPersistentSession(file, encryptionKey)
    if err != nil {
        return nil, err
    }
    if crunchy != nil {
        return crunchy, nil
    }

    return nil, fmt.Errorf("not logged in")
}
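
A restore sketch (illustrative only), combining LoadSession with the package-level Crunchy variable declared at the end of this diff:

    // Sketch only: restore the stored session into the shared Crunchy instance.
    func restoreSession(encryptionKey []byte) error {
        crunchy, err := LoadSession(encryptionKey)
        if err != nil {
            return err
        }
        Crunchy = crunchy
        return nil
    }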

func loadTempSession(file string) (*crunchyroll.Crunchyroll, error) {
    if _, err := os.Stat(file); !os.IsNotExist(err) {
        body, err := os.ReadFile(file)
        if err != nil {
            return nil, err
        }
        crunchy, err := crunchyroll.LoginWithRefreshToken(string(body), SystemLocale(true), Client)
        if err != nil {
            Log.Debug("Failed to login with temp refresh token: %v", err)
        } else {
            Log.Debug("Logged in with refresh token %s. BLANK THIS LINE OUT IF YOU'RE ASKED TO POST THE DEBUG OUTPUT SOMEWHERE", body)
            return crunchy, nil
        }
    }
    return nil, nil
}

func loadPersistentSession(file string, encryptionKey []byte) (crunchy *crunchyroll.Crunchyroll, err error) {
    if _, err = os.Stat(file); !os.IsNotExist(err) {
        body, err := os.ReadFile(file)
        if err != nil {
            return nil, err
        }
        split := strings.SplitN(string(body), "\n", 2)
        if (len(split) == 1 || split[1] == "") && strings.HasPrefix(split[0], "aes:") {
            encrypted := body[4:]
            hashedEncryptionKey := sha256.Sum256(encryptionKey)
            block, err := aes.NewCipher(hashedEncryptionKey[:])
            if err != nil {
                return nil, err
            }
            gcm, err := cipher.NewGCM(block)
            if err != nil {
                return nil, err
            }
            nonce, cypherText := encrypted[:gcm.NonceSize()], encrypted[gcm.NonceSize():]
            b, err := gcm.Open(nil, nonce, cypherText, nil)
            if err != nil {
                return nil, err
            }
            split = strings.SplitN(string(b), "\n", 2)
        }
        if len(split) == 2 {
            if crunchy, err = crunchyroll.LoginWithCredentials(split[0], split[1], SystemLocale(true), Client); err != nil {
                return nil, err
            }
            Log.Debug("Logged in with credentials")
        } else {
            if crunchy, err = crunchyroll.LoginWithRefreshToken(split[0], SystemLocale(true), Client); err != nil {
                return nil, err
            }
            Log.Debug("Logged in with refresh token %s. BLANK THIS LINE OUT IF YOU'RE ASKED TO POST THE DEBUG OUTPUT SOMEWHERE", split[0])
        }

        // The refresh token is written to a temp file to reduce how often a full re-login is necessary.
        // Crunchyroll also seems to apply a short cooldown when an account logs in too often within a short time.
        if err = os.WriteFile(filepath.Join(os.TempDir(), ".crunchy"), []byte(crunchy.RefreshToken), 0600); err != nil {
            return nil, err
        }
    }

    return
}

10 utils/std.go
@@ -1,10 +0,0 @@
package utils

func ElementInSlice[T comparable](check T, toCheck []T) bool {
    for _, item := range toCheck {
        if check == item {
            return true
        }
    }
    return false
}
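
A usage sketch (function name illustrative): the generic helper works for any comparable element type, for example checking a locale string against the list produced by LocalesAsStrings.

    // Sketch only: membership check with the generic helper above.
    func isKnownLocale(locale string) bool {
        return ElementInSlice(locale, LocalesAsStrings())
    }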

@@ -1,7 +0,0 @@
package utils

import "os/exec"

func HasFFmpeg() bool {
    return exec.Command("ffmpeg", "-h").Run() == nil
}
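
A guard sketch (illustrative only; the error message is made up and "fmt" would have to be imported): commands that re-encode or merge output would typically check for FFmpeg up front.

    // Sketch only: fail early when FFmpeg is required but not installed.
    func requireFFmpeg() error {
        if !HasFFmpeg() {
            return fmt.Errorf("this command needs ffmpeg, but it could not be found in PATH")
        }
        return nil
    }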

@@ -1,14 +0,0 @@
package utils

import (
    "github.com/crunchy-labs/crunchyroll-go/v3"
    "net/http"
)

var Version = "development"

var (
    Crunchy *crunchyroll.Crunchyroll
    Client  *http.Client
    Log     Logger
)
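
Version defaults to "development" and is presumably overridden at build time; Crunchy, Client and Log hold the shared state the helpers above rely on. A tiny sketch (illustrative, not from the repository) of using that state once it is initialised:

    // Sketch only: a start-up banner built from the package-level state above.
    func printBanner() {
        Log.Info("crunchy-cli %s", Version)
        Log.Debug("FFmpeg available: %v", HasFFmpeg())
    }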