Rewrite it in Rust

ByteDream 2022-10-20 18:52:08 +02:00
parent d4bef511cb
commit 039d7cfb81
51 changed files with 4018 additions and 3208 deletions

View file

@@ -0,0 +1,29 @@
[package]
name = "crunchy-cli-core"
version = "0.1.0"
edition = "2021"
[features]
# Embed a static curl library into the binary instead of just linking it.
static-curl = ["crunchyroll-rs/static-curl"]
# Embed a static openssl library into the binary instead of just linking it. If you want to compile this project against
# musl and have openssl issues, this might solve these issues.
static-ssl = ["crunchyroll-rs/static-ssl"]
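# For example, either feature can be enabled at build time with `cargo build --features static-curl,static-ssl`.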
[dependencies]
anyhow = "1.0"
async-trait = "0.1"
clap = { version = "4.0", features = ["derive"] }
chrono = "0.4"
crunchyroll-rs = { git = "https://github.com/crunchy-labs/crunchyroll-rs", default-features = false, features = ["stream", "parse"] }
ctrlc = "3.2"
dirs = "4.0"
isahc = { git = "https://github.com/sagebind/isahc", rev = "34f158ef" }
log = { version = "0.4", features = ["std"] }
num_cpus = "1.13"
regex = "1.6"
signal-hook = "0.3"
tempfile = "3.3"
terminal_size = "0.2"
tokio = { version = "1.21", features = ["macros", "rt-multi-thread", "time"] }
sys-locale = "0.2"

View file

@@ -0,0 +1,567 @@
use crate::cli::log::tab_info;
use crate::cli::utils::{download_segments, find_resolution};
use crate::utils::context::Context;
use crate::utils::format::{format_string, Format};
use crate::utils::log::progress;
use crate::utils::os::{free_file, tempfile};
use crate::utils::parse::{parse_url, UrlFilter};
use crate::utils::sort::{sort_formats_after_seasons, sort_seasons_after_number};
use crate::Execute;
use anyhow::{bail, Result};
use crunchyroll_rs::media::{Resolution, StreamSubtitle};
use crunchyroll_rs::{Locale, Media, MediaCollection, Series};
use log::{debug, error, info};
use regex::Regex;
use std::collections::BTreeMap;
use std::io::Write;
use std::path::PathBuf;
use std::process::{Command, Stdio};
use tempfile::TempPath;
#[derive(Clone, Debug)]
pub enum MergeBehavior {
Auto,
Audio,
Video,
}
fn parse_merge_behavior(s: &str) -> Result<MergeBehavior, String> {
Ok(match s.to_lowercase().as_str() {
"auto" => MergeBehavior::Auto,
"audio" => MergeBehavior::Audio,
"video" => MergeBehavior::Video,
_ => return Err(format!("'{}' is not a valid merge behavior", s)),
})
}
#[derive(Debug, clap::Parser)]
#[clap(about = "Archive a video")]
#[command(arg_required_else_help(true))]
#[command()]
pub struct Archive {
#[arg(help = format!("Audio languages. Can be used multiple times. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Audio languages. Can be used multiple times. \
Available languages are:\n{}", Locale::all().into_iter().map(|l| format!("{:<6} {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
#[arg(short, long, default_values_t = vec![crate::utils::locale::system_locale(), Locale::ja_JP])]
audio: Vec<Locale>,
#[arg(help = format!("Subtitle languages. Can be used multiple times. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Subtitle languages. Can be used multiple times. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(short, long, default_values_t = Locale::all())]
subtitle: Vec<Locale>,
#[arg(help = "Name of the output file")]
#[arg(long_help = "Name of the output file.\n\
If you use one of the following patterns they will get replaced:\n \
{title} Title of the video\n \
{series_name} Name of the series\n \
{season_name} Name of the season\n \
{audio} Audio language of the video\n \
{resolution} Resolution of the video\n \
{season_number} Number of the season\n \
{episode_number} Number of the episode\n \
{series_id} ID of the series\n \
{season_id} ID of the season\n \
{episode_id} ID of the episode")]
#[arg(short, long, default_value = "{title}.mkv")]
output: String,
#[arg(help = "Video resolution")]
#[arg(long_help = "The video resolution.\n\
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
Specifying the exact pixels is not recommended, use one of the other options instead. \
Crunchyroll lets you choose the quality with the pixel abbreviation on their clients, so you might already be familiar with the available options. \
The available common-use words are 'best' (choose the best resolution available) and 'worst' (worst resolution available)")]
#[arg(short, long, default_value = "best")]
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
resolution: Resolution,
#[arg(
help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio' and 'video'"
)]
#[arg(
long_help = "Because of local restrictions (or other reasons) some episodes with different languages do not have the same length (e.g. when some scenes were cut out). \
With this flag you can set the behavior when handling multiple languages.
Valid options are 'audio' (stores one video and all other languages as audio only), 'video' (stores the video + audio for every language) and 'auto' (detects if videos differ in length: if so, behave like 'video' else like 'audio')"
)]
#[arg(short, long, default_value = "auto")]
#[arg(value_parser = parse_merge_behavior)]
merge: MergeBehavior,
#[arg(
help = "Set which subtitle language should be set as the default and automatically shown when starting a video"
)]
#[arg(long)]
default_subtitle: Option<Locale>,
#[arg(help = "Disable subtitle optimizations")]
#[arg(
long_help = "By default, Crunchyroll delivers subtitles in a format which may cause issues in some video players. \
These issues are fixed internally by setting a flag which is not part of the official specification of the subtitle format. \
If you do not want these fixes, or they cause more trouble than they solve (for you), they can be disabled with this flag"
)]
#[arg(long)]
no_subtitle_optimizations: bool,
#[arg(help = "Crunchyroll series url(s)")]
urls: Vec<String>,
}
#[async_trait::async_trait(?Send)]
impl Execute for Archive {
async fn execute(self, ctx: Context) -> Result<()> {
let mut parsed_urls = vec![];
for (i, url) in self.urls.iter().enumerate() {
let _progress_handler = progress!("Parsing url {}", i + 1);
match parse_url(&ctx.crunchy, url.clone(), true).await {
Ok((media_collection, url_filter)) => {
parsed_urls.push((media_collection, url_filter));
info!("Parsed url {}", i + 1)
}
Err(e) => bail!("url {} could not be parsed: {}", url, e),
}
}
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
let archive_formats = match media_collection {
MediaCollection::Series(series) => {
let _progress_handler = progress!("Fetching series details");
formats_from_series(&self, series, &url_filter).await?
}
MediaCollection::Season(_) => bail!("Archiving a season is not supported"),
MediaCollection::Episode(episode) => bail!("Archiving an episode is not supported. Use url filtering instead to specify the episode (https://www.crunchyroll.com/series/{}/{}[S{}E{}])", episode.metadata.series_id, episode.metadata.series_slug_title, episode.metadata.season_number, episode.metadata.episode_number),
MediaCollection::MovieListing(_) => bail!("Archiving a movie listing is not supported"),
MediaCollection::Movie(_) => bail!("Archiving a movie is not supported")
};
if archive_formats.is_empty() {
info!("Skipping url {} (no matching episodes found)", i + 1);
continue;
}
info!("Loaded series information for url {}", i + 1);
if log::max_level() == log::Level::Debug {
let seasons = sort_formats_after_seasons(
archive_formats
.clone()
.into_iter()
.map(|(a, _)| a.get(0).unwrap().clone())
.collect(),
);
debug!("Series has {} seasons", seasons.len());
for (i, season) in seasons.into_iter().enumerate() {
info!("Season {} ({})", i + 1, season.get(0).unwrap().season_title);
for format in season {
info!(
"{}: {}px, {:.02} FPS (S{:02}E{:02})",
format.title,
format.stream.resolution,
format.stream.fps,
format.season_number,
format.number,
)
}
}
} else {
for season in sort_formats_after_seasons(
archive_formats
.clone()
.into_iter()
.map(|(a, _)| a.get(0).unwrap().clone())
.collect(),
) {
let first = season.get(0).unwrap();
info!(
"{} Season {} ({})",
first.series_name, first.season_number, first.season_title
);
for (i, format) in season.into_iter().enumerate() {
tab_info!(
"{}. {} » {}px, {:.2} FPS (S{:02}E{:02})",
i + 1,
format.title,
format.stream.resolution,
format.stream.fps,
format.season_number,
format.number
)
}
}
}
for (formats, subtitles) in archive_formats {
let (primary, additionally) = formats.split_first().unwrap();
let mut path = PathBuf::from(&self.output);
path = free_file(
path.with_file_name(format_string(
if let Some(fname) = path.file_name() {
fname.to_str().unwrap()
} else {
"{title}.mkv"
}
.to_string(),
primary,
)),
)
.0;
info!(
"Downloading {} to '{}'",
primary.title,
path.to_str().unwrap()
);
tab_info!(
"Episode: S{:02}E{:02}",
primary.season_number,
primary.number
);
tab_info!(
"Audio: {} (primary), {}",
primary.audio,
additionally
.iter()
.map(|a| a.audio.to_string())
.collect::<Vec<String>>()
.join(", ")
);
tab_info!(
"Subtitle: {}",
subtitles
.iter()
.map(|s| {
if let Some(default) = &self.default_subtitle {
if default == &s.locale {
return format!("{} (primary)", default);
}
}
s.locale.to_string()
})
.collect::<Vec<String>>()
.join(", ")
);
tab_info!("Resolution: {}", primary.stream.resolution);
tab_info!("FPS: {:.2}", primary.stream.fps);
let mut video_paths = vec![];
let mut audio_paths = vec![];
let mut subtitle_paths = vec![];
video_paths.push((download_video(&ctx, primary, false).await?, primary));
for additional in additionally {
let only_audio = match self.merge {
MergeBehavior::Auto => additionally
.iter()
.all(|a| a.stream.bandwidth == primary.stream.bandwidth),
MergeBehavior::Audio => true,
MergeBehavior::Video => false,
};
let path = download_video(&ctx, additional, only_audio).await?;
if only_audio {
audio_paths.push((path, additional))
} else {
video_paths.push((path, additional))
}
}
for subtitle in subtitles {
subtitle_paths
.push((download_subtitle(&self, subtitle.clone()).await?, subtitle))
}
generate_mkv(&self, path, video_paths, audio_paths, subtitle_paths)?
}
}
Ok(())
}
}
async fn formats_from_series(
archive: &Archive,
series: Media<Series>,
url_filter: &UrlFilter,
) -> Result<Vec<(Vec<Format>, Vec<StreamSubtitle>)>> {
let mut seasons = series.seasons().await?;
// filter out any season which does not contain the specified audio languages
for season in sort_seasons_after_number(seasons.clone()) {
// get all locales which are specified but not present in the currently iterated season and
// print an error saying this
let not_present_audio = archive
.audio
.clone()
.into_iter()
.filter(|l| !season.iter().any(|s| &s.metadata.audio_locale == l))
.collect::<Vec<Locale>>();
for not_present in not_present_audio {
error!(
"Season {} of series {} is not available with {} audio",
season.first().unwrap().metadata.season_number,
series.title,
not_present
)
}
// remove all seasons with the wrong audio for the currently iterated season number
seasons.retain(|s| {
s.metadata.season_number != season.first().unwrap().metadata.season_number
|| archive.audio.contains(&s.metadata.audio_locale)
})
}
#[allow(clippy::type_complexity)]
let mut result: BTreeMap<u32, BTreeMap<u32, (Vec<Format>, Vec<StreamSubtitle>)>> =
BTreeMap::new();
for season in series.seasons().await? {
if !url_filter.is_season_valid(season.metadata.season_number)
|| !archive.audio.contains(&season.metadata.audio_locale)
{
continue;
}
for episode in season.episodes().await? {
if !url_filter.is_episode_valid(
episode.metadata.episode_number,
episode.metadata.season_number,
) {
continue;
}
let streams = episode.streams().await?;
let streaming_data = streams.streaming_data(None).await?;
let Some(stream) = find_resolution(streaming_data, &archive.resolution) else {
bail!(
"Resolution ({}x{}) is not available for episode {} ({}) of season {} ({}) of {}",
archive.resolution.width,
archive.resolution.height,
episode.metadata.episode_number,
episode.title,
episode.metadata.season_number,
episode.metadata.season_title,
episode.metadata.series_title
)
};
let (ref mut formats, _) = result
.entry(season.metadata.season_number)
.or_insert_with(BTreeMap::new)
.entry(episode.metadata.episode_number)
.or_insert_with(|| {
let subtitles: Vec<StreamSubtitle> = archive
.subtitle
.iter()
.filter_map(|l| streams.subtitles.get(l).cloned())
.collect();
(vec![], subtitles)
});
formats.push(Format::new_from_episode(episode, stream));
}
}
Ok(result.into_values().flat_map(|v| v.into_values()).collect())
}
async fn download_video(ctx: &Context, format: &Format, only_audio: bool) -> Result<TempPath> {
let tempfile = if only_audio {
tempfile(".aac")?
} else {
tempfile(".ts")?
};
let (_, path) = tempfile.into_parts();
let ffmpeg = Command::new("ffmpeg")
.stdin(Stdio::piped())
.stdout(Stdio::null())
.stderr(Stdio::piped())
.arg("-y")
.args(["-f", "mpegts", "-i", "pipe:"])
.args(if only_audio { vec!["-vn"] } else { vec![] })
.arg(path.to_str().unwrap())
.spawn()?;
download_segments(
ctx,
&mut ffmpeg.stdin.unwrap(),
Some(format!("Download {}", format.audio)),
format.stream.segments().await?,
)
.await?;
Ok(path)
}
async fn download_subtitle(archive: &Archive, subtitle: StreamSubtitle) -> Result<TempPath> {
let tempfile = tempfile(".ass")?;
let (mut file, path) = tempfile.into_parts();
let mut buf = vec![];
subtitle.write_to(&mut buf).await?;
if !archive.no_subtitle_optimizations {
buf = fix_subtitle(buf)
}
file.write_all(buf.as_slice())?;
Ok(path)
}
/// Add `ScaledBorderAndShadows: yes` to subtitles; without it they look very messy on some video
/// players. See [crunchy-labs/crunchy-cli#66](https://github.com/crunchy-labs/crunchy-cli/issues/66)
/// for more information.
fn fix_subtitle(raw: Vec<u8>) -> Vec<u8> {
let mut script_info = false;
let mut new = String::new();
for line in String::from_utf8_lossy(raw.as_slice()).split('\n') {
if line.trim().starts_with('[') && script_info {
new.push_str("ScaledBorderAndShadows: yes\n");
script_info = false
} else if line.trim() == "[Script Info]" {
script_info = true
}
new.push_str(line);
new.push('\n')
}
new.into_bytes()
}
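// A minimal sketch of what `fix_subtitle` produces for a small ASS header: the flag is appended
// right before the section that follows `[Script Info]`.
#[cfg(test)]
mod fix_subtitle_tests {
    use super::fix_subtitle;
    #[test]
    fn inserts_scaled_border_and_shadows() {
        let raw = b"[Script Info]\nTitle: example\n[V4+ Styles]\n".to_vec();
        let fixed = String::from_utf8(fix_subtitle(raw)).unwrap();
        assert!(fixed.contains("ScaledBorderAndShadows: yes\n[V4+ Styles]"));
    }
}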
fn generate_mkv(
archive: &Archive,
target: PathBuf,
video_paths: Vec<(TempPath, &Format)>,
audio_paths: Vec<(TempPath, &Format)>,
subtitle_paths: Vec<(TempPath, StreamSubtitle)>,
) -> Result<()> {
let mut input = vec![];
let mut maps = vec![];
let mut metadata = vec![];
let mut video_length = (0, 0, 0, 0);
for (i, (video_path, format)) in video_paths.iter().enumerate() {
input.extend(["-i".to_string(), video_path.to_string_lossy().to_string()]);
maps.extend(["-map".to_string(), i.to_string()]);
metadata.extend([
format!("-metadata:s:v:{}", i),
format!("language={}", format.audio),
]);
metadata.extend([
format!("-metadata:s:v:{}", i),
format!("title={}", format.audio.to_human_readable()),
]);
metadata.extend([
format!("-metadata:s:a:{}", i),
format!("language={}", format.audio),
]);
metadata.extend([
format!("-metadata:s:a:{}", i),
format!("title={}", format.audio.to_human_readable()),
]);
let vid_len = get_video_length(video_path.to_path_buf())?;
if vid_len > video_length {
video_length = vid_len
}
}
for (i, (audio_path, format)) in audio_paths.iter().enumerate() {
input.extend(["-i".to_string(), audio_path.to_string_lossy().to_string()]);
maps.extend(["-map".to_string(), (i + video_paths.len()).to_string()]);
metadata.extend([
format!("-metadata:s:a:{}", i + video_paths.len()),
format!("language={}", format.audio),
]);
metadata.extend([
format!("-metadata:s:a:{}", i + video_paths.len()),
format!("title={}", format.audio.to_human_readable()),
]);
}
for (i, (subtitle_path, subtitle)) in subtitle_paths.iter().enumerate() {
input.extend([
"-i".to_string(),
subtitle_path.to_string_lossy().to_string(),
]);
maps.extend([
"-map".to_string(),
(i + video_paths.len() + audio_paths.len()).to_string(),
]);
metadata.extend([
format!("-metadata:s:s:{}", i),
format!("language={}", subtitle.locale),
]);
metadata.extend([
format!("-metadata:s:s:{}", i),
format!("title={}", subtitle.locale.to_human_readable()),
]);
}
let mut command_args = vec!["-y".to_string()];
command_args.extend(input);
command_args.extend(maps);
command_args.extend(metadata);
// set default subtitle
if let Some(default_subtitle) = &archive.default_subtitle {
// if `--default-subtitle <locale>` is given, set the default subtitle to the given locale
if let Some(position) = subtitle_paths
.into_iter()
.position(|s| &s.1.locale == default_subtitle)
{
command_args.extend([format!("-disposition:s:{}", position), "default".to_string()])
} else {
command_args.extend(["-disposition:s:0".to_string(), "0".to_string()])
}
} else {
command_args.extend(["-disposition:s:0".to_string(), "0".to_string()])
}
command_args.extend([
"-c".to_string(),
"copy".to_string(),
"-f".to_string(),
"matroska".to_string(),
target.to_string_lossy().to_string(),
]);
debug!("ffmpeg {}", command_args.join(" "));
let ffmpeg = Command::new("ffmpeg")
.stdout(Stdio::null())
.stderr(Stdio::piped())
.args(command_args)
.output()?;
if !ffmpeg.status.success() {
bail!("{}", String::from_utf8_lossy(ffmpeg.stderr.as_slice()))
}
Ok(())
}
/// Get the length of a video. This is required because sometimes subtitles have an unnecessary entry
/// long after the actual video ends which artificially extends the video length on some video players.
/// To prevent this, the video length must be hard set with ffmpeg. See
/// [crunchy-labs/crunchy-cli#32](https://github.com/crunchy-labs/crunchy-cli/issues/32) for more
/// information.
fn get_video_length(path: PathBuf) -> Result<(u32, u32, u32, u32)> {
let video_length = Regex::new(r"Duration:\s?(\d+):(\d+):(\d+).(\d+),")?;
let ffmpeg = Command::new("ffmpeg")
.stdout(Stdio::null())
.stderr(Stdio::piped())
.arg("-y")
.args(["-i", path.to_str().unwrap()])
.output()?;
let ffmpeg_output = String::from_utf8(ffmpeg.stderr)?;
let caps = video_length.captures(ffmpeg_output.as_str()).unwrap();
Ok((
caps[1].parse()?,
caps[2].parse()?,
caps[3].parse()?,
caps[4].parse()?,
))
}
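// A small sketch of the Duration parsing above, assuming a typical ffmpeg stderr line such as
// "Duration: 00:23:40.12, start: 0.000000, bitrate: ...".
#[cfg(test)]
mod duration_regex_tests {
    use regex::Regex;
    #[test]
    fn parses_ffmpeg_duration_line() {
        let re = Regex::new(r"Duration:\s?(\d+):(\d+):(\d+).(\d+),").unwrap();
        let caps = re.captures("  Duration: 00:23:40.12, start: 0.000000").unwrap();
        let parsed: (u32, u32, u32, u32) = (
            caps[1].parse().unwrap(),
            caps[2].parse().unwrap(),
            caps[3].parse().unwrap(),
            caps[4].parse().unwrap(),
        );
        assert_eq!(parsed, (0, 23, 40, 12));
    }
}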

View file

@@ -0,0 +1,452 @@
use crate::cli::log::tab_info;
use crate::cli::utils::{download_segments, find_resolution};
use crate::utils::context::Context;
use crate::utils::format::{format_string, Format};
use crate::utils::log::progress;
use crate::utils::os::{free_file, has_ffmpeg};
use crate::utils::parse::{parse_url, UrlFilter};
use crate::utils::sort::{sort_formats_after_seasons, sort_seasons_after_number};
use crate::Execute;
use anyhow::{bail, Result};
use crunchyroll_rs::media::{Resolution, VariantSegment};
use crunchyroll_rs::{
Episode, Locale, Media, MediaCollection, Movie, MovieListing, Season, Series,
};
use log::{debug, error, info};
use std::fs::File;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
#[derive(Debug, clap::Parser)]
#[clap(about = "Download a video")]
#[command(arg_required_else_help(true))]
pub struct Download {
#[arg(help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
Available languages are:\n{}", Locale::all().into_iter().map(|l| format!("{:<6} {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
#[arg(short, long, default_value_t = crate::utils::locale::system_locale())]
audio: Locale,
#[arg(help = format!("Subtitle language. Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Subtitle language. If set, the subtitle will be burned into the video and cannot be disabled. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(short, long)]
subtitle: Option<Locale>,
#[arg(help = "Name of the output file")]
#[arg(long_help = "Name of the output file.\n\
If you use one of the following patterns they will get replaced:\n \
{title} Title of the video\n \
{series_name} Name of the series\n \
{season_name} Name of the season\n \
{audio} Audio language of the video\n \
{resolution} Resolution of the video\n \
{season_number} Number of the season\n \
{episode_number} Number of the episode\n \
{series_id} ID of the series\n \
{season_id} ID of the season\n \
{episode_id} ID of the episode")]
#[arg(short, long, default_value = "{title}.ts")]
output: String,
#[arg(help = "Video resolution")]
#[arg(long_help = "The video resolution.\n\
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
Specifying the exact pixels is not recommended, use one of the other options instead. \
Crunchyroll lets you choose the quality with the pixel abbreviation on their clients, so you might already be familiar with the available options. \
The available common-use words are 'best' (choose the best resolution available) and 'worst' (worst resolution available)")]
#[arg(short, long, default_value = "best")]
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
resolution: Resolution,
#[arg(help = "Url(s) to Crunchyroll episodes or series")]
urls: Vec<String>,
}
#[async_trait::async_trait(?Send)]
impl Execute for Download {
async fn execute(self, ctx: Context) -> Result<()> {
let mut parsed_urls = vec![];
for (i, url) in self.urls.iter().enumerate() {
let _progress_handler = progress!("Parsing url {}", i + 1);
match parse_url(&ctx.crunchy, url.clone(), true).await {
Ok((media_collection, url_filter)) => {
parsed_urls.push((media_collection, url_filter));
info!("Parsed url {}", i + 1)
}
Err(e) => bail!("url {} could not be parsed: {}", url, e),
}
}
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
let _progress_handler = progress!("Fetching series details");
let formats = match media_collection {
MediaCollection::Series(series) => {
debug!("Url {} is series ({})", i + 1, series.title);
formats_from_series(&self, series, &url_filter).await?
}
MediaCollection::Season(season) => {
debug!(
"Url {} is season {} ({})",
i + 1,
season.metadata.season_number,
season.title
);
formats_from_season(&self, season, &url_filter).await?
}
MediaCollection::Episode(episode) => {
debug!(
"Url {} is episode {} ({}) of season {} ({}) of {}",
i + 1,
episode.metadata.episode_number,
episode.title,
episode.metadata.season_number,
episode.metadata.season_title,
episode.metadata.series_title
);
format_from_episode(&self, episode, &url_filter, false)
.await?
.map(|fmt| vec![fmt])
}
MediaCollection::MovieListing(movie_listing) => {
debug!("Url {} is movie listing ({})", i + 1, movie_listing.title);
format_from_movie_listing(&self, movie_listing, &url_filter).await?
}
MediaCollection::Movie(movie) => {
debug!("Url {} is movie ({})", i + 1, movie.title);
format_from_movie(&self, movie, &url_filter)
.await?
.map(|fmt| vec![fmt])
}
};
let Some(formats) = formats else {
info!("Skipping url {} (no matching episodes found)", i + 1);
continue;
};
info!("Loaded series information for url {}", i + 1);
drop(_progress_handler);
if log::max_level() == log::Level::Debug {
let seasons = sort_formats_after_seasons(formats.clone());
debug!("Series has {} seasons", seasons.len());
for (i, season) in seasons.into_iter().enumerate() {
info!("Season {} ({})", i + 1, season.get(0).unwrap().season_title);
for format in season {
info!(
"{}: {}px, {:.02} FPS (S{:02}E{:02})",
format.title,
format.stream.resolution,
format.stream.fps,
format.season_number,
format.number,
)
}
}
} else {
for season in sort_formats_after_seasons(formats.clone()) {
let first = season.get(0).unwrap();
info!(
"{} Season {} ({})",
first.series_name, first.season_number, first.season_title
);
for (i, format) in season.into_iter().enumerate() {
tab_info!(
"{}. {} » {}px, {:.2} FPS (S{:02}E{:02})",
i + 1,
format.title,
format.stream.resolution,
format.stream.fps,
format.season_number,
format.number
)
}
}
}
for format in formats {
let mut path = PathBuf::from(&self.output);
path = free_file(
path.with_file_name(format_string(
if let Some(fname) = path.file_name() {
fname.to_str().unwrap()
} else {
"{title}.ts"
}
.to_string(),
&format,
)),
)
.0;
let use_ffmpeg = if let Some(extension) = path.extension() {
if extension != "ts" {
if !has_ffmpeg() {
bail!(
"File ending is not `.ts`, ffmpeg is required to convert the video"
)
}
true
} else {
false
}
} else {
false
};
info!(
"Downloading {} to '{}'",
format.title,
path.file_name().unwrap().to_str().unwrap()
);
tab_info!("Episode: S{:02}E{:02}", format.season_number, format.number);
tab_info!("Audio: {}", format.audio);
tab_info!(
"Subtitles: {}",
self.subtitle
.clone()
.map_or("None".to_string(), |l| l.to_string())
);
tab_info!("Resolution: {}", format.stream.resolution);
tab_info!("FPS: {:.2}", format.stream.fps);
let segments = format.stream.segments().await?;
if use_ffmpeg {
download_ffmpeg(&ctx, segments, path.as_path()).await?;
} else if path.to_str().unwrap() == "-" {
let mut stdout = std::io::stdout().lock();
download_segments(&ctx, &mut stdout, None, segments).await?;
} else {
let mut file = File::options().create(true).write(true).open(&path)?;
download_segments(&ctx, &mut file, None, segments).await?
}
}
}
Ok(())
}
}
async fn download_ffmpeg(
ctx: &Context,
segments: Vec<VariantSegment>,
target: &Path,
) -> Result<()> {
let ffmpeg = Command::new("ffmpeg")
.stdin(Stdio::piped())
.stdout(Stdio::null())
.stderr(Stdio::piped())
.arg("-y")
.args(["-f", "mpegts", "-i", "pipe:"])
.args(["-safe", "0"])
.args(["-c", "copy"])
.arg(target.to_str().unwrap())
.spawn()?;
download_segments(ctx, &mut ffmpeg.stdin.unwrap(), None, segments).await?;
Ok(())
}
async fn formats_from_series(
download: &Download,
series: Media<Series>,
url_filter: &UrlFilter,
) -> Result<Option<Vec<Format>>> {
if !series.metadata.audio_locales.is_empty()
&& !series.metadata.audio_locales.contains(&download.audio)
{
error!(
"Series {} is not available with {} audio",
series.title, download.audio
);
return Ok(None);
}
let mut seasons = series.seasons().await?;
// filter out any season which does not contain the specified audio language
for season in sort_seasons_after_number(seasons.clone()) {
// check if the currently iterated season has the specified audio language
if !season
.iter()
.any(|s| s.metadata.audio_locale == download.audio)
{
error!(
"Season {} of series {} is not available with {} audio",
season.first().unwrap().metadata.season_number,
series.title,
download.audio
);
}
// remove all seasons with the wrong audio for the currently iterated season number
seasons.retain(|s| {
s.metadata.season_number != season.first().unwrap().metadata.season_number
|| s.metadata.audio_locale == download.audio
})
}
let mut formats = vec![];
for season in seasons {
if let Some(fmts) = formats_from_season(download, season, url_filter).await? {
formats.extend(fmts)
}
}
Ok(some_vec_or_none(formats))
}
async fn formats_from_season(
download: &Download,
season: Media<Season>,
url_filter: &UrlFilter,
) -> Result<Option<Vec<Format>>> {
if season.metadata.audio_locale != download.audio {
error!(
"Season {} ({}) is not available with {} audio",
season.metadata.season_number, season.title, download.audio
);
return Ok(None);
} else if !url_filter.is_season_valid(season.metadata.season_number) {
return Ok(None);
}
let mut formats = vec![];
for episode in season.episodes().await? {
if let Some(fmt) = format_from_episode(download, episode, url_filter, true).await? {
formats.push(fmt)
}
}
Ok(some_vec_or_none(formats))
}
async fn format_from_episode(
download: &Download,
episode: Media<Episode>,
url_filter: &UrlFilter,
filter_audio: bool,
) -> Result<Option<Format>> {
if filter_audio && episode.metadata.audio_locale != download.audio {
error!(
"Episode {} ({}) of season {} ({}) of {} has no {} audio",
episode.metadata.episode_number,
episode.title,
episode.metadata.season_number,
episode.metadata.season_title,
episode.metadata.series_title,
download.audio
);
return Ok(None);
} else if !url_filter.is_episode_valid(
episode.metadata.episode_number,
episode.metadata.season_number,
) {
return Ok(None);
}
let streams = episode.streams().await?;
let streaming_data = if let Some(subtitle) = &download.subtitle {
if !streams.subtitles.keys().cloned().any(|x| &x == subtitle) {
error!(
"Episode {} ({}) of season {} ({}) of {} has no {} subtitles",
episode.metadata.episode_number,
episode.title,
episode.metadata.season_number,
episode.metadata.season_title,
episode.metadata.series_title,
subtitle
);
return Ok(None);
}
streams.streaming_data(Some(subtitle.clone())).await?
} else {
streams.streaming_data(None).await?
};
let Some(stream) = find_resolution(streaming_data, &download.resolution) else {
bail!(
"Resolution ({}x{}) is not available for episode {} ({}) of season {} ({}) of {}",
download.resolution.width,
download.resolution.height,
episode.metadata.episode_number,
episode.title,
episode.metadata.season_number,
episode.metadata.season_title,
episode.metadata.series_title
)
};
Ok(Some(Format::new_from_episode(episode, stream)))
}
async fn format_from_movie_listing(
download: &Download,
movie_listing: Media<MovieListing>,
url_filter: &UrlFilter,
) -> Result<Option<Vec<Format>>> {
let mut formats = vec![];
for movie in movie_listing.movies().await? {
if let Some(fmt) = format_from_movie(download, movie, url_filter).await? {
formats.push(fmt)
}
}
Ok(some_vec_or_none(formats))
}
async fn format_from_movie(
download: &Download,
movie: Media<Movie>,
_: &UrlFilter,
) -> Result<Option<Format>> {
let streams = movie.streams().await?;
let mut streaming_data = if let Some(subtitle) = &download.subtitle {
if !streams.subtitles.keys().cloned().any(|x| &x == subtitle) {
error!("Movie {} has no {} subtitles", movie.title, subtitle);
return Ok(None);
}
streams.streaming_data(Some(subtitle.clone())).await?
} else {
streams.streaming_data(None).await?
};
streaming_data.sort_by(|a, b| a.resolution.width.cmp(&b.resolution.width).reverse());
let stream = {
match download.resolution.height {
u64::MAX => streaming_data.into_iter().next().unwrap(),
u64::MIN => streaming_data.into_iter().last().unwrap(),
_ => {
if let Some(streaming_data) = streaming_data.into_iter().find(|v| {
download.resolution.height == u64::MAX
|| v.resolution.height == download.resolution.height
}) {
streaming_data
} else {
bail!(
"Resolution ({}x{}) is not available for movie {}",
download.resolution.width,
download.resolution.height,
movie.title
)
}
}
}
};
Ok(Some(Format::new_from_movie(movie, stream)))
}
fn some_vec_or_none<T>(v: Vec<T>) -> Option<Vec<T>> {
if v.is_empty() {
None
} else {
Some(v)
}
}

View file

@@ -0,0 +1,197 @@
use log::{
set_boxed_logger, set_max_level, Level, LevelFilter, Log, Metadata, Record, SetLoggerError,
};
use std::io::{stdout, Write};
use std::sync::{mpsc, Mutex};
use std::thread;
use std::thread::JoinHandle;
use std::time::Duration;
struct CliProgress {
handler: JoinHandle<()>,
sender: mpsc::SyncSender<(String, Level)>,
}
impl CliProgress {
fn new(record: &Record) -> Self {
let (tx, rx) = mpsc::sync_channel(1);
let init_message = format!("{}", record.args());
let init_level = record.level();
let handler = thread::spawn(move || {
let states = ["-", "\\", "|", "/"];
let mut old_message = init_message.clone();
let mut latest_info_message = init_message;
let mut old_level = init_level;
for i in 0.. {
let (msg, level) = match rx.try_recv() {
Ok(payload) => payload,
Err(e) => match e {
mpsc::TryRecvError::Empty => (old_message.clone(), old_level),
mpsc::TryRecvError::Disconnected => break,
},
};
// clear last line
// prefix (2), space (1), state (1), space (1), message(n)
let _ = write!(stdout(), "\r {}", " ".repeat(old_message.len()));
if old_level != level || old_message != msg {
if old_level <= Level::Warn {
let _ = writeln!(stdout(), "\r:: • {}", old_message);
} else if old_level == Level::Info && level <= Level::Warn {
let _ = writeln!(stdout(), "\r:: → {}", old_message);
} else if level == Level::Info {
latest_info_message = msg.clone();
}
}
let _ = write!(
stdout(),
"\r:: {} {}",
states[i / 2 % states.len()],
if level == Level::Info {
&msg
} else {
&latest_info_message
}
);
let _ = stdout().flush();
old_message = msg;
old_level = level;
thread::sleep(Duration::from_millis(100));
}
// clear last line
// prefix (2), space (1), state (1), space (1), message(n)
let _ = write!(stdout(), "\r {}", " ".repeat(old_message.len()));
let _ = writeln!(stdout(), "\r:: ✓ {}", old_message);
let _ = stdout().flush();
});
Self {
handler,
sender: tx,
}
}
fn send(&self, record: &Record) {
let _ = self
.sender
.send((format!("{}", record.args()), record.level()));
}
fn stop(self) {
drop(self.sender);
let _ = self.handler.join();
}
}
#[allow(clippy::type_complexity)]
pub struct CliLogger {
level: LevelFilter,
progress: Mutex<Option<CliProgress>>,
}
impl Log for CliLogger {
fn enabled(&self, metadata: &Metadata) -> bool {
metadata.level() <= self.level
}
fn log(&self, record: &Record) {
if !self.enabled(record.metadata())
|| (record.target() != "progress"
&& record.target() != "progress_end"
&& !record.target().starts_with("crunchy_cli"))
{
return;
}
if self.level >= LevelFilter::Debug {
self.extended(record);
return;
}
match record.target() {
"progress" => self.progress(record, false),
"progress_end" => self.progress(record, true),
_ => {
if self.progress.lock().unwrap().is_some() {
self.progress(record, false);
} else if record.level() > Level::Warn {
self.normal(record)
} else {
self.error(record)
}
}
}
}
fn flush(&self) {
let _ = stdout().flush();
}
}
impl CliLogger {
pub fn new(level: LevelFilter) -> Self {
Self {
level,
progress: Mutex::new(None),
}
}
pub fn init(level: LevelFilter) -> Result<(), SetLoggerError> {
set_max_level(level);
set_boxed_logger(Box::new(CliLogger::new(level)))
}
fn extended(&self, record: &Record) {
println!(
"[{}] {} {} ({}) {}",
chrono::Utc::now().format("%Y-%m-%d %H:%M:%S"),
record.level(),
// replace the 'progress' prefix if this function is invoked via 'progress!'
record
.target()
.replacen("progress", "crunchy_cli", 1)
.replacen("progress_end", "crunchy_cli", 1),
format!("{:?}", thread::current().id())
.replace("ThreadId(", "")
.replace(')', ""),
record.args()
)
}
fn normal(&self, record: &Record) {
println!(":: {}", record.args())
}
fn error(&self, record: &Record) {
eprintln!(":: {}", record.args())
}
fn progress(&self, record: &Record, stop: bool) {
let mut progress_option = self.progress.lock().unwrap();
if stop && progress_option.is_some() {
progress_option.take().unwrap().stop()
} else if let Some(p) = &*progress_option {
p.send(record);
} else {
*progress_option = Some(CliProgress::new(record))
}
}
}
macro_rules! tab_info {
($($arg:tt)+) => {
if log::max_level() == log::LevelFilter::Debug {
info!($($arg)+)
} else {
info!("\t{}", format!($($arg)+))
}
}
}
pub(crate) use tab_info;

View file

@@ -0,0 +1,39 @@
use crate::utils::context::Context;
use crate::Execute;
use anyhow::bail;
use anyhow::Result;
use crunchyroll_rs::crunchyroll::SessionToken;
use std::fs;
use std::path::PathBuf;
#[derive(Debug, clap::Parser)]
#[clap(about = "Save your login credentials persistently on disk")]
pub struct Login {
#[arg(help = "Remove your stored credentials (instead of saving them)")]
#[arg(long)]
pub remove: bool,
}
#[async_trait::async_trait(?Send)]
impl Execute for Login {
async fn execute(self, ctx: Context) -> Result<()> {
if let Some(login_file_path) = login_file_path() {
match ctx.crunchy.session_token().await {
SessionToken::RefreshToken(refresh_token) => Ok(fs::write(
login_file_path,
format!("refresh_token:{}", refresh_token),
)?),
SessionToken::EtpRt(etp_rt) => {
Ok(fs::write(login_file_path, format!("etp_rt:{}", etp_rt))?)
}
SessionToken::Anonymous => bail!("Anonymous login cannot be saved"),
}
} else {
bail!("Cannot find config path")
}
}
}
pub fn login_file_path() -> Option<PathBuf> {
dirs::config_dir().map(|config_dir| config_dir.join(".crunchy-cli-core"))
}

View file

@@ -0,0 +1,5 @@
pub mod archive;
pub mod download;
pub mod log;
pub mod login;
mod utils;

View file

@@ -0,0 +1,178 @@
use crate::utils::context::Context;
use anyhow::Result;
use crunchyroll_rs::media::{Resolution, VariantData, VariantSegment};
use isahc::AsyncReadResponseExt;
use log::{debug, LevelFilter};
use std::borrow::{Borrow, BorrowMut};
use std::collections::BTreeMap;
use std::io;
use std::io::Write;
use std::sync::{mpsc, Arc, Mutex};
use std::time::Duration;
use tokio::task::JoinSet;
pub fn find_resolution(
mut streaming_data: Vec<VariantData>,
resolution: &Resolution,
) -> Option<VariantData> {
streaming_data.sort_by(|a, b| a.resolution.width.cmp(&b.resolution.width).reverse());
match resolution.height {
u64::MAX => Some(streaming_data.into_iter().next().unwrap()),
u64::MIN => Some(streaming_data.into_iter().last().unwrap()),
_ => streaming_data
.into_iter()
.find(|v| resolution.height == u64::MAX || v.resolution.height == resolution.height),
}
}
pub async fn download_segments(
ctx: &Context,
writer: &mut impl Write,
message: Option<String>,
segments: Vec<VariantSegment>,
) -> Result<()> {
let total_segments = segments.len();
let client = Arc::new(ctx.client.clone());
let count = Arc::new(Mutex::new(0));
let amount = Arc::new(Mutex::new(0));
// only print progress when log level is info
let output_handler = if log::max_level() == LevelFilter::Info {
let output_count = count.clone();
let output_amount = amount.clone();
Some(tokio::spawn(async move {
let sleep_time_ms = 100;
let iter_per_sec = 1000f64 / sleep_time_ms as f64;
let mut bytes_start = 0f64;
let mut speed = 0f64;
let mut percentage = 0f64;
while *output_count.lock().unwrap() < total_segments || percentage < 100f64 {
let tmp_amount = *output_amount.lock().unwrap() as f64;
let tmp_speed = (tmp_amount - bytes_start) / 1024f64 / 1024f64;
if *output_count.lock().unwrap() < 3 {
speed = tmp_speed;
} else {
let (old_speed_ratio, new_speed_ratio) = if iter_per_sec <= 1f64 {
(0f64, 1f64)
} else {
(1f64 - (1f64 / iter_per_sec), (1f64 / iter_per_sec))
};
// calculate the average download speed "smoother"
speed = (speed * old_speed_ratio) + (tmp_speed * new_speed_ratio);
}
percentage =
(*output_count.lock().unwrap() as f64 / total_segments as f64) * 100f64;
let size = terminal_size::terminal_size()
.unwrap_or((terminal_size::Width(60), terminal_size::Height(0)))
.0
.0 as usize;
let progress_available = size
- if let Some(msg) = &message {
35 + msg.len()
} else {
33
};
let progress_done_count =
(progress_available as f64 * (percentage / 100f64)).ceil() as usize;
let progress_to_do_count = progress_available - progress_done_count;
let _ = write!(
io::stdout(),
"\r:: {}{:>5.1} MiB {:>5.2} MiB/s [{}{}] {:>3}%",
message.clone().map_or("".to_string(), |msg| msg + " "),
tmp_amount / 1024f64 / 1024f64,
speed * iter_per_sec,
"#".repeat(progress_done_count),
"-".repeat(progress_to_do_count),
percentage as usize
);
bytes_start = tmp_amount;
tokio::time::sleep(Duration::from_millis(sleep_time_ms)).await;
}
println!()
}))
} else {
None
};
let cpus = num_cpus::get();
let mut segs: Vec<Vec<VariantSegment>> = Vec::with_capacity(cpus);
for _ in 0..cpus {
segs.push(vec![])
}
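// distribute the segments round-robin over the workers: worker `n` downloads segments
// n, n + cpus, n + 2 * cpus, ...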
for (i, segment) in segments.into_iter().enumerate() {
segs[i % cpus].push(segment);
}
let (sender, receiver) = mpsc::channel();
let mut join_set: JoinSet<Result<()>> = JoinSet::new();
for num in 0..cpus {
let thread_client = client.clone();
let thread_sender = sender.clone();
let thread_segments = segs.remove(0);
let thread_amount = amount.clone();
let thread_count = count.clone();
join_set.spawn(async move {
for (i, segment) in thread_segments.into_iter().enumerate() {
let mut response = thread_client.get_async(&segment.url).await?;
let mut buf = response.bytes().await?.to_vec();
*thread_amount.lock().unwrap() += buf.len();
buf = VariantSegment::decrypt(buf.borrow_mut(), segment.key)?.to_vec();
debug!(
"Downloaded and decrypted segment {} ({})",
num + (i * cpus),
segment.url
);
thread_sender.send((num + (i * cpus), buf))?;
*thread_count.lock().unwrap() += 1;
}
Ok(())
});
}
let mut data_pos = 0usize;
let mut buf: BTreeMap<usize, Vec<u8>> = BTreeMap::new();
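// reassemble the segments in their original order: out-of-order segments are parked in the
// BTreeMap until the next expected index (`data_pos`) has been received and written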
loop {
// always returns `Ok` because the original `sender` in this scope is never dropped, even after all worker threads have finished
let data = receiver.recv().unwrap();
if data_pos == data.0 {
writer.write_all(data.1.borrow())?;
data_pos += 1;
} else {
buf.insert(data.0, data.1);
}
while let Some(b) = buf.remove(&data_pos) {
writer.write_all(b.borrow())?;
data_pos += 1;
}
if *count.lock().unwrap() >= total_segments {
break;
}
}
while let Some(joined) = join_set.join_next().await {
joined??
}
if let Some(handler) = output_handler {
handler.await?
}
Ok(())
}

crunchy-cli-core/src/lib.rs
View file

@@ -0,0 +1,196 @@
use crate::cli::log::CliLogger;
use crate::utils::context::Context;
use crate::utils::locale::system_locale;
use crate::utils::log::progress;
use anyhow::bail;
use anyhow::Result;
use clap::{Parser, Subcommand};
use crunchyroll_rs::{Crunchyroll, Locale};
use log::{debug, error, info, LevelFilter};
use std::{env, fs};
mod cli;
mod utils;
pub use cli::{archive::Archive, download::Download, login::Login};
#[async_trait::async_trait(?Send)]
trait Execute {
async fn execute(self, ctx: Context) -> Result<()>;
}
#[derive(Debug, Parser)]
#[clap(author, version, about)]
#[clap(name = "crunchy-cli")]
pub struct Cli {
#[clap(flatten)]
verbosity: Option<Verbosity>,
#[arg(help = "Override the language in which results are returned. Defaults to your system language")]
#[arg(long)]
lang: Option<Locale>,
#[clap(flatten)]
login_method: LoginMethod,
#[clap(subcommand)]
command: Command,
}
#[derive(Debug, Subcommand)]
enum Command {
Archive(Archive),
Download(Download),
Login(Login),
}
#[derive(Debug, Parser)]
struct Verbosity {
#[arg(help = "Verbose output")]
#[arg(short)]
v: bool,
#[arg(help = "Quiet output. Does not print anything unless it's an error")]
#[arg(long_help = "Quiet output. Does not print anything unless it's an error. Can be helpful if you pipe the output to stdout")]
#[arg(short)]
q: bool,
}
#[derive(Debug, Parser)]
struct LoginMethod {
#[arg(help = "Login with credentials (username or email and password)")]
#[arg(long_help = "Login with credentials (username or email and password). Must be provided as user:password")]
#[arg(long)]
credentials: Option<String>,
#[arg(help = "Login with the etp-rt cookie")]
#[arg(long_help = "Login with the etp-rt cookie. This can be obtained when you log in on crunchyroll.com and extract it from there")]
#[arg(long)]
etp_rt: Option<String>,
}
pub async fn cli_entrypoint() {
let cli: Cli = Cli::parse();
if let Some(verbosity) = &cli.verbosity {
if verbosity.v && verbosity.q {
eprintln!("Output cannot be verbose ('-v') and quiet ('-q') at the same time");
std::process::exit(1)
} else if verbosity.v {
CliLogger::init(LevelFilter::Debug).unwrap()
} else if verbosity.q {
CliLogger::init(LevelFilter::Error).unwrap()
}
} else {
CliLogger::init(LevelFilter::Info).unwrap()
}
debug!("cli input: {:?}", cli);
let ctx = match create_ctx(&cli).await {
Ok(ctx) => ctx,
Err(e) => {
error!("{}", e);
std::process::exit(1)
}
};
debug!("Created context");
ctrlc::set_handler(move || {
debug!("Ctrl-c detected");
if let Ok(dir) = fs::read_dir(&env::temp_dir()) {
for file in dir.flatten() {
if file
.path()
.file_name()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.starts_with(".crunchy-cli_")
{
let result = fs::remove_file(file.path());
debug!(
"Ctrl-c removed temporary file {} {}",
file.path().to_string_lossy(),
if result.is_ok() {
"successfully"
} else {
"not successfully"
}
)
}
}
}
std::process::exit(1)
})
.unwrap();
debug!("Created ctrl-c handler");
let result = match cli.command {
Command::Archive(archive) => archive.execute(ctx).await,
Command::Download(download) => download.execute(ctx).await,
Command::Login(login) => {
if login.remove {
Ok(())
} else {
login.execute(ctx).await
}
}
};
if let Err(err) = result {
error!("{}", err);
std::process::exit(1)
}
}
async fn create_ctx(cli: &Cli) -> Result<Context> {
let crunchy = crunchyroll_session(cli).await?;
// TODO: Use crunchy.client() when it's possible
// currently crunchy.client() has a built-in cloudflare bypass to access crunchyroll. the servers
// where crunchy stores their videos can't handle this bypass and simply refuse to connect
let client = isahc::HttpClient::new().unwrap();
Ok(Context { crunchy, client })
}
async fn crunchyroll_session(cli: &Cli) -> Result<Crunchyroll> {
let mut builder = Crunchyroll::builder();
builder.locale(cli.lang.clone().unwrap_or_else(system_locale));
let _progress_handler = progress!("Logging in");
if cli.login_method.credentials.is_none() && cli.login_method.etp_rt.is_none() {
if let Some(login_file_path) = cli::login::login_file_path() {
if login_file_path.exists() {
let session = fs::read_to_string(login_file_path)?;
if let Some((token_type, token)) = session.split_once(':') {
match token_type {
"refresh_token" => {
return Ok(builder.login_with_refresh_token(token).await?)
}
"etp_rt" => return Ok(builder.login_with_etp_rt(token).await?),
_ => (),
}
}
bail!("Could not read stored session ('{}')", session)
}
}
bail!("Please use a login method ('--credentials' or '--etp-rt')")
} else if cli.login_method.credentials.is_some() && cli.login_method.etp_rt.is_some() {
bail!("Please use only one login method ('--credentials' or '--etp-rt')")
}
let crunchy = if let Some(credentials) = &cli.login_method.credentials {
if let Some((user, password)) = credentials.split_once(':') {
builder.login_with_credentials(user, password).await?
} else {
bail!("Invalid credentials format. Please provide your credentials as user:password")
}
} else if let Some(etp_rt) = &cli.login_method.etp_rt {
builder.login_with_etp_rt(etp_rt).await?
} else {
bail!("should never happen")
};
info!("Logged in");
Ok(crunchy)
}

View file

@@ -0,0 +1,6 @@
use crate::utils::parse::parse_resolution;
use crunchyroll_rs::media::Resolution;
pub fn clap_parse_resolution(s: &str) -> Result<Resolution, String> {
parse_resolution(s.to_string()).map_err(|e| e.to_string())
}

View file

@@ -0,0 +1,6 @@
use crunchyroll_rs::Crunchyroll;
pub struct Context {
pub crunchy: Crunchyroll,
pub client: isahc::HttpClient,
}

View file

@@ -0,0 +1,77 @@
use crunchyroll_rs::media::VariantData;
use crunchyroll_rs::{Episode, Locale, Media, Movie};
use std::time::Duration;
#[derive(Clone)]
pub struct Format {
pub id: String,
pub title: String,
pub description: String,
pub number: u32,
pub audio: Locale,
pub duration: Duration,
pub stream: VariantData,
pub series_id: String,
pub series_name: String,
pub season_id: String,
pub season_title: String,
pub season_number: u32,
}
impl Format {
pub fn new_from_episode(episode: Media<Episode>, stream: VariantData) -> Self {
Self {
id: episode.id,
title: episode.title,
description: episode.description,
number: episode.metadata.episode_number,
audio: episode.metadata.audio_locale,
duration: episode.metadata.duration.to_std().unwrap(),
stream,
series_id: episode.metadata.series_id,
series_name: episode.metadata.series_title,
season_id: episode.metadata.season_id,
season_title: episode.metadata.season_title,
season_number: episode.metadata.season_number,
}
}
pub fn new_from_movie(movie: Media<Movie>, stream: VariantData) -> Self {
Self {
id: movie.id,
title: movie.title,
description: movie.description,
number: 1,
audio: Locale::ja_JP,
duration: movie.metadata.duration.to_std().unwrap(),
stream,
series_id: movie.metadata.movie_listing_id.clone(),
series_name: movie.metadata.movie_listing_title.clone(),
season_id: movie.metadata.movie_listing_id,
season_title: movie.metadata.movie_listing_title,
season_number: 1,
}
}
}
pub fn format_string(s: String, format: &Format) -> String {
s.replace("{title}", &format.title)
.replace("{series_name}", &format.series_name)
.replace("{season_name}", &format.season_title)
.replace("{audio}", &format.audio.to_string())
.replace("{resolution}", &format.stream.resolution.to_string())
.replace("{season_number}", &format.season_number.to_string())
.replace("{episode_number}", &format.number.to_string())
.replace("{series_id}", &format.series_id)
.replace("{season_id}", &format.season_id)
.replace("{episode_id}", &format.id)
}
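// A minimal illustration of the replace-based templating above, applied to plain strings since a
// full `Format` requires `VariantData` from the Crunchyroll API.
#[cfg(test)]
mod format_string_tests {
    #[test]
    fn placeholder_replacement() {
        let rendered = "{title} - S{season_number}E{episode_number}.mkv"
            .replace("{title}", "Some Episode")
            .replace("{season_number}", "1")
            .replace("{episode_number}", "5");
        assert_eq!(rendered, "Some Episode - S1E5.mkv");
    }
}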

View file

@@ -0,0 +1,15 @@
use crunchyroll_rs::Locale;
/// Return the locale of the system.
pub fn system_locale() -> Locale {
if let Some(system_locale) = sys_locale::get_locale() {
let locale = Locale::from(system_locale);
if let Locale::Custom(_) = locale {
Locale::en_US
} else {
locale
}
} else {
Locale::en_US
}
}

View file

@@ -0,0 +1,19 @@
use log::info;
pub struct ProgressHandler;
impl Drop for ProgressHandler {
fn drop(&mut self) {
info!(target: "progress_end", "")
}
}
macro_rules! progress {
($($arg:tt)+) => {
{
log::info!(target: "progress", $($arg)+);
$crate::utils::log::ProgressHandler{}
}
}
}
pub(crate) use progress;

View file

@@ -0,0 +1,8 @@
pub mod clap;
pub mod context;
pub mod format;
pub mod locale;
pub mod log;
pub mod os;
pub mod parse;
pub mod sort;

View file

@@ -0,0 +1,52 @@
use log::debug;
use std::io::ErrorKind;
use std::path::PathBuf;
use std::process::Command;
use std::{env, io};
use tempfile::{Builder, NamedTempFile};
pub fn has_ffmpeg() -> bool {
if let Err(e) = Command::new("ffmpeg").spawn() {
if ErrorKind::NotFound != e.kind() {
debug!(
"unknown error occurred while checking if ffmpeg exists: {}",
e.kind()
)
}
false
} else {
true
}
}
/// Any tempfiles should be created with this function. The prefix and directory of every file
/// created with this method stay the same, which makes it easy to query all existing tempfiles
/// and e.g. remove them in case of a ctrl-c. Having one function also helps to prevent mistakes
/// like setting the wrong prefix when doing it manually.
pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
let tempfile = Builder::default()
.prefix(".crunchy-cli_")
.suffix(suffix.as_ref())
.tempfile_in(&env::temp_dir())?;
debug!(
"Created temporary file: {}",
tempfile.path().to_string_lossy()
);
Ok(tempfile)
}
/// Check if the given path exists and, if it does, adjust the file name until a path is found that does not exist yet.
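/// E.g. if `video.mkv` already exists, `video (1).mkv` is tried next, then `video (2).mkv`, and so on.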
pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
let mut i = 0;
while path.exists() {
i += 1;
let ext = path.extension().unwrap().to_str().unwrap();
let mut filename = path.file_name().unwrap().to_str().unwrap();
filename = &filename[0..filename.len() - ext.len() - 1];
path.set_file_name(format!("{} ({}).{}", filename, i, ext))
}
(path, i != 0)
}

View file

@@ -0,0 +1,170 @@
use anyhow::{anyhow, bail, Result};
use crunchyroll_rs::media::Resolution;
use crunchyroll_rs::{Crunchyroll, MediaCollection, UrlType};
use log::debug;
use regex::Regex;
/// Define a filter, based on season and episode numbers, to filter episodes / movies.
/// If a struct instance equals [`Default::default()`], it is considered that no filter is applied.
/// If `from_*` is [`None`] it is treated as [`u32::MIN`].
/// If `to_*` is [`None`] it is treated as [`u32::MAX`].
#[derive(Debug)]
pub struct InnerUrlFilter {
from_episode: Option<u32>,
to_episode: Option<u32>,
from_season: Option<u32>,
to_season: Option<u32>,
}
#[derive(Debug, Default)]
pub struct UrlFilter {
inner: Vec<InnerUrlFilter>,
}
impl UrlFilter {
pub fn is_season_valid(&self, season: u32) -> bool {
self.inner.iter().any(|f| {
let from_season = f.from_season.unwrap_or(u32::MIN);
let to_season = f.to_season.unwrap_or(u32::MAX);
season >= from_season && season <= to_season
})
}
pub fn is_episode_valid(&self, episode: u32, season: u32) -> bool {
self.inner.iter().any(|f| {
let from_episode = f.from_episode.unwrap_or(u32::MIN);
let to_episode = f.to_episode.unwrap_or(u32::MAX);
let from_season = f.from_season.unwrap_or(u32::MIN);
let to_season = f.to_season.unwrap_or(u32::MAX);
episode >= from_episode
&& episode <= to_episode
&& season >= from_season
&& season <= to_season
})
}
}
/// Parse a url and return all [`crunchyroll_rs::Media<crunchyroll_rs::Episode>`] &
/// [`crunchyroll_rs::Media<crunchyroll_rs::Movie>`] which could be related to it.
///
/// The `with_filter` argument says whether filtering should be enabled for the url. Filtering is a
/// specific pattern at the end of the url which declares which parts of the url content should be
/// returned / filtered (out). _This only works if the url points to a series_.
///
/// Examples how filtering works:
/// - `...[E5]` - Download the fifth episode.
/// - `...[S1]` - Download the full first season.
/// - `...[-S2]` - Download all seasons up to and including season 2.
/// - `...[S3E4-]` - Download all episodes from and including season 3, episode 4.
/// - `...[S1E4-S3]` - Download all episodes from and including season 1, episode 4, until and including season 3.
/// - `...[S3,S5]` - Download seasons 3 and 5.
/// - `...[S1-S3,S4E2-S4E6]` - Download seasons 1 to 3 and episodes 2 to 6 of season 4.
/// In practice, it would look like this: `https://beta.crunchyroll.com/series/12345678/example[S1E5-S3E2]`.
pub async fn parse_url(
crunchy: &Crunchyroll,
mut url: String,
with_filter: bool,
) -> Result<(MediaCollection, UrlFilter)> {
let url_filter = if with_filter {
debug!("Url may contain filters");
let open_index = url.rfind('[').unwrap_or(0);
let close_index = url.rfind(']').unwrap_or(0);
let filter = if open_index < close_index {
let filter = url.as_str()[open_index + 1..close_index].to_string();
url = url.as_str()[0..open_index].to_string();
filter
} else {
"".to_string()
};
let filter_regex = Regex::new(r"((S(?P<from_season>\d+))?(E(?P<from_episode>\d+))?)(((?P<dash>-)((S(?P<to_season>\d+))?(E(?P<to_episode>\d+))?))?)(,|$)").unwrap();
let mut filters = vec![];
for capture in filter_regex.captures_iter(&filter) {
let dash = capture.name("dash").is_some();
let from_episode = capture
.name("from_episode")
.map_or(anyhow::Ok(None), |fe| Ok(Some(fe.as_str().parse()?)))?;
let to_episode = capture
.name("to_episode")
.map_or(anyhow::Ok(if dash { None } else { from_episode }), |te| {
Ok(Some(te.as_str().parse()?))
})?;
let from_season = capture
.name("from_season")
.map_or(anyhow::Ok(None), |fs| Ok(Some(fs.as_str().parse()?)))?;
let to_season = capture
.name("to_season")
.map_or(anyhow::Ok(if dash { None } else { from_season }), |ts| {
Ok(Some(ts.as_str().parse()?))
})?;
filters.push(InnerUrlFilter {
from_episode,
to_episode,
from_season,
to_season,
})
}
let url_filter = UrlFilter { inner: filters };
debug!("Url filter: {:?}", url_filter);
url_filter
} else {
UrlFilter::default()
};
let parsed_url = crunchyroll_rs::parse_url(url).map_or(Err(anyhow!("Invalid url")), Ok)?;
debug!("Url type: {:?}", parsed_url);
let media_collection = match parsed_url {
UrlType::Series(id) | UrlType::MovieListing(id) | UrlType::EpisodeOrMovie(id) => {
crunchy.media_collection_from_id(id).await?
}
};
Ok((media_collection, url_filter))
}
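// A minimal, hand-constructed sketch of how `UrlFilter` behaves when restricted to a single
// season (roughly what a `[S2]` suffix is meant to express): only season 2 and its episodes pass.
#[cfg(test)]
mod url_filter_tests {
    use super::{InnerUrlFilter, UrlFilter};
    #[test]
    fn season_only_filter() {
        let filter = UrlFilter {
            inner: vec![InnerUrlFilter {
                from_episode: None,
                to_episode: None,
                from_season: Some(2),
                to_season: Some(2),
            }],
        };
        assert!(filter.is_season_valid(2));
        assert!(!filter.is_season_valid(3));
        assert!(filter.is_episode_valid(7, 2));
        assert!(!filter.is_episode_valid(7, 1));
    }
}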
/// Parse a resolution given as a [`String`] to a [`crunchyroll_rs::media::Resolution`].
pub fn parse_resolution(mut resolution: String) -> Result<Resolution> {
resolution = resolution.to_lowercase();
if resolution == "best" {
Ok(Resolution {
width: u64::MAX,
height: u64::MAX,
})
} else if resolution == "worst" {
Ok(Resolution {
width: u64::MIN,
height: u64::MIN,
})
} else if resolution.ends_with('p') {
// strip only the trailing 'p' (e.g. "1080p" -> "1080")
let without_p = resolution.as_str()[0..resolution.len() - 1]
.parse()
.map_err(|_| anyhow!("Could not parse resolution"))?;
Ok(Resolution {
width: without_p * 16 / 9,
height: without_p,
})
} else if let Some((w, h)) = resolution.split_once('x') {
Ok(Resolution {
width: w
.parse()
.map_err(|_| anyhow!("Could not parse resolution"))?,
height: h
.parse()
.map_err(|_| anyhow!("Could not parse resolution"))?,
})
} else {
bail!("Could not parse resolution")
}
}
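// A small sketch of the expected mappings (the "<height>p" form assumes a 16:9 aspect ratio):
// "best" -> u64::MAX x u64::MAX, "worst" -> u64::MIN x u64::MIN, "1080p" -> 1920x1080.
#[cfg(test)]
mod parse_resolution_tests {
    use super::parse_resolution;
    #[test]
    fn parses_pixel_abbreviation() {
        let resolution = parse_resolution("1080p".to_string()).unwrap();
        assert_eq!((resolution.width, resolution.height), (1920, 1080));
    }
    #[test]
    fn parses_exact_pixels() {
        let resolution = parse_resolution("1280x720".to_string()).unwrap();
        assert_eq!((resolution.width, resolution.height), (1280, 720));
    }
}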

View file

@@ -0,0 +1,47 @@
use crate::utils::format::Format;
use crunchyroll_rs::{Media, Season};
use std::collections::BTreeMap;
/// Sort seasons by their season number. Crunchyroll may have multiple seasons for one season
/// number: the different languages are generally stored as individual seasons with the same season
/// number. E.g. series X has one official season, but Crunchyroll has translations for it in 3
/// different languages, so there are 3 different "seasons" on Crunchyroll which are actually the
/// same season, just with different audio.
pub fn sort_seasons_after_number(seasons: Vec<Media<Season>>) -> Vec<Vec<Media<Season>>> {
let mut as_map = BTreeMap::new();
for season in seasons {
as_map
.entry(season.metadata.season_number)
.or_insert_with(Vec::new);
as_map
.get_mut(&season.metadata.season_number)
.unwrap()
.push(season)
}
as_map.into_values().collect()
}
/// Sort formats by season and, within each season, by episode number (ascending). Make sure to
/// pass only episodes from one series and in one language, since the function does not handle
/// those differences, which could otherwise lead to a partially mixed-up result.
pub fn sort_formats_after_seasons(formats: Vec<Format>) -> Vec<Vec<Format>> {
let mut as_map = BTreeMap::new();
for format in formats {
as_map.entry(format.season_number).or_insert_with(Vec::new);
as_map.get_mut(&format.season_number).unwrap().push(format);
}
let mut sorted = as_map
.into_iter()
.map(|(_, mut values)| {
values.sort_by(|a, b| a.number.cmp(&b.number));
values
})
.collect::<Vec<Vec<Format>>>();
sorted.sort_by(|a, b| a[0].series_id.cmp(&b[0].series_id));
sorted
}