Add archive start sync flag

This commit is contained in:
bytedream 2024-04-08 13:57:06 +02:00
parent fe49161e93
commit 1a511e12f9
8 changed files with 692 additions and 179 deletions

122
Cargo.lock generated
View file

@ -179,6 +179,18 @@ version = "3.15.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa" checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa"
[[package]]
name = "bytemuck"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]] [[package]]
name = "bytes" name = "bytes"
version = "1.6.0" version = "1.6.0"
@ -369,6 +381,8 @@ dependencies = [
"fs2", "fs2",
"futures-util", "futures-util",
"http", "http",
"image",
"image_hasher",
"indicatif", "indicatif",
"lazy_static", "lazy_static",
"log", "log",
@ -936,6 +950,32 @@ dependencies = [
"unicode-normalization", "unicode-normalization",
] ]
[[package]]
name = "image"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd54d660e773627692c524beaad361aca785a4f9f5730ce91f42aabe5bce3d11"
dependencies = [
"bytemuck",
"byteorder",
"num-traits",
"zune-core",
"zune-jpeg",
]
[[package]]
name = "image_hasher"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9481465fe767d92494987319b0b447a5829edf57f09c52bf8639396abaaeaf78"
dependencies = [
"base64 0.22.0",
"image",
"rustdct",
"serde",
"transpose",
]
[[package]] [[package]]
name = "indexmap" name = "indexmap"
version = "1.9.3" version = "1.9.3"
@ -1143,12 +1183,30 @@ dependencies = [
"minimal-lexical", "minimal-lexical",
] ]
[[package]]
name = "num-complex"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23c6602fda94a57c990fe0df199a035d83576b496aa29f4e634a8ac6004e68a6"
dependencies = [
"num-traits",
]
[[package]] [[package]]
name = "num-conv" name = "num-conv"
version = "0.1.0" version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = [
"num-traits",
]
[[package]] [[package]]
name = "num-traits" name = "num-traits"
version = "0.2.18" version = "0.2.18"
@ -1305,6 +1363,15 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]]
name = "primal-check"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9df7f93fd637f083201473dab4fee2db4c429d32e55e3299980ab3957ab916a0"
dependencies = [
"num-integer",
]
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.79" version = "1.0.79"
@ -1469,6 +1536,30 @@ version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]]
name = "rustdct"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b61555105d6a9bf98797c063c362a1d24ed8ab0431655e38f1cf51e52089551"
dependencies = [
"rustfft",
]
[[package]]
name = "rustfft"
version = "6.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43806561bc506d0c5d160643ad742e3161049ac01027b5e6d7524091fd401d86"
dependencies = [
"num-complex",
"num-integer",
"num-traits",
"primal-check",
"strength_reduce",
"transpose",
"version_check",
]
[[package]] [[package]]
name = "rustix" name = "rustix"
version = "0.38.32" version = "0.38.32"
@ -1720,6 +1811,12 @@ version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
[[package]]
name = "strength_reduce"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe895eb47f22e2ddd4dabc02bce419d2e643c8e3b585c78158b349195bc24d82"
[[package]] [[package]]
name = "strsim" name = "strsim"
version = "0.10.0" version = "0.10.0"
@ -1998,6 +2095,16 @@ dependencies = [
"once_cell", "once_cell",
] ]
[[package]]
name = "transpose"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ad61aed86bc3faea4300c7aee358b4c6d0c8d6ccc36524c96e4c92ccf26e77e"
dependencies = [
"num-integer",
"strength_reduce",
]
[[package]] [[package]]
name = "try-lock" name = "try-lock"
version = "0.2.5" version = "0.2.5"
@ -2377,3 +2484,18 @@ name = "zeroize"
version = "1.7.0" version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
[[package]]
name = "zune-core"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a"
[[package]]
name = "zune-jpeg"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec866b44a2a1fd6133d363f073ca1b179f438f99e7e5bfb1e33f7181facfe448"
dependencies = [
"zune-core",
]

View file

@ -24,6 +24,8 @@ derive_setters = "0.1"
futures-util = { version = "0.3", features = ["io"] } futures-util = { version = "0.3", features = ["io"] }
fs2 = "0.4" fs2 = "0.4"
http = "1.1" http = "1.1"
image = { version = "0.25", features = ["jpeg"], default-features = false }
image_hasher = "2.0"
indicatif = "0.17" indicatif = "0.17"
lazy_static = "1.4" lazy_static = "1.4"
log = { version = "0.4", features = ["std"] } log = { version = "0.4", features = ["std"] }

View file

@ -10,7 +10,7 @@ use crate::utils::locale::{all_locale_in_locales, resolve_locales, LanguageTaggi
use crate::utils::log::progress; use crate::utils::log::progress;
use crate::utils::os::{free_file, has_ffmpeg, is_special_file}; use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
use crate::utils::parse::parse_url; use crate::utils::parse::parse_url;
use crate::utils::video::variant_data_from_stream; use crate::utils::video::stream_data_from_stream;
use crate::Execute; use crate::Execute;
use anyhow::bail; use anyhow::bail;
use anyhow::Result; use anyhow::Result;
@ -89,6 +89,17 @@ pub struct Archive {
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)] #[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
pub(crate) resolution: Resolution, pub(crate) resolution: Resolution,
#[arg(help = "Tries to sync the timing of all downloaded audios to match one video")]
#[arg(
long_help = "Tries to sync the timing of all downloaded audios to match one video. \
This is done by downloading the first few segments/frames of all video tracks that differ in length and comparing them frame by frame. \
The value of this flag determines how accurate the syncing is, generally speaking everything over 15 begins to be more inaccurate and everything below 6 is too accurate (and won't succeed). \
If you want to provide a custom value to this flag, you have to set it with an equals (e.g. `--sync-start=10` instead of `--sync-start 10`). \
When the syncing fails, the command is continued as if `--sync-start` wasn't provided for this episode
"
)]
#[arg(long, require_equals = true, num_args = 0..=1, default_missing_value = "7.5")]
pub(crate) sync_start: Option<f64>,
#[arg( #[arg(
help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio' and 'video'" help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio' and 'video'"
)] )]
@ -216,8 +227,19 @@ impl Execute for Archive {
} }
} }
if self.include_chapters && !matches!(self.merge, MergeBehavior::Audio) { if self.include_chapters
bail!("`--include-chapters` can only be used if `--merge` is set to 'audio'") && !matches!(self.merge, MergeBehavior::Audio)
&& self.sync_start.is_none()
{
bail!("`--include-chapters` can only be used if `--merge` is set to 'audio' or `--sync-start` is set")
}
if !matches!(self.merge, MergeBehavior::Auto) && self.sync_start.is_some() {
bail!("`--sync-start` can only be used if `--merge` is set to `auto`")
}
if self.sync_start.is_some() && self.ffmpeg_preset.is_none() {
warn!("Using `--sync-start` without `--ffmpeg-preset` might produce worse sync results than with `--ffmpeg-preset` set")
} }
if self.output.contains("{resolution}") if self.output.contains("{resolution}")
@ -294,6 +316,7 @@ impl Execute for Archive {
.audio_sort(Some(self.audio.clone())) .audio_sort(Some(self.audio.clone()))
.subtitle_sort(Some(self.subtitle.clone())) .subtitle_sort(Some(self.subtitle.clone()))
.no_closed_caption(self.no_closed_caption) .no_closed_caption(self.no_closed_caption)
.sync_start_value(self.sync_start)
.threads(self.threads) .threads(self.threads)
.audio_locale_output_map( .audio_locale_output_map(
zip(self.audio.clone(), self.output_audio_locales.clone()).collect(), zip(self.audio.clone(), self.output_audio_locales.clone()).collect(),
@ -450,7 +473,7 @@ async fn get_format(
for single_format in single_formats { for single_format in single_formats {
let stream = single_format.stream().await?; let stream = single_format.stream().await?;
let Some((video, audio, _)) = let Some((video, audio, _)) =
variant_data_from_stream(&stream, &archive.resolution, None).await? stream_data_from_stream(&stream, &archive.resolution, None).await?
else { else {
if single_format.is_episode() { if single_format.is_episode() {
bail!( bail!(
@ -569,7 +592,9 @@ async fn get_format(
video: (video, single_format.audio.clone()), video: (video, single_format.audio.clone()),
audios: vec![(audio, single_format.audio.clone())], audios: vec![(audio, single_format.audio.clone())],
subtitles, subtitles,
metadata: DownloadFormatMetadata { skip_events: None }, metadata: DownloadFormatMetadata {
skip_events: single_format.skip_events().await?,
},
}, },
)); ));
} }

View file

@ -8,7 +8,7 @@ use crate::utils::locale::{resolve_locales, LanguageTagging};
use crate::utils::log::progress; use crate::utils::log::progress;
use crate::utils::os::{free_file, has_ffmpeg, is_special_file}; use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
use crate::utils::parse::parse_url; use crate::utils::parse::parse_url;
use crate::utils::video::variant_data_from_stream; use crate::utils::video::stream_data_from_stream;
use crate::Execute; use crate::Execute;
use anyhow::bail; use anyhow::bail;
use anyhow::Result; use anyhow::Result;
@ -351,7 +351,7 @@ async fn get_format(
try_peer_hardsubs: bool, try_peer_hardsubs: bool,
) -> Result<(DownloadFormat, Format)> { ) -> Result<(DownloadFormat, Format)> {
let stream = single_format.stream().await?; let stream = single_format.stream().await?;
let Some((video, audio, contains_hardsub)) = variant_data_from_stream( let Some((video, audio, contains_hardsub)) = stream_data_from_stream(
&stream, &stream,
&download.resolution, &download.resolution,
if try_peer_hardsubs { if try_peer_hardsubs {

View file

@ -184,16 +184,29 @@ pub async fn main(args: &[String]) {
.unwrap_or_default() .unwrap_or_default()
.starts_with(".crunchy-cli_") .starts_with(".crunchy-cli_")
{ {
let result = fs::remove_file(file.path()); if file.file_type().map_or(true, |ft| ft.is_file()) {
debug!( let result = fs::remove_file(file.path());
"Ctrl-c removed temporary file {} {}", debug!(
file.path().to_string_lossy(), "Ctrl-c removed temporary file {} {}",
if result.is_ok() { file.path().to_string_lossy(),
"successfully" if result.is_ok() {
} else { "successfully"
"not successfully" } else {
} "not successfully"
) }
)
} else {
let result = fs::remove_dir_all(file.path());
debug!(
"Ctrl-c removed temporary directory {} {}",
file.path().to_string_lossy(),
if result.is_ok() {
"successfully"
} else {
"not successfully"
}
)
}
} }
} }
} }

View file

@ -1,11 +1,15 @@
use crate::utils::ffmpeg::FFmpegPreset; use crate::utils::ffmpeg::FFmpegPreset;
use crate::utils::filter::real_dedup_vec; use crate::utils::filter::real_dedup_vec;
use crate::utils::os::{cache_dir, is_special_file, temp_directory, temp_named_pipe, tempfile}; use crate::utils::log::progress;
use crate::utils::os::{
cache_dir, is_special_file, temp_directory, temp_named_pipe, tempdir, tempfile,
};
use crate::utils::rate_limit::RateLimiterService; use crate::utils::rate_limit::RateLimiterService;
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use chrono::NaiveTime; use chrono::{NaiveTime, TimeDelta};
use crunchyroll_rs::media::{SkipEvents, SkipEventsEvent, StreamData, StreamSegment, Subtitle}; use crunchyroll_rs::media::{SkipEvents, SkipEventsEvent, StreamData, StreamSegment, Subtitle};
use crunchyroll_rs::Locale; use crunchyroll_rs::Locale;
use image_hasher::{Hasher, HasherConfig, ImageHash};
use indicatif::{ProgressBar, ProgressDrawTarget, ProgressFinish, ProgressStyle}; use indicatif::{ProgressBar, ProgressDrawTarget, ProgressFinish, ProgressStyle};
use log::{debug, warn, LevelFilter}; use log::{debug, warn, LevelFilter};
use regex::Regex; use regex::Regex;
@ -59,6 +63,7 @@ pub struct DownloadBuilder {
force_hardsub: bool, force_hardsub: bool,
download_fonts: bool, download_fonts: bool,
no_closed_caption: bool, no_closed_caption: bool,
sync_start_value: Option<f64>,
threads: usize, threads: usize,
ffmpeg_threads: Option<usize>, ffmpeg_threads: Option<usize>,
audio_locale_output_map: HashMap<Locale, String>, audio_locale_output_map: HashMap<Locale, String>,
@ -78,6 +83,7 @@ impl DownloadBuilder {
force_hardsub: false, force_hardsub: false,
download_fonts: false, download_fonts: false,
no_closed_caption: false, no_closed_caption: false,
sync_start_value: None,
threads: num_cpus::get(), threads: num_cpus::get(),
ffmpeg_threads: None, ffmpeg_threads: None,
audio_locale_output_map: HashMap::new(), audio_locale_output_map: HashMap::new(),
@ -99,6 +105,8 @@ impl DownloadBuilder {
download_fonts: self.download_fonts, download_fonts: self.download_fonts,
no_closed_caption: self.no_closed_caption, no_closed_caption: self.no_closed_caption,
sync_start_value: self.sync_start_value,
download_threads: self.threads, download_threads: self.threads,
ffmpeg_threads: self.ffmpeg_threads, ffmpeg_threads: self.ffmpeg_threads,
@ -110,10 +118,23 @@ impl DownloadBuilder {
} }
} }
struct FFmpegMeta { struct FFmpegVideoMeta {
path: TempPath, path: TempPath,
language: Locale, length: TimeDelta,
title: String, start_time: Option<TimeDelta>,
}
struct FFmpegAudioMeta {
path: TempPath,
locale: Locale,
start_time: Option<TimeDelta>,
}
struct FFmpegSubtitleMeta {
path: TempPath,
locale: Locale,
cc: bool,
start_time: Option<TimeDelta>,
} }
pub struct DownloadFormat { pub struct DownloadFormat {
@ -141,6 +162,8 @@ pub struct Downloader {
download_fonts: bool, download_fonts: bool,
no_closed_caption: bool, no_closed_caption: bool,
sync_start_value: Option<f64>,
download_threads: usize, download_threads: usize,
ffmpeg_threads: Option<usize>, ffmpeg_threads: Option<usize>,
@ -216,13 +239,16 @@ impl Downloader {
} }
} }
let mut video_offset = None;
let mut audio_offsets = HashMap::new();
let mut subtitle_offsets = HashMap::new();
let mut videos = vec![]; let mut videos = vec![];
let mut audios = vec![]; let mut audios = vec![];
let mut subtitles = vec![]; let mut subtitles = vec![];
let mut fonts = vec![]; let mut fonts = vec![];
let mut chapters = None; let mut chapters = None;
let mut max_len = NaiveTime::MIN; let mut max_len = TimeDelta::min_value();
let mut max_frames = 0f64; let mut max_frames = 0;
let fmt_space = self let fmt_space = self
.formats .formats
.iter() .iter()
@ -234,115 +260,252 @@ impl Downloader {
.max() .max()
.unwrap(); .unwrap();
for (i, format) in self.formats.iter().enumerate() { if self.formats.len() > 1 && self.sync_start_value.is_some() {
let video_path = self let all_segments_count: Vec<usize> = self
.download_video( .formats
&format.video.0, .iter()
format!("{:<1$}", format!("Downloading video #{}", i + 1), fmt_space), .map(|f| f.video.0.segments().len())
) .collect();
.await?; let sync_segments = 11.max(
for (variant_data, locale) in format.audios.iter() { all_segments_count.iter().max().unwrap() - all_segments_count.iter().min().unwrap(),
let audio_path = self );
.download_audio( let mut sync_vids = vec![];
variant_data, for (i, format) in self.formats.iter().enumerate() {
format!("{:<1$}", format!("Downloading {} audio", locale), fmt_space), let path = self
.download_video(
&format.video.0,
format!("Downloading video #{} sync segments", i + 1),
Some(sync_segments),
) )
.await?; .await?;
audios.push(FFmpegMeta { sync_vids.push(SyncVideo {
path: audio_path, path,
language: locale.clone(), length: len_from_segments(&format.video.0.segments()),
title: if i == 0 { available_frames: (len_from_segments(
locale.to_human_readable() &format.video.0.segments()[0..sync_segments],
} else { )
format!("{} [Video: #{}]", locale.to_human_readable(), i + 1) .num_milliseconds() as f64
}, * format.video.0.fps().unwrap()
/ 1000.0) as u64,
idx: i,
}) })
} }
let (len, fps) = get_video_stats(&video_path)?; let _progress_handler =
progress!("Syncing video start times (this might take some time)");
let mut offsets = sync_videos(sync_vids, self.sync_start_value.unwrap())?;
drop(_progress_handler);
let mut offset_pre_checked = false;
if let Some(tmp_offsets) = &offsets {
let formats_with_offset: Vec<TimeDelta> = self
.formats
.iter()
.enumerate()
.map(|(i, f)| {
len_from_segments(&f.video.0.segments())
- TimeDelta::milliseconds(
tmp_offsets
.get(&i)
.map(|o| (*o as f64 / f.video.0.fps().unwrap() * 1000.0) as i64)
.unwrap_or_default(),
)
})
.collect();
let min = formats_with_offset.iter().min().unwrap();
let max = formats_with_offset.iter().max().unwrap();
if max.num_seconds() - min.num_seconds() > 15 {
warn!("Found difference of >15 seconds after sync, skipping applying it");
offsets = None;
offset_pre_checked = true
}
}
if let Some(offsets) = offsets {
let mut root_format_idx = 0;
let mut root_format_length = 0;
let mut audio_count: usize = 0;
let mut subtitle_count: usize = 0;
for (i, format) in self.formats.iter().enumerate() {
let format_fps = format.video.0.fps().unwrap();
let format_len = format
.video
.0
.segments()
.iter()
.map(|s| s.length.as_millis())
.sum::<u128>() as u64
- offsets.get(&i).map_or(0, |o| *o);
if format_len > root_format_length {
root_format_idx = i;
root_format_length = format_len;
}
for _ in &format.audios {
if let Some(offset) = &offsets.get(&i) {
audio_offsets.insert(
audio_count,
TimeDelta::milliseconds(
(**offset as f64 / format_fps * 1000.0) as i64,
),
);
}
audio_count += 1
}
for _ in &format.subtitles {
if let Some(offset) = &offsets.get(&i) {
subtitle_offsets.insert(
subtitle_count,
TimeDelta::milliseconds(
(**offset as f64 / format_fps * 1000.0) as i64,
),
);
}
subtitle_count += 1
}
}
let mut root_format = self.formats.remove(root_format_idx);
let mut audio_prepend = vec![];
let mut subtitle_prepend = vec![];
let mut audio_append = vec![];
let mut subtitle_append = vec![];
for (i, format) in self.formats.into_iter().enumerate() {
if i < root_format_idx {
audio_prepend.extend(format.audios);
subtitle_prepend.extend(format.subtitles);
} else {
audio_append.extend(format.audios);
subtitle_append.extend(format.subtitles);
}
}
root_format.audios.splice(0..0, audio_prepend);
root_format.subtitles.splice(0..0, subtitle_prepend);
root_format.audios.extend(audio_append);
root_format.subtitles.extend(subtitle_append);
self.formats = vec![root_format];
video_offset = offsets.get(&root_format_idx).map(|o| {
TimeDelta::milliseconds(
(*o as f64 / self.formats[0].video.0.fps().unwrap() * 1000.0) as i64,
)
})
} else if !offset_pre_checked {
warn!("Couldn't find reliable sync positions")
}
}
// downloads all videos
for (i, format) in self.formats.iter().enumerate() {
let path = self
.download_video(
&format.video.0,
format!("{:<1$}", format!("Downloading video #{}", i + 1), fmt_space),
None,
)
.await?;
let (len, fps) = get_video_stats(&path)?;
if max_len < len { if max_len < len {
max_len = len max_len = len
} }
let frames = len.signed_duration_since(NaiveTime::MIN).num_seconds() as f64 * fps; let frames = ((len.num_milliseconds() as f64
if frames > max_frames { - video_offset.unwrap_or_default().num_milliseconds() as f64)
max_frames = frames; / 1000.0
* fps) as u64;
if max_frames < frames {
max_frames = frames
} }
if !format.subtitles.is_empty() { videos.push(FFmpegVideoMeta {
let progress_spinner = if log::max_level() == LevelFilter::Info { path,
let progress_spinner = ProgressBar::new_spinner() length: len,
.with_style( start_time: video_offset,
ProgressStyle::with_template( })
format!( }
":: {:<1$} {{msg}} {{spinner}}",
"Downloading subtitles", fmt_space // downloads all audios
) for format in &self.formats {
.as_str(), for (j, (stream_data, locale)) in format.audios.iter().enumerate() {
let path = self
.download_audio(
stream_data,
format!("{:<1$}", format!("Downloading {} audio", locale), fmt_space),
)
.await?;
audios.push(FFmpegAudioMeta {
path,
locale: locale.clone(),
start_time: audio_offsets.get(&j).cloned(),
})
}
}
for (i, format) in self.formats.iter().enumerate() {
if format.subtitles.is_empty() {
continue;
}
let progress_spinner = if log::max_level() == LevelFilter::Info {
let progress_spinner = ProgressBar::new_spinner()
.with_style(
ProgressStyle::with_template(
format!(
":: {:<1$} {{msg}} {{spinner}}",
"Downloading subtitles", fmt_space
) )
.unwrap() .as_str(),
.tick_strings(&["", "\\", "|", "/", ""]),
) )
.with_finish(ProgressFinish::Abandon); .unwrap()
progress_spinner.enable_steady_tick(Duration::from_millis(100)); .tick_strings(&["", "\\", "|", "/", ""]),
Some(progress_spinner) )
} else { .with_finish(ProgressFinish::Abandon);
None progress_spinner.enable_steady_tick(Duration::from_millis(100));
}; Some(progress_spinner)
} else {
None
};
for (subtitle, not_cc) in format.subtitles.iter() { for (j, (subtitle, not_cc)) in format.subtitles.iter().enumerate() {
if !not_cc && self.no_closed_caption { if !not_cc && self.no_closed_caption {
continue; continue;
}
if let Some(pb) = &progress_spinner {
let mut progress_message = pb.message();
if !progress_message.is_empty() {
progress_message += ", "
}
progress_message += &subtitle.locale.to_string();
if !not_cc {
progress_message += " (CC)";
}
if i != 0 {
progress_message += &format!(" [Video: #{}]", i + 1);
}
pb.set_message(progress_message)
}
let mut subtitle_title = subtitle.locale.to_human_readable();
if !not_cc {
subtitle_title += " (CC)"
}
if i != 0 {
subtitle_title += &format!(" [Video: #{}]", i + 1)
}
let subtitle_path = self.download_subtitle(subtitle.clone(), len).await?;
debug!(
"Downloaded {} subtitles{}{}",
subtitle.locale,
(!not_cc).then_some(" (cc)").unwrap_or_default(),
(i != 0)
.then_some(format!(" for video {}", i))
.unwrap_or_default()
);
subtitles.push(FFmpegMeta {
path: subtitle_path,
language: subtitle.locale.clone(),
title: subtitle_title,
})
} }
}
videos.push(FFmpegMeta {
path: video_path,
language: format.video.1.clone(),
title: if self.formats.len() == 1 {
"Default".to_string()
} else {
format!("#{}", i + 1)
},
});
if let Some(pb) = &progress_spinner {
let mut progress_message = pb.message();
if !progress_message.is_empty() {
progress_message += ", "
}
progress_message += &subtitle.locale.to_string();
if !not_cc {
progress_message += " (CC)";
}
if i.min(videos.len() - 1) != 0 {
progress_message += &format!(" [Video: #{}]", i + 1);
}
pb.set_message(progress_message)
}
let path = self
.download_subtitle(subtitle.clone(), videos[i.min(videos.len() - 1)].length)
.await?;
debug!(
"Downloaded {} subtitles{}",
subtitle.locale,
(!not_cc).then_some(" (cc)").unwrap_or_default(),
);
subtitles.push(FFmpegSubtitleMeta {
path,
locale: subtitle.locale.clone(),
cc: !not_cc,
start_time: subtitle_offsets.get(&j).cloned(),
})
}
}
for format in self.formats.iter() {
if let Some(skip_events) = &format.metadata.skip_events { if let Some(skip_events) = &format.metadata.skip_events {
let (file, path) = tempfile(".chapter")?.into_parts(); let (file, path) = tempfile(".chapter")?.into_parts();
chapters = Some(( chapters = Some((
@ -421,17 +584,30 @@ impl Downloader {
let mut metadata = vec![]; let mut metadata = vec![];
for (i, meta) in videos.iter().enumerate() { for (i, meta) in videos.iter().enumerate() {
if let Some(start_time) = meta.start_time {
input.extend(["-ss".to_string(), format_time_delta(start_time)])
}
input.extend(["-i".to_string(), meta.path.to_string_lossy().to_string()]); input.extend(["-i".to_string(), meta.path.to_string_lossy().to_string()]);
maps.extend(["-map".to_string(), i.to_string()]); maps.extend(["-map".to_string(), i.to_string()]);
metadata.extend([ metadata.extend([
format!("-metadata:s:v:{}", i), format!("-metadata:s:v:{}", i),
format!("title={}", meta.title), format!(
"title={}",
if videos.len() == 1 {
"Default".to_string()
} else {
format!("#{}", i + 1)
}
),
]); ]);
// the empty language metadata is created to avoid that metadata from the original track // the empty language metadata is created to avoid that metadata from the original track
// is copied // is copied
metadata.extend([format!("-metadata:s:v:{}", i), "language=".to_string()]) metadata.extend([format!("-metadata:s:v:{}", i), "language=".to_string()])
} }
for (i, meta) in audios.iter().enumerate() { for (i, meta) in audios.iter().enumerate() {
if let Some(start_time) = meta.start_time {
input.extend(["-ss".to_string(), format_time_delta(start_time)])
}
input.extend(["-i".to_string(), meta.path.to_string_lossy().to_string()]); input.extend(["-i".to_string(), meta.path.to_string_lossy().to_string()]);
maps.extend(["-map".to_string(), (i + videos.len()).to_string()]); maps.extend(["-map".to_string(), (i + videos.len()).to_string()]);
metadata.extend([ metadata.extend([
@ -439,13 +615,20 @@ impl Downloader {
format!( format!(
"language={}", "language={}",
self.audio_locale_output_map self.audio_locale_output_map
.get(&meta.language) .get(&meta.locale)
.unwrap_or(&meta.language.to_string()) .unwrap_or(&meta.locale.to_string())
), ),
]); ]);
metadata.extend([ metadata.extend([
format!("-metadata:s:a:{}", i), format!("-metadata:s:a:{}", i),
format!("title={}", meta.title), format!(
"title={}",
if videos.len() == 1 {
meta.locale.to_human_readable()
} else {
format!("{} [Video: #{}]", meta.locale.to_human_readable(), i + 1,)
}
),
]); ]);
} }
@ -465,6 +648,9 @@ impl Downloader {
if container_supports_softsubs { if container_supports_softsubs {
for (i, meta) in subtitles.iter().enumerate() { for (i, meta) in subtitles.iter().enumerate() {
if let Some(start_time) = meta.start_time {
input.extend(["-ss".to_string(), format_time_delta(start_time)])
}
input.extend(["-i".to_string(), meta.path.to_string_lossy().to_string()]); input.extend(["-i".to_string(), meta.path.to_string_lossy().to_string()]);
maps.extend([ maps.extend([
"-map".to_string(), "-map".to_string(),
@ -475,13 +661,22 @@ impl Downloader {
format!( format!(
"language={}", "language={}",
self.subtitle_locale_output_map self.subtitle_locale_output_map
.get(&meta.language) .get(&meta.locale)
.unwrap_or(&meta.language.to_string()) .unwrap_or(&meta.locale.to_string())
), ),
]); ]);
metadata.extend([ metadata.extend([
format!("-metadata:s:s:{}", i), format!("-metadata:s:s:{}", i),
format!("title={}", meta.title), format!("title={}", {
let mut title = meta.locale.to_string();
if meta.cc {
title += " (CC)"
}
if videos.len() > 1 {
title += &format!(" [Video: #{}]", i + 1)
}
title
}),
]); ]);
} }
} }
@ -523,10 +718,7 @@ impl Downloader {
// set default subtitle // set default subtitle
if let Some(default_subtitle) = self.default_subtitle { if let Some(default_subtitle) = self.default_subtitle {
if let Some(position) = subtitles if let Some(position) = subtitles.iter().position(|m| m.locale == default_subtitle) {
.iter()
.position(|m| m.language == default_subtitle)
{
if container_supports_softsubs { if container_supports_softsubs {
match dst.extension().unwrap_or_default().to_str().unwrap() { match dst.extension().unwrap_or_default().to_str().unwrap() {
"mov" | "mp4" => output_presets.extend([ "mov" | "mp4" => output_presets.extend([
@ -585,7 +777,7 @@ impl Downloader {
if container_supports_softsubs { if container_supports_softsubs {
if let Some(position) = subtitles if let Some(position) = subtitles
.iter() .iter()
.position(|meta| meta.language == default_subtitle) .position(|meta| meta.locale == default_subtitle)
{ {
command_args.extend([ command_args.extend([
format!("-disposition:s:s:{}", position), format!("-disposition:s:s:{}", position),
@ -597,9 +789,7 @@ impl Downloader {
// set the 'forced' flag to CC subtitles // set the 'forced' flag to CC subtitles
for (i, subtitle) in subtitles.iter().enumerate() { for (i, subtitle) in subtitles.iter().enumerate() {
// well, checking if the title contains '(CC)' might not be the best solutions from a if !subtitle.cc {
// performance perspective but easier than adjusting the `FFmpegMeta` struct
if !subtitle.title.contains("(CC)") {
continue; continue;
} }
@ -632,7 +822,7 @@ impl Downloader {
// create parent directory if it does not exist // create parent directory if it does not exist
if let Some(parent) = dst.parent() { if let Some(parent) = dst.parent() {
if !parent.exists() { if !parent.exists() {
std::fs::create_dir_all(parent)? fs::create_dir_all(parent)?
} }
} }
@ -650,7 +840,7 @@ impl Downloader {
let ffmpeg_progress_cancellation_token = ffmpeg_progress_cancel.clone(); let ffmpeg_progress_cancellation_token = ffmpeg_progress_cancel.clone();
let ffmpeg_progress = tokio::spawn(async move { let ffmpeg_progress = tokio::spawn(async move {
ffmpeg_progress( ffmpeg_progress(
max_frames as u64, max_frames,
fifo, fifo,
format!("{:<1$}", "Generating output file", fmt_space + 1), format!("{:<1$}", "Generating output file", fmt_space + 1),
ffmpeg_progress_cancellation_token, ffmpeg_progress_cancellation_token,
@ -681,7 +871,7 @@ impl Downloader {
let segments = stream_data.segments(); let segments = stream_data.segments();
// sum the length of all streams up // sum the length of all streams up
estimated_required_space += estimate_variant_file_size(stream_data, &segments); estimated_required_space += estimate_stream_data_file_size(stream_data, &segments);
} }
let tmp_stat = fs2::statvfs(temp_directory()).unwrap(); let tmp_stat = fs2::statvfs(temp_directory()).unwrap();
@ -727,11 +917,16 @@ impl Downloader {
Ok((tmp_required, dst_required)) Ok((tmp_required, dst_required))
} }
async fn download_video(&self, stream_data: &StreamData, message: String) -> Result<TempPath> { async fn download_video(
&self,
stream_data: &StreamData,
message: String,
max_segments: Option<usize>,
) -> Result<TempPath> {
let tempfile = tempfile(".mp4")?; let tempfile = tempfile(".mp4")?;
let (mut file, path) = tempfile.into_parts(); let (mut file, path) = tempfile.into_parts();
self.download_segments(&mut file, message, stream_data) self.download_segments(&mut file, message, stream_data, max_segments)
.await?; .await?;
Ok(path) Ok(path)
@ -741,7 +936,7 @@ impl Downloader {
let tempfile = tempfile(".m4a")?; let tempfile = tempfile(".m4a")?;
let (mut file, path) = tempfile.into_parts(); let (mut file, path) = tempfile.into_parts();
self.download_segments(&mut file, message, stream_data) self.download_segments(&mut file, message, stream_data, None)
.await?; .await?;
Ok(path) Ok(path)
@ -750,7 +945,7 @@ impl Downloader {
async fn download_subtitle( async fn download_subtitle(
&self, &self,
subtitle: Subtitle, subtitle: Subtitle,
max_length: NaiveTime, max_length: TimeDelta,
) -> Result<TempPath> { ) -> Result<TempPath> {
let tempfile = tempfile(".ass")?; let tempfile = tempfile(".ass")?;
let (mut file, path) = tempfile.into_parts(); let (mut file, path) = tempfile.into_parts();
@ -796,14 +991,20 @@ impl Downloader {
writer: &mut impl Write, writer: &mut impl Write,
message: String, message: String,
stream_data: &StreamData, stream_data: &StreamData,
max_segments: Option<usize>,
) -> Result<()> { ) -> Result<()> {
let segments = stream_data.segments(); let mut segments = stream_data.segments();
if let Some(max_segments) = max_segments {
segments = segments
.drain(0..max_segments.min(segments.len() - 1))
.collect();
}
let total_segments = segments.len(); let total_segments = segments.len();
let count = Arc::new(Mutex::new(0)); let count = Arc::new(Mutex::new(0));
let progress = if log::max_level() == LevelFilter::Info { let progress = if log::max_level() == LevelFilter::Info {
let estimated_file_size = estimate_variant_file_size(stream_data, &segments); let estimated_file_size = estimate_stream_data_file_size(stream_data, &segments);
let progress = ProgressBar::new(estimated_file_size) let progress = ProgressBar::new(estimated_file_size)
.with_style( .with_style(
@ -820,7 +1021,7 @@ impl Downloader {
None None
}; };
let cpus = self.download_threads; let cpus = self.download_threads.min(segments.len());
let mut segs: Vec<Vec<StreamSegment>> = Vec::with_capacity(cpus); let mut segs: Vec<Vec<StreamSegment>> = Vec::with_capacity(cpus);
for _ in 0..cpus { for _ in 0..cpus {
segs.push(vec![]) segs.push(vec![])
@ -964,12 +1165,12 @@ impl Downloader {
} }
} }
fn estimate_variant_file_size(stream_data: &StreamData, segments: &[StreamSegment]) -> u64 { fn estimate_stream_data_file_size(stream_data: &StreamData, segments: &[StreamSegment]) -> u64 {
(stream_data.bandwidth / 8) * segments.iter().map(|s| s.length.as_secs()).sum::<u64>() (stream_data.bandwidth / 8) * segments.iter().map(|s| s.length.as_secs()).sum::<u64>()
} }
/// Get the length and fps of a video. /// Get the length and fps of a video.
fn get_video_stats(path: &Path) -> Result<(NaiveTime, f64)> { fn get_video_stats(path: &Path) -> Result<(TimeDelta, f64)> {
let video_length = Regex::new(r"Duration:\s(?P<time>\d+:\d+:\d+\.\d+),")?; let video_length = Regex::new(r"Duration:\s(?P<time>\d+:\d+:\d+\.\d+),")?;
let video_fps = Regex::new(r"(?P<fps>[\d/.]+)\sfps")?; let video_fps = Regex::new(r"(?P<fps>[\d/.]+)\sfps")?;
@ -996,7 +1197,8 @@ fn get_video_stats(path: &Path) -> Result<(NaiveTime, f64)> {
Ok(( Ok((
NaiveTime::parse_from_str(length_caps.name("time").unwrap().as_str(), "%H:%M:%S%.f") NaiveTime::parse_from_str(length_caps.name("time").unwrap().as_str(), "%H:%M:%S%.f")
.unwrap(), .unwrap()
.signed_duration_since(NaiveTime::MIN),
fps_caps.name("fps").unwrap().as_str().parse().unwrap(), fps_caps.name("fps").unwrap().as_str().parse().unwrap(),
)) ))
} }
@ -1125,28 +1327,12 @@ fn get_subtitle_stats(path: &Path) -> Result<Vec<String>> {
/// players. To prevent this, the subtitle entries must be manually sorted. See /// players. To prevent this, the subtitle entries must be manually sorted. See
/// [crunchy-labs/crunchy-cli#208](https://github.com/crunchy-labs/crunchy-cli/issues/208) for more /// [crunchy-labs/crunchy-cli#208](https://github.com/crunchy-labs/crunchy-cli/issues/208) for more
/// information. /// information.
fn fix_subtitles(raw: &mut Vec<u8>, max_length: NaiveTime) { fn fix_subtitles(raw: &mut Vec<u8>, max_length: TimeDelta) {
let re = Regex::new( let re = Regex::new(
r"^Dialogue:\s(?P<layer>\d+),(?P<start>\d+:\d+:\d+\.\d+),(?P<end>\d+:\d+:\d+\.\d+),", r"^Dialogue:\s(?P<layer>\d+),(?P<start>\d+:\d+:\d+\.\d+),(?P<end>\d+:\d+:\d+\.\d+),",
) )
.unwrap(); .unwrap();
// chrono panics if we try to format NaiveTime with `%2f` and the nano seconds has more than 2
// digits so them have to be reduced manually to avoid the panic
fn format_naive_time(native_time: NaiveTime) -> String {
let formatted_time = native_time.format("%f").to_string();
format!(
"{}.{}",
native_time.format("%T"),
if formatted_time.len() <= 2 {
native_time.format("%2f").to_string()
} else {
formatted_time.split_at(2).0.parse().unwrap()
}
)
.split_off(1) // <- in the ASS spec, the hour has only one digit
}
let mut entries = (vec![], vec![]); let mut entries = (vec![], vec![]);
let mut as_lines: Vec<String> = String::from_utf8_lossy(raw.as_slice()) let mut as_lines: Vec<String> = String::from_utf8_lossy(raw.as_slice())
@ -1158,12 +1344,18 @@ fn fix_subtitles(raw: &mut Vec<u8>, max_length: NaiveTime) {
if line.trim() == "[Script Info]" { if line.trim() == "[Script Info]" {
line.push_str("\nScaledBorderAndShadow: yes") line.push_str("\nScaledBorderAndShadow: yes")
} else if let Some(capture) = re.captures(line) { } else if let Some(capture) = re.captures(line) {
let mut start = capture.name("start").map_or(NaiveTime::default(), |s| { let mut start = capture
NaiveTime::parse_from_str(s.as_str(), "%H:%M:%S.%f").unwrap() .name("start")
}); .map_or(NaiveTime::default(), |s| {
let mut end = capture.name("end").map_or(NaiveTime::default(), |e| { NaiveTime::parse_from_str(s.as_str(), "%H:%M:%S.%f").unwrap()
NaiveTime::parse_from_str(e.as_str(), "%H:%M:%S.%f").unwrap() })
}); .signed_duration_since(NaiveTime::MIN);
let mut end = capture
.name("end")
.map_or(NaiveTime::default(), |e| {
NaiveTime::parse_from_str(e.as_str(), "%H:%M:%S.%f").unwrap()
})
.signed_duration_since(NaiveTime::MIN);
if start > max_length || end > max_length { if start > max_length || end > max_length {
let layer = capture let layer = capture
@ -1183,8 +1375,8 @@ fn fix_subtitles(raw: &mut Vec<u8>, max_length: NaiveTime) {
format!( format!(
"Dialogue: {},{},{},", "Dialogue: {},{},{},",
layer, layer,
format_naive_time(start), format_time_delta(start),
format_naive_time(end) format_time_delta(end)
), ),
) )
.to_string() .to_string()
@ -1209,13 +1401,10 @@ fn fix_subtitles(raw: &mut Vec<u8>, max_length: NaiveTime) {
fn write_ffmpeg_chapters( fn write_ffmpeg_chapters(
file: &mut fs::File, file: &mut fs::File,
video_len: NaiveTime, video_len: TimeDelta,
events: &mut Vec<(&str, &SkipEventsEvent)>, events: &mut Vec<(&str, &SkipEventsEvent)>,
) -> Result<()> { ) -> Result<()> {
let video_len = video_len let video_len = video_len.num_milliseconds() as f32 / 1000.0;
.signed_duration_since(NaiveTime::MIN)
.num_milliseconds() as f32
/ 1000.0;
events.sort_by(|(_, event_a), (_, event_b)| event_a.start.total_cmp(&event_b.start)); events.sort_by(|(_, event_a), (_, event_b)| event_a.start.total_cmp(&event_b.start));
writeln!(file, ";FFMETADATA1")?; writeln!(file, ";FFMETADATA1")?;
@ -1332,3 +1521,149 @@ async fn ffmpeg_progress<R: AsyncReadExt + Unpin>(
Ok(()) Ok(())
} }
/// A downloaded video stream participating in the archive start-sync process.
struct SyncVideo {
    // temporary file the downloaded video (or its first segments) is stored in
    path: TempPath,
    // total length of this video, used to pick the shortest video as sync base
    length: TimeDelta,
    // number of frames actually available in `path`; reaching this frame while
    // searching means the end of the stream was hit without a match
    available_frames: u64,
    // position of this video in the caller's input list; used as the key in the
    // offset map returned by `sync_videos`
    idx: usize,
}
/// Tries to find, for every given video, the frame offset at which its content lines up
/// with the shortest video of the list (the "sync base").
///
/// `value` is the maximum average perceptual-hash distance a window of frames may have to
/// the base window to still count as a match (lower = stricter).
///
/// Returns `Ok(None)` if any video reaches its end of stream without a good enough match.
/// On success, the map contains one entry per `SyncVideo::idx` with the frame offset of
/// that video relative to the sync base; the base video itself has no entry.
fn sync_videos(mut sync_videos: Vec<SyncVideo>, value: f64) -> Result<Option<HashMap<usize, u64>>> {
    let mut result = HashMap::new();
    let hasher = HasherConfig::new().to_hasher();
    // skip the very first frames of each video — presumably to avoid black frames /
    // fade-ins that hash identically everywhere. TODO confirm choice of 50
    let start_frame = 50;
    // use the shortest video as sync base so its reference window should exist in all
    // of the (longer) remaining videos
    sync_videos.sort_by_key(|sv| sv.length);
    let sync_base = sync_videos.remove(0);
    // reference window: perceptual hashes of 100 consecutive frames of the base video
    let sync_hashes = extract_frame_hashes(&sync_base.path, start_frame, 100, &hasher)?;
    for sync_video in sync_videos {
        // despite the name, this tracks the *lowest* (= best) window distance seen so
        // far; it is only used for the debug message below
        let mut highest_frame_match = f64::INFINITY;
        let mut frame = start_frame;
        let mut hashes = vec![];
        loop {
            if frame == sync_video.available_frames {
                debug!(
                    "Failed to sync videos, end of stream {} reached (highest frame match: {})",
                    sync_video.idx + 1,
                    highest_frame_match
                );
                return Ok(None);
            }
            // keep only the trailing `sync_hashes.len()` hashes of the previous chunk so
            // that windows straddling a chunk boundary are not missed, then top the
            // buffer back up to 300 hashes starting at `frame`
            hashes.drain(0..(hashes.len() as i32 - sync_hashes.len() as i32).max(0) as usize);
            hashes.extend(extract_frame_hashes(
                &sync_video.path,
                frame,
                300 - hashes.len() as u64,
                &hasher,
            )?);
            // average distance of the base window to every sliding window in `hashes`
            let check_frame_windows_result = check_frame_windows(&sync_hashes, &hashes);
            if let Some(offset) = check_frame_windows_result
                .iter()
                .enumerate()
                .find_map(|(i, cfw)| (*cfw <= value).then_some(i))
            {
                // first window at or below the threshold → offset of this video
                // relative to the base
                result.insert(sync_video.idx, frame + offset as u64 - start_frame);
                break;
            } else {
                // NOTE(review): this `.unwrap()` panics if `check_frame_windows`
                // returned an empty vec, which can happen when fewer frames than
                // `sync_hashes.len()` could be extracted near the end of the stream —
                // confirm and guard
                let curr_highest_frame_match = *check_frame_windows_result
                    .iter()
                    .min_by(|a, b| a.total_cmp(b))
                    .unwrap();
                if curr_highest_frame_match < highest_frame_match {
                    highest_frame_match = curr_highest_frame_match
                }
            }
            // advance by one chunk (minus the overlap kept above), clamped so the
            // end-of-stream check at the loop top can fire
            frame = (frame + 300 - sync_hashes.len() as u64).min(sync_video.available_frames)
        }
    }
    Ok(Some(result))
}
/// Extracts `frame_count` frames from `input_file` (starting at `start_frame`) via ffmpeg
/// and returns a perceptual hash for every extracted frame, **in frame order**.
///
/// The frames are written as jpg files into a temporary directory which is removed again
/// when the returned value is dropped.
///
/// # Errors
/// Fails when ffmpeg exits unsuccessfully (its stderr becomes the error message), when
/// the temporary directory cannot be created/read, or when a frame image cannot be
/// decoded.
fn extract_frame_hashes(
    input_file: &Path,
    start_frame: u64,
    frame_count: u64,
    hasher: &Hasher,
) -> Result<Vec<ImageHash>> {
    // the file stem only makes the tempdir name recognizable for debugging. the previous
    // `file_name().trim_end_matches(file_stem)` expression was a no-op (a file name with
    // an extension never *ends* with its own stem) and always yielded the full file name
    let frame_dir = tempdir(format!(
        "{}_sync_frames",
        input_file.file_stem().unwrap_or_default().to_string_lossy()
    ))?;
    let extract_output = Command::new("ffmpeg")
        .arg("-hide_banner")
        .arg("-y")
        .args(["-i", input_file.to_string_lossy().to_string().as_str()])
        .args([
            "-vf",
            // `between` is inclusive on both ends; `-vframes` below caps the output to
            // exactly `frame_count` frames
            format!(
                r#"select=between(n\,{}\,{}),setpts=PTS-STARTPTS"#,
                start_frame,
                start_frame + frame_count
            )
            .as_str(),
        ])
        .args(["-vframes", frame_count.to_string().as_str()])
        .arg(format!("{}/%03d.jpg", frame_dir.path().to_string_lossy()))
        .output()?;
    if !extract_output.status.success() {
        bail!(
            "{}",
            String::from_utf8_lossy(extract_output.stderr.as_slice())
        )
    }

    // `read_dir` yields entries in a platform-dependent order. the zero-padded `%03d.jpg`
    // names sort lexicographically in frame order, so collect and sort the paths first —
    // otherwise the returned hashes may be shuffled and the sliding-window comparison in
    // `check_frame_windows` silently breaks
    let mut frame_files = vec![];
    for file in frame_dir.path().read_dir()? {
        frame_files.push(file?.path());
    }
    frame_files.sort();

    let mut hashes = Vec::with_capacity(frame_files.len());
    for path in frame_files {
        let img = image::open(path)?;
        hashes.push(hasher.hash_image(&img))
    }
    Ok(hashes)
}
/// Compares `base_hashes` against every sliding window of the same length in
/// `check_hashes` and returns the average hash distance per window.
///
/// The value at index `i` is the mean [`ImageHash::dist`] between `base_hashes` and
/// `check_hashes[i..i + base_hashes.len()]`; lower means more similar. Returns an empty
/// vec when `check_hashes` is too short to contain a single full window (or
/// `base_hashes` is empty).
fn check_frame_windows(base_hashes: &[ImageHash], check_hashes: &[ImageHash]) -> Vec<f64> {
    // guard against `check_hashes` being shorter than the window size — the previous
    // unchecked `check_hashes.len() - base_hashes.len()` panicked with a usize underflow
    // here. the empty-base guard additionally avoids a 0/0 = NaN result below
    if base_hashes.is_empty() || check_hashes.len() < base_hashes.len() {
        return vec![];
    }
    // inclusive upper bound: the old exclusive range skipped the very last full window
    // (and returned nothing at all when both slices had equal length)
    (0..=(check_hashes.len() - base_hashes.len()))
        .map(|i| {
            let check_window = &check_hashes[i..(base_hashes.len() + i)];
            let sum = std::iter::zip(base_hashes, check_window)
                .map(|(a, b)| a.dist(b))
                .sum::<u32>();
            sum as f64 / check_window.len() as f64
        })
        .collect()
}
/// Formats a [`TimeDelta`] as an ASS subtitle timestamp (`H:MM:SS.cc`).
///
/// The ASS spec uses a single-digit hour and *centiseconds* (two fractional digits), not
/// milliseconds — the `format_naive_time` helper this function replaced truncated to two
/// digits for exactly that reason, so the millisecond remainder is divided down here.
fn format_time_delta(time_delta: TimeDelta) -> String {
    let hours = time_delta.num_hours();
    // remainder of the next-larger unit instead of the total count
    let minutes = time_delta.num_minutes() - time_delta.num_hours() * 60;
    let seconds = time_delta.num_seconds() - time_delta.num_minutes() * 60;
    // ASS timestamps carry centiseconds (2 digits), not milliseconds (3 digits)
    let centiseconds = (time_delta.num_milliseconds() - time_delta.num_seconds() * 1000) / 10;
    format!(
        "{}:{:0>2}:{:0>2}.{:0>2}",
        hours, minutes, seconds, centiseconds
    )
}
/// Sums up the lengths of all given segments into a single [`TimeDelta`].
fn len_from_segments(segments: &[StreamSegment]) -> TimeDelta {
    // sum in milliseconds (u128 to avoid intermediate overflow), then convert once
    let total_millis: u128 = segments.iter().map(|segment| segment.length.as_millis()).sum();
    TimeDelta::milliseconds(total_millis as i64)
}

View file

@ -7,7 +7,7 @@ use std::pin::Pin;
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};
use std::task::{Context, Poll}; use std::task::{Context, Poll};
use std::{env, fs, io}; use std::{env, fs, io};
use tempfile::{Builder, NamedTempFile, TempPath}; use tempfile::{Builder, NamedTempFile, TempDir, TempPath};
use tokio::io::{AsyncRead, ReadBuf}; use tokio::io::{AsyncRead, ReadBuf};
pub fn has_ffmpeg() -> bool { pub fn has_ffmpeg() -> bool {
@ -31,7 +31,7 @@ pub fn temp_directory() -> PathBuf {
} }
/// Any tempfile should be created with this function. The prefix and directory of every file /// Any tempfile should be created with this function. The prefix and directory of every file
/// created with this method stays the same which is helpful to query all existing tempfiles and /// created with this function stays the same which is helpful to query all existing tempfiles and
/// e.g. remove them in a case of ctrl-c. Having one function also good to prevent mistakes like /// e.g. remove them in a case of ctrl-c. Having one function also good to prevent mistakes like
/// setting the wrong prefix if done manually. /// setting the wrong prefix if done manually.
pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> { pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
@ -46,6 +46,22 @@ pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
Ok(tempfile) Ok(tempfile)
} }
/// Any tempdir should be created with this function. The prefix and directory of every
/// directory created with this function stays the same, which is helpful to query all
/// existing tempdirs and e.g. remove them in case of ctrl-c. Having one function is also
/// good to prevent mistakes like setting the wrong prefix if done manually.
pub fn tempdir<S: AsRef<str>>(suffix: S) -> io::Result<TempDir> {
    // same `.crunchy-cli_` prefix as `tempfile` above so cleanup logic finds both
    let tempdir = Builder::default()
        .prefix(".crunchy-cli_")
        .suffix(suffix.as_ref())
        .tempdir_in(temp_directory())?;
    debug!(
        "Created temporary directory: {}",
        tempdir.path().to_string_lossy()
    );
    Ok(tempdir)
}
pub fn cache_dir<S: AsRef<str>>(name: S) -> io::Result<PathBuf> { pub fn cache_dir<S: AsRef<str>>(name: S) -> io::Result<PathBuf> {
let cache_dir = temp_directory().join(format!(".crunchy-cli_{}_cache", name.as_ref())); let cache_dir = temp_directory().join(format!(".crunchy-cli_{}_cache", name.as_ref()));
fs::create_dir_all(&cache_dir)?; fs::create_dir_all(&cache_dir)?;

View file

@ -2,7 +2,7 @@ use anyhow::{bail, Result};
use crunchyroll_rs::media::{Resolution, Stream, StreamData}; use crunchyroll_rs::media::{Resolution, Stream, StreamData};
use crunchyroll_rs::Locale; use crunchyroll_rs::Locale;
pub async fn variant_data_from_stream( pub async fn stream_data_from_stream(
stream: &Stream, stream: &Stream,
resolution: &Resolution, resolution: &Resolution,
subtitle: Option<Locale>, subtitle: Option<Locale>,