update to latest nixpkgs/rustc

chaos 2023-08-31 22:08:21 +01:00
parent 7ef7093eb7
commit 253db1cbd5
10 changed files with 611 additions and 490 deletions

Cargo.lock (generated): 961 changed lines; diff suppressed because it is too large.


@@ -13,47 +13,44 @@ members = [
 [dependencies]
 # for decode/encoding yaml/json for transcode config & ffprobe output
-serde = { version = "1.0.143", features = ["derive"] }
+serde = { version = "1.0.0", features = ["derive"] }
-serde_yaml = "0.9.9"
+serde_yaml = "0.9"
 serde_json = "1.0"
-serde_with = "1.3.1"
+serde_with = "3"
 # argument parsing
-clap = { version = "3.2.17", features = ["derive"] }
+clap = { version = "4", features = ["derive"] }
 # ascii_reduce constants & transcode presets
-lazy_static = "1.4.0"
+lazy_static = "1"
 # for scan_for_music
-walkdir = "2.3.2"
+walkdir = "2"
 # format detection
-infer = "0.12.0"
+infer = "0.15"
-bytes = "1.3.0"
+bytes = "1"
 # tag reading
-id3 = "1.3.0"
+id3 = "1"
-metaflac = "0.2.5"
+metaflac = "0.2"
 taglib = { path = "./modules/taglib", optional = true }
 # for genhtml command
-html-escape = "0.2.11"
+html-escape = "0.2"
-urlencoding = "2.1.2"
+urlencoding = "2"
 # error handling
-thiserror = "1.0"
+thiserror = "1"
-string-error = "0.1.0"
+string-error = "0.1"
 # temporary file for transcode prefix file
 tempfile = "3"
 # for reading ffmpeg progress output file
-notify = "4.0.17"
+notify = "6"
-# scoped threads
-crossbeam = "0.8"

 [features]
 default = ["taglib"]
 taglib = ["dep:taglib"]
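Most of these bumps just loosen the semver pins, but clap 3 to 4 changes the derive attribute names: `#[clap(...)]` splits into `#[command(...)]` on the struct and `#[arg(...)]` on fields. A minimal sketch of the clap 4 derive style; the `Cli` struct and its field are hypothetical stand-ins, not this crate's actual `CLIArgs`:

use clap::Parser;

// Hypothetical argument struct, shown only to illustrate the clap 4 derive names.
#[derive(Parser)]
#[command(version, about = "A tool for organising a music library")]
struct Cli {
    // clap 3 wrote #[clap(short, long)]; clap 4 renames the field attribute to #[arg(...)].
    #[arg(short, long)]
    threads: Option<usize>,
}

fn main() {
    let cli = Cli::parse();
    println!("threads: {:?}", cli.threads);
}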


@@ -1,6 +1,8 @@
-(import (let lock = builtins.fromJSON (builtins.readFile ./flake.lock);
-in fetchTarball {
-  url =
-    "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
-  sha256 = lock.nodes.flake-compat.locked.narHash;
-}) { src = ./.; }).defaultNix
+(import (let
+  lock = builtins.fromJSON (builtins.readFile ./flake.lock);
+in
+  fetchTarball {
+    url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
+    sha256 = lock.nodes.flake-compat.locked.narHash;
+  }) {src = ./.;})
+.defaultNix


@@ -18,16 +18,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1685573264,
+        "lastModified": 1693377291,
-        "narHash": "sha256-Zffu01pONhs/pqH07cjlF10NnMDLok8ix5Uk4rhOnZQ=",
+        "narHash": "sha256-vYGY9bnqEeIncNarDZYhm6KdLKgXMS+HA2mTRaWEc80=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "380be19fbd2d9079f677978361792cb25e8a3635",
+        "rev": "e7f38be3775bab9659575f192ece011c033655f0",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "release-22.05",
+        "ref": "nixos-unstable",
         "repo": "nixpkgs",
         "type": "github"
       }
@@ -59,11 +59,11 @@
         "systems": "systems"
       },
       "locked": {
-        "lastModified": 1689068808,
+        "lastModified": 1692799911,
-        "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
+        "narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
+        "rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44",
         "type": "github"
       },
       "original": {


@@ -2,7 +2,7 @@
   description = "A tool for organising a music library";

   inputs = {
-    nixpkgs.url = "github:NixOS/nixpkgs/release-22.05";
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
     utils.url = "github:numtide/flake-utils";
     flake-compat = {
       url = "github:edolstra/flake-compat";


@@ -9,6 +9,6 @@ edition = "2021"
 thiserror = "1.0"

 [build-dependencies]
-cc = "1.0"
+cc = "1"
-pkg-config = "0.3.26"
+pkg-config = "0.3"
-bindgen = "0.63.0"
+bindgen = "0.66"
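bindgen 0.63 to 0.66 mostly tracks newer libclang releases; the builder API itself is unchanged, so a build script of this shape compiles under either version. A hypothetical sketch: the `wrapper.h` header name and the `taglib_c` pkg-config name are assumptions, since the module's actual build.rs is not part of this diff:

// build.rs (hypothetical sketch)
fn main() {
    // Locate the system library and its include paths via pkg-config.
    let lib = pkg_config::probe_library("taglib_c").expect("taglib_c not found");

    let bindings = bindgen::Builder::default()
        .header("wrapper.h") // assumed header name
        .clang_args(lib.include_paths.iter().map(|p| format!("-I{}", p.display())))
        .generate()
        .expect("bindgen failed");

    let out = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out.join("bindings.rs"))
        .expect("could not write bindings.rs");
}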


@@ -5,10 +5,9 @@ use std::{
     process::exit,
     str::FromStr,
     sync::{Arc, Mutex},
+    thread::scope,
 };

-use crossbeam::scope;
-
 use crate::{
     args::{CLIArgs, CopyCommandArgs},
     types::File,
@@ -210,7 +209,7 @@ fn transcode_files(
         scope(|s| {
             for _ in 0..copy_args.threads.unwrap() {
-                s.spawn(|_| loop {
+                s.spawn(|| loop {
                     let mut jobs = jobs.lock().unwrap();
                     let job = jobs.pop();
                     if job.is_none() {
@@ -230,8 +229,7 @@ fn transcode_files(
                     }
                 });
             }
-        })
-        .expect("threads haunted");
+        });
     } else {
         for file in files.iter() {
             transcode_file(file, copy_args, transcode_config.clone(), false)?;
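The crossbeam dependency is dropped in favour of `std::thread::scope`, stable since Rust 1.63. Two API differences drive the edits above: the `spawn` closure no longer receives a scope argument (hence `|_|` becoming `||`), and `scope` returns the closure's value directly rather than a `Result`, so the trailing `.expect("threads haunted")` goes away. A minimal standalone sketch of the std API:

use std::thread::scope;

fn main() {
    let data = vec![1, 2, 3];
    // scope() joins every spawned thread before it returns,
    // which is what makes borrowing `data` from the workers sound.
    scope(|s| {
        for n in &data {
            // Unlike crossbeam's scope, the closure takes no argument.
            s.spawn(move || println!("saw {n}"));
        }
    }); // returns the closure's value directly; there is no Result to unwrap
}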


@@ -67,8 +67,8 @@ fn table_for_files(files: Vec<File>, includes_path: bool, link_base: Option<Stri
         }
         let mut url_data = String::new();
         if link_base.is_some() {
-        let mut url = String::new();
-        let link_base_str = link_base.as_ref().unwrap().clone();
+            let mut url = String::new();
+            let link_base_str = link_base.as_ref().unwrap().clone();
             url.push_str(link_base_str.as_str());
             url.push('/');
@@ -76,13 +76,16 @@
             let file_path = file.join_path_from_source();
             let file_path: Vec<&OsStr> = file_path.iter().collect();
             for i in 0..(file_path.len()) {
-                url.push_str(url_encode(file_path.get(i).unwrap().to_str().unwrap()).to_string().as_str());
-                if i != file_path.len()-1 {
-                    url.push('/');
-                }
+                url.push_str(
+                    url_encode(file_path.get(i).unwrap().to_str().unwrap())
+                        .to_string()
+                        .as_str(),
+                );
+                if i != file_path.len() - 1 {
+                    url.push('/');
+                }
             }
             url_data.push_str(format!("<td><a href=\"{}\">🔗</a></td>", url).as_str());
         }
@@ -168,7 +171,11 @@ pub fn genhtml_command(
             .as_str(),
     );
-    html_content.push_str(&table_for_files(files, true, genhtml_args.link_base.clone()));
+    html_content.push_str(&table_for_files(
+        files,
+        true,
+        genhtml_args.link_base.clone(),
+    ));
     html_content.push_str("</body></html>");
     let file_path = std::path::PathBuf::from(genhtml_args.dest.as_str()).join("index.html");
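The reflowed loop percent-encodes each path segment and joins the pieces with `/`. For comparison, the same logic as an iterator chain; this assumes `url_encode` is `urlencoding::encode` from the `urlencoding` crate listed in Cargo.toml, which the hunk itself does not show:

use std::ffi::OsStr;

// Sketch: encode each path segment and join with '/'.
fn encode_path(segments: &[&OsStr]) -> String {
    segments
        .iter()
        // to_str() assumes valid UTF-8 path segments, matching the
        // unwrap() calls in the original loop.
        .map(|seg| urlencoding::encode(seg.to_str().unwrap()).into_owned())
        .collect::<Vec<_>>()
        .join("/")
}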


@@ -1,7 +1,6 @@
 use std::sync::Arc;
 use std::sync::Mutex;
+use std::thread::scope;

-use crossbeam::scope;
-
 use crate::args::CLIArgs;
 use crate::args::ProcessCommandArgs;
@@ -168,7 +167,7 @@ pub fn process_command(
         scope(|s| {
             for _ in 0..process_args.replaygain_threads.unwrap() {
-                s.spawn(|_| loop {
+                s.spawn(|| loop {
                     let mut jobs = jobs.lock().unwrap();
                     let job = jobs.pop();
                     if job.is_none() {
@@ -183,8 +182,7 @@ pub fn process_command(
                     }
                 });
             }
-        })
-        .expect("threads haunted");
+        });
     } else {
         for file in files.iter_mut() {
             add_replaygain_tags(file, process_args.force_replaygain)?;
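This is the same migration as in the transcode path: both call sites share one worker-pool pattern, N scoped threads popping from a `Mutex<Vec<Job>>` until it is empty. A standalone sketch of that pattern under std scoped threads; the `Job` alias is a stand-in, not the crate's real job type:

use std::sync::Mutex;
use std::thread::scope;

fn main() {
    type Job = u32; // stand-in for the real per-file job type
    let jobs: Mutex<Vec<Job>> = Mutex::new((0..10).collect());
    let threads = 4;
    scope(|s| {
        for _ in 0..threads {
            s.spawn(|| loop {
                // Lock, pop one job, and release the lock before working on it.
                let job = jobs.lock().unwrap().pop();
                let Some(job) = job else { break };
                println!("processing job {job}");
            });
        }
    });
}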


@@ -8,7 +8,7 @@ use std::{
     time::Duration,
 };

-use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher};
+use notify::{EventKind, RecommendedWatcher, RecursiveMode, Watcher};
 use serde::Deserialize;
 use string_error::static_err;
@@ -81,16 +81,27 @@
         let _ = &tempdir;
         let (tx, rx) = mpsc::channel();
-        let mut watcher = watcher(tx, Duration::from_millis(100)).unwrap();
+        let mut watcher = RecommendedWatcher::new(
+            tx,
+            notify::Config::default().with_poll_interval(Duration::from_millis(100)),
+        )
+        .expect("could not watch for ffmpeg log progress status");
         watcher
             .watch(&file_path, RecursiveMode::NonRecursive)
             .unwrap();
         let mut pos = 0;
-        'outer: loop {
-            match rx.recv() {
-                Ok(DebouncedEvent::Write(_)) => {
+        'outer: for res in rx {
+            if res.is_err() {
+                break 'outer;
+            }
+            let res = res.unwrap();
+            match res.kind {
+                EventKind::Modify(_) => {
                     let mut file = fs::File::open(&file_path).unwrap();
                     file.seek(SeekFrom::Start(pos)).unwrap();
@@ -119,13 +130,8 @@
                     }
                 }
             }
-                Ok(DebouncedEvent::NoticeRemove(_)) => {
-                    break 'outer;
-                }
-                Ok(_) => {}
-                Err(_) => {
-                    break 'outer;
-                }
+                EventKind::Remove(_) => break 'outer,
+                _ => {}
             }
         }
     });
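notify 4 to 6 removes the built-in debouncer: `watcher(tx, delay)` and the `DebouncedEvent` enum are gone, the watcher is built from an event handler plus a `Config`, and the receiver now yields `Result<Event>` whose `kind` is matched instead. A minimal sketch of the v6 flow, independent of this crate's surrounding code:

use std::{path::Path, sync::mpsc, time::Duration};

use notify::{Config, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

fn watch(path: &Path) -> notify::Result<()> {
    // A plain mpsc Sender implements notify's EventHandler trait.
    let (tx, rx) = mpsc::channel::<notify::Result<Event>>();
    let mut watcher = RecommendedWatcher::new(
        tx,
        Config::default().with_poll_interval(Duration::from_millis(100)),
    )?;
    watcher.watch(path, RecursiveMode::NonRecursive)?;
    // Raw (undebounced) events arrive as Result<Event>; the old
    // DebouncedEvent::Write / NoticeRemove variants map onto EventKind.
    for res in rx {
        match res?.kind {
            EventKind::Modify(_) => println!("file written"),
            EventKind::Remove(_) => break,
            _ => {}
        }
    }
    Ok(())
}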