update to latest nixpkgs/rustc

chaos 2023-08-31 22:08:21 +01:00
parent 7ef7093eb7
commit 253db1cbd5
10 changed files with 611 additions and 490 deletions

Cargo.lock (generated)

File diff suppressed because it is too large.

Cargo.toml

@ -13,47 +13,44 @@ members = [
 [dependencies]
 # for decode/encoding yaml/json for transcode config & ffprobe output
-serde = { version = "1.0.143", features = ["derive"] }
-serde_yaml = "0.9.9"
+serde = { version = "1.0.0", features = ["derive"] }
+serde_yaml = "0.9"
 serde_json = "1.0"
-serde_with = "1.3.1"
+serde_with = "3"
 # argument parsing
-clap = { version = "3.2.17", features = ["derive"] }
+clap = { version = "4", features = ["derive"] }
 # ascii_reduce constants & transcode presets
-lazy_static = "1.4.0"
+lazy_static = "1"
 # for scan_for_music
-walkdir = "2.3.2"
+walkdir = "2"
 # format detection
-infer = "0.12.0"
-bytes = "1.3.0"
+infer = "0.15"
+bytes = "1"
 # tag reading
-id3 = "1.3.0"
-metaflac = "0.2.5"
+id3 = "1"
+metaflac = "0.2"
 taglib = { path = "./modules/taglib", optional = true }
 # for genhtml command
-html-escape = "0.2.11"
-urlencoding = "2.1.2"
+html-escape = "0.2"
+urlencoding = "2"
 # error handling
-thiserror = "1.0"
-string-error = "0.1.0"
+thiserror = "1"
+string-error = "0.1"
 # temporary file for transcode prefix file
 tempfile = "3"
 # for reading ffmpeg progress output file
-notify = "4.0.17"
-# scoped threads
-crossbeam = "0.8"
+notify = "6"
 [features]
 default = ["taglib"]
-taglib = ["dep:taglib"]
+taglib = ["dep:taglib"]
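
Note: clap moves from 3.2 to 4 and keeps the derive feature; clap 4 prefers the #[command(...)] / #[arg(...)] derive attributes over 3.x's #[clap(...)]. A minimal sketch of the clap 4 derive style follows — the struct and subcommand are illustrative, not this repo's actual argument types:

    use clap::{Parser, Subcommand};

    // Illustrative only; not this repo's actual CLIArgs/command definitions.
    #[derive(Parser)]
    #[command(name = "example", version, about = "clap 4 derive style")]
    struct Cli {
        #[command(subcommand)]
        command: Command,
    }

    #[derive(Subcommand)]
    enum Command {
        /// Transcode/copy files with an optional worker-thread count.
        Copy {
            #[arg(long)]
            threads: Option<usize>,
        },
    }

    fn main() {
        match Cli::parse().command {
            Command::Copy { threads } => println!("threads = {threads:?}"),
        }
    }

Cli::parse() still comes from the Parser trait as in clap 3, so existing call sites mostly need only the attribute rename.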

default.nix

@ -1,6 +1,8 @@
-(import (let lock = builtins.fromJSON (builtins.readFile ./flake.lock);
-in fetchTarball {
-url =
-"https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
-sha256 = lock.nodes.flake-compat.locked.narHash;
-}) { src = ./.; }).defaultNix
+(import (let
+lock = builtins.fromJSON (builtins.readFile ./flake.lock);
+in
+fetchTarball {
+url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
+sha256 = lock.nodes.flake-compat.locked.narHash;
+}) {src = ./.;})
+.defaultNix

flake.lock

@ -18,16 +18,16 @@
 },
 "nixpkgs": {
 "locked": {
-"lastModified": 1685573264,
-"narHash": "sha256-Zffu01pONhs/pqH07cjlF10NnMDLok8ix5Uk4rhOnZQ=",
+"lastModified": 1693377291,
+"narHash": "sha256-vYGY9bnqEeIncNarDZYhm6KdLKgXMS+HA2mTRaWEc80=",
 "owner": "NixOS",
 "repo": "nixpkgs",
-"rev": "380be19fbd2d9079f677978361792cb25e8a3635",
+"rev": "e7f38be3775bab9659575f192ece011c033655f0",
 "type": "github"
 },
 "original": {
 "owner": "NixOS",
-"ref": "release-22.05",
+"ref": "nixos-unstable",
 "repo": "nixpkgs",
 "type": "github"
 }
@ -59,11 +59,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1689068808,
"narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
"lastModified": 1692799911,
"narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
"rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44",
"type": "github"
},
"original": {

flake.nix

@ -2,7 +2,7 @@
 description = "A tool for organising a music library";
 inputs = {
-nixpkgs.url = "github:NixOS/nixpkgs/release-22.05";
+nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
 utils.url = "github:numtide/flake-utils";
 flake-compat = {
 url = "github:edolstra/flake-compat";

modules/taglib/Cargo.toml

@ -9,6 +9,6 @@ edition = "2021"
thiserror = "1.0"
[build-dependencies]
cc = "1.0"
pkg-config = "0.3.26"
bindgen = "0.63.0"
cc = "1"
pkg-config = "0.3"
bindgen = "0.66"
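
Note: the taglib module's build dependencies move to bindgen 0.66 while cc and pkg-config stay on their current majors. A hypothetical build.rs shape for such a -sys style module is sketched below; the wrapper.h name and the taglib_c pkg-config package are assumptions for illustration, not taken from this repo, and the cc step that would compile any C shim is omitted:

    // build.rs, illustrative: header name and pkg-config package are assumptions.
    use std::{env, path::PathBuf};

    fn main() {
        // Locate TagLib's C bindings; pkg-config emits the cargo link flags.
        let taglib = pkg_config::Config::new()
            .probe("taglib_c")
            .expect("taglib C bindings not found via pkg-config");

        // bindgen 0.66: generate Rust declarations for the C API.
        let bindings = bindgen::Builder::default()
            .header("wrapper.h")
            .clang_args(taglib.include_paths.iter().map(|p| format!("-I{}", p.display())))
            .generate()
            .expect("bindgen failed");

        let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
        bindings
            .write_to_file(out_dir.join("bindings.rs"))
            .expect("could not write bindings.rs");
    }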

copy command

@ -5,10 +5,9 @@ use std::{
 process::exit,
 str::FromStr,
 sync::{Arc, Mutex},
+thread::scope,
 };
-use crossbeam::scope;
 use crate::{
 args::{CLIArgs, CopyCommandArgs},
 types::File,
@ -210,7 +209,7 @@ fn transcode_files(
 scope(|s| {
 for _ in 0..copy_args.threads.unwrap() {
-s.spawn(|_| loop {
+s.spawn(|| loop {
 let mut jobs = jobs.lock().unwrap();
 let job = jobs.pop();
 if job.is_none() {
@ -230,8 +229,7 @@ fn transcode_files(
 }
 });
 }
-})
-.expect("threads haunted");
+});
 } else {
 for file in files.iter() {
 transcode_file(file, copy_args, transcode_config.clone(), false)?;
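
Note: with crossbeam gone from Cargo.toml, the copy command's worker threads run under std::thread::scope, stable since Rust 1.63. The std closure takes no scope argument (hence |_| becoming ||), and scope() joins all workers and returns the closure's value instead of a Result, so the .expect("threads haunted") call disappears. A minimal sketch of the migrated shape, with illustrative names:

    use std::thread::scope;

    // Illustrative: spawn one scoped worker per item and borrow from the caller.
    fn run_in_parallel(items: &[String]) {
        scope(|s| {
            for item in items {
                // Unlike crossbeam's s.spawn(|_| ...), the closure takes no scope argument.
                s.spawn(move || {
                    println!("processing {item}");
                });
            }
        }); // scope() joins every worker here; a worker panic propagates as a panic
    }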

genhtml command

@ -67,8 +67,8 @@ fn table_for_files(files: Vec<File>, includes_path: bool, link_base: Option<Stri
 }
 let mut url_data = String::new();
-if link_base.is_some() {
-let mut url = String::new();
+if link_base.is_some() {
+let mut url = String::new();
 let link_base_str = link_base.as_ref().unwrap().clone();
 url.push_str(link_base_str.as_str());
 url.push('/');
@ -76,13 +76,16 @@ fn table_for_files(files: Vec<File>, includes_path: bool, link_base: Option<Stri
 let file_path = file.join_path_from_source();
 let file_path: Vec<&OsStr> = file_path.iter().collect();
 for i in 0..(file_path.len()) {
-url.push_str(url_encode(file_path.get(i).unwrap().to_str().unwrap()).to_string().as_str());
-if i != file_path.len()-1 {
-url.push('/');
-}
+url.push_str(
+url_encode(file_path.get(i).unwrap().to_str().unwrap())
+.to_string()
+.as_str(),
+);
+if i != file_path.len() - 1 {
+url.push('/');
+}
 }
 url_data.push_str(format!("<td><a href=\"{}\">🔗</a></td>", url).as_str());
 }
@ -168,7 +171,11 @@ pub fn genhtml_command(
 .as_str(),
 );
-html_content.push_str(&table_for_files(files, true, genhtml_args.link_base.clone()));
+html_content.push_str(&table_for_files(
+files,
+true,
+genhtml_args.link_base.clone(),
+));
 html_content.push_str("</body></html>");
 let file_path = std::path::PathBuf::from(genhtml_args.dest.as_str()).join("index.html");
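
Note: the genhtml change here is formatting only; the loop still percent-encodes each path component and joins them with '/' under link_base. A small sketch of the same idea, assuming the diff's url_encode helper behaves like urlencoding::encode (urlencoding is already a dependency); the function name and signature are illustrative:

    use std::{ffi::OsStr, path::Path};

    // Illustrative helper: percent-encode each component of a relative path and
    // join them with '/' under a base URL.
    fn link_for(link_base: &str, rel_path: &Path) -> String {
        let mut url = String::from(link_base);
        url.push('/');
        let parts: Vec<&OsStr> = rel_path.iter().collect();
        for (i, part) in parts.iter().enumerate() {
            url.push_str(&urlencoding::encode(part.to_str().unwrap()));
            if i != parts.len() - 1 {
                url.push('/');
            }
        }
        url
    }

For example, link_for("https://example.org/music", Path::new("Artist/01 Track.flac")) yields https://example.org/music/Artist/01%20Track.flac.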

process command

@ -1,7 +1,6 @@
 use std::sync::Arc;
 use std::sync::Mutex;
-use crossbeam::scope;
+use std::thread::scope;
 use crate::args::CLIArgs;
 use crate::args::ProcessCommandArgs;
@ -168,7 +167,7 @@ pub fn process_command(
 scope(|s| {
 for _ in 0..process_args.replaygain_threads.unwrap() {
-s.spawn(|_| loop {
+s.spawn(|| loop {
 let mut jobs = jobs.lock().unwrap();
 let job = jobs.pop();
 if job.is_none() {
@ -183,8 +182,7 @@ pub fn process_command(
 }
 });
 }
-})
-.expect("threads haunted");
+});
 } else {
 for file in files.iter_mut() {
 add_replaygain_tags(file, process_args.force_replaygain)?;
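
Note: process_command mirrors the copy command: a fixed number of scoped workers drain one shared Arc<Mutex<Vec<_>>> job queue until pop() returns None. A condensed sketch of that queue-draining pattern on std::thread::scope, with illustrative job and function names:

    use std::{
        sync::{Arc, Mutex},
        thread::scope,
    };

    // Illustrative: N scoped workers pop from one shared queue until it is empty.
    fn drain_jobs(queue: Vec<String>, workers: usize) {
        let jobs = Arc::new(Mutex::new(queue));
        scope(|s| {
            for _ in 0..workers {
                let jobs = Arc::clone(&jobs);
                s.spawn(move || loop {
                    // The MutexGuard is a temporary here, so the lock is
                    // released before the job itself runs.
                    let job = jobs.lock().unwrap().pop();
                    match job {
                        Some(job) => println!("running {job}"),
                        None => break,
                    }
                });
            }
        });
    }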

ffmpeg progress monitor

@ -8,7 +8,7 @@ use std::{
 time::Duration,
 };
-use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher};
+use notify::{EventKind, RecommendedWatcher, RecursiveMode, Watcher};
 use serde::Deserialize;
 use string_error::static_err;
@ -81,16 +81,27 @@ pub fn progress_monitor(
 let _ = &tempdir;
 let (tx, rx) = mpsc::channel();
-let mut watcher = watcher(tx, Duration::from_millis(100)).unwrap();
+let mut watcher = RecommendedWatcher::new(
+tx,
+notify::Config::default().with_poll_interval(Duration::from_millis(100)),
+)
+.expect("could not watch for ffmpeg log progress status");
 watcher
 .watch(&file_path, RecursiveMode::NonRecursive)
 .unwrap();
 let mut pos = 0;
-'outer: loop {
-match rx.recv() {
-Ok(DebouncedEvent::Write(_)) => {
+'outer: for res in rx {
+if res.is_err() {
+break 'outer;
+}
+let res = res.unwrap();
+match res.kind {
+EventKind::Modify(_) => {
 let mut file = fs::File::open(&file_path).unwrap();
 file.seek(SeekFrom::Start(pos)).unwrap();
@ -119,13 +130,8 @@ pub fn progress_monitor(
 }
 }
 }
-Ok(DebouncedEvent::NoticeRemove(_)) => {
-break 'outer;
-}
-Ok(_) => {}
-Err(_) => {
-break 'outer;
-}
+EventKind::Remove(_) => break 'outer,
+_ => {}
 }
 }
 });
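
Note: notify 6 removes the debounced watcher() constructor and DebouncedEvent that notify 4 provided; the channel now carries notify::Result<Event> values and the code matches on EventKind, treating Modify as new progress output and Remove as ffmpeg being finished. A condensed sketch of that watcher setup, assuming the same 100 ms poll interval; watch_progress and its comments are illustrative:

    use std::{path::Path, sync::mpsc, time::Duration};
    use notify::{Config, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

    // Illustrative: watch one file and react to modify/remove events, as the
    // progress monitor does for ffmpeg's progress log.
    fn watch_progress(path: &Path) -> notify::Result<()> {
        let (tx, rx) = mpsc::channel();
        // A std mpsc Sender<notify::Result<Event>> acts as the event handler.
        let mut watcher = RecommendedWatcher::new(
            tx,
            Config::default().with_poll_interval(Duration::from_millis(100)),
        )?;
        watcher.watch(path, RecursiveMode::NonRecursive)?;
        for event in rx {
            match event?.kind {
                EventKind::Modify(_) => { /* read newly appended progress lines */ }
                EventKind::Remove(_) => break, // log removed: ffmpeg is finished
                _ => {}
            }
        }
        Ok(())
    }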