Diffstat (limited to 'support/pkgcheck/src')
-rw-r--r--  support/pkgcheck/src/gparser.rs                   1
-rw-r--r--  support/pkgcheck/src/linkcheck.rs                 4
-rw-r--r--  support/pkgcheck/src/main.rs                    338
-rw-r--r--  support/pkgcheck/src/messages/errorsd.rs         26
-rw-r--r--  support/pkgcheck/src/messages/fatald.rs           1
-rw-r--r--  support/pkgcheck/src/messages/informationd.rs     1
-rw-r--r--  support/pkgcheck/src/messages/mod.rs             76
-rw-r--r--  support/pkgcheck/src/messages/warningsd.rs       19
-rw-r--r--  support/pkgcheck/src/recode.rs                    4
-rw-r--r--  support/pkgcheck/src/utils.rs                    46
10 files changed, 385 insertions, 131 deletions
diff --git a/support/pkgcheck/src/gparser.rs b/support/pkgcheck/src/gparser.rs
index 4c10ba9f7a..136f6f41b1 100644
--- a/support/pkgcheck/src/gparser.rs
+++ b/support/pkgcheck/src/gparser.rs
@@ -1,4 +1,3 @@
-
use log::*;
use pest::Parser;
diff --git a/support/pkgcheck/src/linkcheck.rs b/support/pkgcheck/src/linkcheck.rs
index ba0cd73cea..bd61239ef2 100644
--- a/support/pkgcheck/src/linkcheck.rs
+++ b/support/pkgcheck/src/linkcheck.rs
@@ -121,7 +121,7 @@ fn check_link(url: &str, fname: &str, urlhash: &Arc<Mutex<UrlHash>>, print_all:
match check_link_inner(&url, true) {
UrlStatus::UrlOk => {
let mut urlhash = urlhash.lock().unwrap();
- if let Some(mut hs) = urlhash.get_mut(&url) {
+ if let Some(hs) = urlhash.get_mut(&url) {
if print_all {
for p in hs.paths.iter() {
print_ok(super::ARGS.no_colors, &url, p);
@@ -132,7 +132,7 @@ fn check_link(url: &str, fname: &str, urlhash: &Arc<Mutex<UrlHash>>, print_all:
}
UrlStatus::UrlError(e) => {
let mut urlhash = urlhash.lock().unwrap();
- if let Some(mut hs) = urlhash.get_mut(&url) {
+ if let Some(hs) = urlhash.get_mut(&url) {
for p in hs.paths.iter() {
e0022!(p, e);
}
diff --git a/support/pkgcheck/src/main.rs b/support/pkgcheck/src/main.rs
index 9ff8390f9c..6ad786dcd9 100644
--- a/support/pkgcheck/src/main.rs
+++ b/support/pkgcheck/src/main.rs
@@ -26,10 +26,10 @@ use std::fmt::Display;
use std::str;
use utils::*;
+use scoped_threadpool::Pool;
use serde::{Deserialize, Serialize};
-use std::os::unix::fs::MetadataExt;
use std::borrow::Cow;
-use scoped_threadpool::Pool;
+use std::os::unix::fs::MetadataExt;
use tempfile::Builder;
@@ -54,16 +54,92 @@ use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use rustc_hash::{FxHashMap, FxHashSet};
+use std::time::SystemTime;
use std::fmt::Arguments;
use std::sync::mpsc::{channel, Sender};
-use clap::{Command, CommandFactory, Parser, ValueHint};
-use clap_complete::{generate, Generator, Shell};
+use clap::builder::PossibleValue;
+use clap::{Command, CommandFactory, Parser, ValueEnum, ValueHint};
+//use clap_complete::{generate, Generator, Shell};
+use clap_complete::{shells, Generator};
+use clap_complete_nushell::Nushell;
#[cfg(unix)]
use walkdir::{DirEntry, WalkDir};
+/// Shell with auto-generated completion script available.
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+#[non_exhaustive]
+pub enum Shell {
+ /// Bourne Again SHell (bash)
+ Bash,
+ /// Elvish shell
+ Elvish,
+ /// Friendly Interactive SHell (fish)
+ Fish,
+ /// PowerShell
+ PowerShell,
+ /// Z SHell (zsh)
+ Zsh,
+ /// Nu shell (nu)
+ Nu,
+}
+
+impl Generator for Shell {
+ fn file_name(&self, name: &str) -> String {
+ match self {
+ Shell::Bash => shells::Bash.file_name(name),
+ Shell::Elvish => shells::Elvish.file_name(name),
+ Shell::Fish => shells::Fish.file_name(name),
+ Shell::PowerShell => shells::PowerShell.file_name(name),
+ Shell::Zsh => shells::Zsh.file_name(name),
+ Shell::Nu => Nushell.file_name(name),
+ }
+ }
+
+ fn generate(&self, cmd: &clap::Command, buf: &mut dyn std::io::Write) {
+ match self {
+ Shell::Bash => shells::Bash.generate(cmd, buf),
+ Shell::Elvish => shells::Elvish.generate(cmd, buf),
+ Shell::Fish => shells::Fish.generate(cmd, buf),
+ Shell::PowerShell => shells::PowerShell.generate(cmd, buf),
+ Shell::Zsh => shells::Zsh.generate(cmd, buf),
+ Shell::Nu => Nushell.generate(cmd, buf),
+ }
+ }
+}
+
+// Hand-rolled so it can work even when `derive` feature is disabled
+impl ValueEnum for Shell {
+ fn value_variants<'a>() -> &'a [Self] {
+ &[
+ Shell::Bash,
+ Shell::Elvish,
+ Shell::Fish,
+ Shell::PowerShell,
+ Shell::Zsh,
+ Shell::Nu,
+ ]
+ }
+
+ fn to_possible_value<'a>(&self) -> Option<PossibleValue> {
+ Some(match self {
+ Shell::Bash => PossibleValue::new("bash"),
+ Shell::Elvish => PossibleValue::new("elvish"),
+ Shell::Fish => PossibleValue::new("fish"),
+ Shell::PowerShell => PossibleValue::new("powershell"),
+ Shell::Zsh => PossibleValue::new("zsh"),
+ Shell::Nu => PossibleValue::new("nu"),
+ })
+ }
+}
+
+fn is_future_mtime(now: SystemTime, mtime: SystemTime) -> bool {
+ mtime > now
+ //mtime > now + Duration::new(1800, 0)
+}
+
fn format_message(message: &Arguments, no_color: bool) -> Cow<'static, str> {
let msg_str = format!("{}", message);
if msg_str.starts_with(' ') {
@@ -99,7 +175,7 @@ pub struct PathExceptions {
fn get_config_file_name() -> Option<String> {
if let Some(config_file) = &ARGS.config_file {
if Path::new(&config_file).exists() {
- return Some(config_file.to_string())
+ return Some(config_file.to_string());
} else {
f0008!(config_file);
std::process::exit(1);
@@ -124,6 +200,8 @@ fn read_yaml_config() -> FxHashMap<String, String> {
for (p, q) in [
("armtex", "armenian"),
("babel-base", "babel"),
+ ("l3backend-dev", "latex-dev/l3backend"),
+ ("l3kernel-dev", "latex-dev/l3kernel"),
("latex-amsmath", "latex"),
("latex-amsmath-dev", "latex-dev"),
("latex-base", "latex"),
@@ -148,7 +226,10 @@ fn read_yaml_config() -> FxHashMap<String, String> {
let data = match fs::read_to_string(&config_filename) {
Ok(str) => str,
- Err(e) => { f0009!(&config_filename, e); std::process::exit(1); }
+ Err(e) => {
+ f0009!(&config_filename, e);
+ std::process::exit(1);
+ }
};
let path_exceptions = serde_yaml::from_str::<PathExceptions>(&data);
@@ -171,7 +252,10 @@ fn read_yaml_config() -> FxHashMap<String, String> {
}
pb
}
- Err(e) => { f0010!(e); std::process::exit(1);},
+ Err(e) => {
+ f0010!(e);
+ std::process::exit(1);
+ }
};
pkg_replacements
}
@@ -277,7 +361,11 @@ fn check_readme(dir_entry: &str, is_readme: &ReadmeKind, ft: &filemagic::Mimetyp
return false;
}
filemagic::Mimetype::Bom(b) => {
- e0029!(msg_name, b.as_ref());
+ // replaced: e0029!(msg_name, b.as_ref());
+ e0029!(
+ msg_name,
+ <unicode_bom::Bom as std::convert::AsRef<str>>::as_ref(b)
+ );
return false;
}
filemagic::Mimetype::Text(_le) => match File::open(dir_entry) {
@@ -329,55 +417,58 @@ fn _get_devno(entry: &DirEntry) -> u64 {
#[derive(Parser, Debug, PartialEq)]
#[clap(author, version, about, long_about = None)]
+#[command(arg_required_else_help(true))]
struct Args {
- #[clap(short = 'I', long = "ignore-dupes", help = "Ignore dupes")]
+ #[arg(short = 'I', long = "ignore-dupes", help = "Ignore dupes")]
ignore_dupes: bool,
- #[clap(long = "ignore-same-named", help = "Ignore same-named files")]
+ #[arg(long = "ignore-same-named", help = "Ignore same-named files")]
ignore_same_named: bool,
- #[clap(short = 'v', long = "verbose", help = "Verbose operation?")]
+ #[arg(short = 'v', long = "verbose", help = "Verbose operation?")]
verbose: bool,
- #[clap(short = 'L', long = "correct-le", help = "Correct line endings")]
+ #[arg(short = 'L', long = "correct-le", help = "Correct line endings")]
correct_le: bool,
- #[clap(short = 'C', long = "correct-perms", help = "Correct permissions")]
+ #[arg(short = 'C', long = "correct-perms", help = "Correct permissions")]
correct_perms: bool,
- #[clap(long = "no-colors", help = "Don't display messages in color")]
+ #[arg(long = "no-colors", help = "Don't display messages in color")]
no_colors: bool,
- #[clap(long = "urlcheck", help = "Check URLs found in README files")]
+ #[arg(long = "urlcheck", help = "Check URLs found in README files")]
urlcheck: bool,
- #[clap(short = 'T', long = "tds-zip", help = "tds zip archive", group = "tds", value_hint = ValueHint::FilePath)]
+ #[arg(short = 'T', long = "tds-zip", help = "tds zip archive", group = "tds", value_hint = ValueHint::FilePath)]
tds_zip: Option<String>,
- #[clap(
+ #[arg(
short = 'e',
long = "explain",
help = "Explain error or warning message",
group = "only_one"
)]
explain: Option<String>,
- #[clap(
+ #[arg(
long = "explain-all",
help = "Explains all error or warning messages",
group = "only_one"
)]
explain_all: bool,
- #[clap(long = "generate-completion", group = "only_one", value_enum)]
+ #[arg(long = "generate-completion", group = "only_one", value_enum)]
generator: Option<Shell>,
- #[clap(
+ #[arg(
long = "show-temp-endings",
help = "Show file endings for temporary files",
group = "only_one"
)]
show_tmp_endings: bool,
- #[clap(short = 'd', long = "package-dir", help = "Package directory", value_hint = ValueHint::DirPath)]
+ #[arg(short = 'd', long = "package-dir", help = "Package directory", value_hint = ValueHint::DirPath)]
pkg_dir: Option<String>,
- #[clap(long = "config-file", help = "Specify config file to use", value_hint = ValueHint::FilePath)]
+ #[arg(long = "config-file", help = "Specify config file to use", value_hint = ValueHint::FilePath)]
config_file: Option<String>,
}
// In the past we took care to avoid visiting a single inode twice, which takes care of (false positive) hardlinks.
// Now we want to know if there is a hardlink in the package directory
#[cfg(unix)]
-fn check_inode(set: &mut FxHashMap<(u64, u64), Vec<String>>, filename: &str, meta: &Metadata) {
- set.entry((get_devno(meta), meta.ino())).or_insert_with(Vec::new).push(filename.to_string());
+fn check_inode(set: &mut FxHashMap<(u64, u64), Vec<String>>, filename: &str, meta: &Metadata) {
+ set.entry((get_devno(meta), meta.ino()))
+ .or_default()
+ .push(filename.to_string());
}
#[cfg(not(unix))]
@@ -387,6 +478,8 @@ fn check_inode(_: &mut FxHashSet<u64>, _: &Metadata) -> bool {
static ARGS: Lazy<Args> = Lazy::new(Args::parse);
static ERROR_OCCURRED: AtomicBool = AtomicBool::new(false);
+//Get the current time
+static NOW: Lazy<SystemTime> = Lazy::new(SystemTime::now);
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DPath {
@@ -434,14 +527,15 @@ impl DupPath {
type DupHashes = FxHashMap<(u64, Vec<u8>), DupPath>;
fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
- generate(gen, cmd, cmd.get_name().to_string(), &mut io::stdout());
+ //generate(gen, cmd, cmd.get_name().to_string(), &mut io::stdout());
+ clap_complete::generate(gen, cmd, "pkgcheck", &mut std::io::stdout());
}
fn main() {
let _ = setup_logger(ARGS.no_colors);
// read yaml config file if one is given explicitly or implicitly
- let pkg_replace : FxHashMap<String, String> = read_yaml_config();
+ let pkg_replace: FxHashMap<String, String> = read_yaml_config();
match &ARGS.explain {
None => (),
@@ -716,9 +810,16 @@ fn check_tds_archive_name(tds_zip: &Option<String>) -> Option<String> {
// }
// }
-fn check_tds_archive(pkg_name: &str, tds_zip: &str, hashes: &DupHashes, pkg_replace: &FxHashMap<String, String>) {
+fn check_tds_archive(
+ pkg_name: &str,
+ tds_zip: &str,
+ hashes: &DupHashes,
+ pkg_replace: &FxHashMap<String, String>,
+) {
i0003!(tds_zip);
+ let mut lcnames: FxHashMap<PathBuf, Vec<(PathBuf, FileKind)>> = FxHashMap::default();
+
let dir_entry = Path::new(tds_zip);
let p = get_perms(dir_entry);
if !owner_has(p, 4) || !others_have(p, 4) || x_bit_set(p) {
@@ -731,12 +832,6 @@ fn check_tds_archive(pkg_name: &str, tds_zip: &str, hashes: &DupHashes, pkg_repl
let ut = Utils::new(utils::CheckType::Tds);
- let real_pkg_name = if let Some(real_name) = pkg_replace.get(pkg_name) {
- real_name
- } else {
- pkg_name
- };
-
let tmp_dir = match Builder::new().prefix("pkgcheck").tempdir() {
Ok(tdir) => tdir,
Err(e) => {
@@ -777,7 +872,7 @@ fn check_tds_archive(pkg_name: &str, tds_zip: &str, hashes: &DupHashes, pkg_repl
let path = dir_entry.path().to_path_buf();
let sizeref = &mut sizes;
- sizeref.entry(fsize).or_insert_with(Vec::new).push(path);
+ sizeref.entry(fsize).or_default().push(path);
};
let mut map_files_found = false;
@@ -846,6 +941,15 @@ fn check_tds_archive(pkg_name: &str, tds_zip: &str, hashes: &DupHashes, pkg_repl
}
};
+ // let mtime = meta.modified().unwrap();
+ // if is_future_mtime(*NOW, mtime) {
+ // let diff = mtime.duration_since(*NOW).unwrap();
+ // println!(
+ // "{} has an mtime in the future by {} seconds",
+ // &file_name,
+ // diff.as_secs()
+ // );
+ // }
let ft = get_filetype(&dir_entry);
if let FType::Error(e) = ft {
@@ -853,12 +957,27 @@ fn check_tds_archive(pkg_name: &str, tds_zip: &str, hashes: &DupHashes, pkg_repl
continue;
}
+ // this is the path name without the temporary part
+ // from unpacking the TDS zip archive
let dir_entry_display = if dir_entry.depth() == 0 {
&dir_entry_str[tmp_dir_offset - 1..]
} else {
&dir_entry_str[tmp_dir_offset..]
};
+ let filetype = match ft {
+ FType::Directory => FileKind::Directory,
+ FType::Regular => FileKind::File,
+ FType::Symlink => {
+ e0043!(dir_entry_display);
+ continue;
+ }
+ _ => panic!(
+ "Unexpected file type for {} in zip archive",
+ dir_entry_display
+ ),
+ };
+ register_duplicate_filename(&mut lcnames, dir_entry_display, filetype);
ut.check_for_temporary_file(dir_entry_display);
// In the top level directory of a TDS zip archive
@@ -912,11 +1031,14 @@ fn check_tds_archive(pkg_name: &str, tds_zip: &str, hashes: &DupHashes, pkg_repl
}
// if the path doesn't contain a man page...
- if !dir_entry_str.contains("/man/") {
- let pkg_name_s = format!("/{}/", real_pkg_name);
- // ...then we want to have the package name in the path
- if !dir_entry_str.contains(&pkg_name_s) {
- e0028!(real_pkg_name, dir_entry_display);
+ if !dir_entry_str.contains("/man/") && !dir_entry_str.contains(pkg_name) {
+ if let Some(real_name) = pkg_replace.get(pkg_name) {
+ let pkg_name_s = format!("/{}/", real_name);
+ if !dir_entry_str.contains(&pkg_name_s) {
+ e0028!(pkg_name_s, dir_entry_display);
+ }
+ } else {
+ e0028!(pkg_name, dir_entry_display);
}
}
@@ -952,10 +1074,7 @@ fn check_tds_archive(pkg_name: &str, tds_zip: &str, hashes: &DupHashes, pkg_repl
let hashref = &mut tds_hashes;
scope.execute(move || {
for (size, path, hash) in rx.iter() {
- hashref
- .entry((size, hash))
- .or_insert_with(Vec::new)
- .push(path);
+ hashref.entry((size, hash)).or_default().push(path);
}
});
@@ -974,6 +1093,7 @@ fn check_tds_archive(pkg_name: &str, tds_zip: &str, hashes: &DupHashes, pkg_repl
e0026!(p);
}
}
+ print_casefolding_tds(&lcnames);
}
fn get_extension_from_filename(filename: &str) -> Option<&str> {
@@ -1073,6 +1193,21 @@ enum ReadmeKind {
Symlink(String),
}
+fn register_duplicate_filename(
+ lcnames: &mut FxHashMap<PathBuf, Vec<(PathBuf, FileKind)>>,
+ dir_entry: &str,
+ fk: FileKind,
+) {
+ let lc_dir_entry_str = dir_entry.to_lowercase();
+ if let Some(_dir_name) = filename(dir_entry) {
+ // let lcnref = &mut lcnames;
+ lcnames
+ .entry(PathBuf::from(lc_dir_entry_str))
+ .or_default()
+ .push((PathBuf::from(&dir_entry), fk));
+ }
+}
+
fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
let mut lcnames: FxHashMap<PathBuf, Vec<(PathBuf, FileKind)>> = FxHashMap::default();
@@ -1125,6 +1260,12 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
// above in the definition of dir_entry_str
let file_name = dir_entry.file_name().to_str().unwrap().to_string();
+ let mtime = meta.modified().unwrap();
+ if is_future_mtime(*NOW, mtime) {
+ let diff = mtime.duration_since(*NOW).unwrap();
+ w0011!(&file_name, diff.as_secs(), &utils::format_duration(&diff));
+ }
+
// we check for weird stuff like socket files aso.
let ft = get_filetype(&dir_entry);
if found_unwanted_filetype(dir_entry_str, &ft) {
@@ -1136,6 +1277,7 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
// 1. dealing with symlinks
if ft == FType::Symlink {
match get_symlink(&dir_entry) {
+ // broken symlink
Ok(None) => {
e0010!(&dir_entry_str);
continue;
@@ -1147,22 +1289,18 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
Ok(Some(p)) => {
let pd: String =
p.canonicalize().unwrap().to_string_lossy().to_string();
+ // symlink pointing to outside of the package directory tree
if !pd.starts_with(&root_absolute) {
e0030!(&dir_entry_str, p.display());
continue;
}
- let lc_dir_entry_str = dir_entry_str.to_lowercase();
if let Some(_dir_name) = filename(dir_entry_str) {
- let lcnref = &mut lcnames;
- lcnref
- .entry(PathBuf::from(lc_dir_entry_str))
- .or_insert_with(Vec::new)
- .push((
- PathBuf::from(&dir_entry_str),
- //FileKind::Symlink(&dir_entry_str.into()),
- FileKind::Symlink(pd.clone()),
- ));
+ register_duplicate_filename(
+ &mut lcnames,
+ dir_entry_str,
+ FileKind::Symlink(pd.clone()),
+ );
}
if is_readme(&file_name) {
readme_found = true;
@@ -1184,14 +1322,13 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
// 2. dealing with directories
if ft == FType::Directory {
- let lc_dir_entry_str = dir_entry_str.to_lowercase();
+ //let lc_dir_entry_str = dir_entry_str.to_lowercase();
if let Some(_dir_name) = filename(dir_entry_str) {
- let lcnref = &mut lcnames;
-
- lcnref
- .entry(PathBuf::from(lc_dir_entry_str))
- .or_insert_with(Vec::new)
- .push((PathBuf::from(dir_entry_str), FileKind::Directory));
+ register_duplicate_filename(
+ &mut lcnames,
+ dir_entry_str,
+ FileKind::Directory,
+ );
}
if !owner_has(p, 5) || !others_have(p, 5) {
@@ -1222,7 +1359,7 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
doubleref
.entry(PathBuf::from(file_name))
- .or_insert_with(Vec::new)
+ .or_default()
.push(PathBuf::from(&dir_entry_str));
}
@@ -1258,14 +1395,7 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
));
}
- let lc_dir_entry_str = dir_entry_str.to_lowercase();
-
- let lcnref = &mut lcnames;
-
- lcnref
- .entry(PathBuf::from(lc_dir_entry_str))
- .or_insert_with(Vec::new)
- .push((PathBuf::from(&dir_entry_str), FileKind::File));
+ register_duplicate_filename(&mut lcnames, dir_entry_str, FileKind::File);
}
Err(e) => {
@@ -1293,7 +1423,7 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
let sizeref = &mut sizes;
let path = path.clone();
- sizeref.entry(fsize).or_insert_with(Vec::new).push(path);
+ sizeref.entry(fsize).or_default().push(path);
};
for (path, (meta, _file_name, is_readme)) in file_names.iter() {
let dir_entry_str = match path.to_str() {
@@ -1362,38 +1492,37 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
}
fmm => error!("Should not occur: {} has {:?}", dir_entry_str, fmm),
},
- Some(_) | None => {
- match ft {
- filemagic::Mimetype::Text(LineEnding::Crlf) => {
- e0012!(&dir_entry_str);
- if ARGS.correct_le {
- fix_inconsistent_le(dir_entry_str);
- }
+ Some(_) | None => match ft {
+ filemagic::Mimetype::Text(LineEnding::Crlf) => {
+ e0012!(&dir_entry_str);
+ if ARGS.correct_le {
+ fix_inconsistent_le(dir_entry_str);
}
- filemagic::Mimetype::Text(LineEnding::Cr) => {
- e0037!(&dir_entry_str);
- if ARGS.correct_le {
- fix_inconsistent_le(dir_entry_str);
- }
+ }
+ filemagic::Mimetype::Text(LineEnding::Cr) => {
+ e0037!(&dir_entry_str);
+ if ARGS.correct_le {
+ fix_inconsistent_le(dir_entry_str);
}
- filemagic::Mimetype::Text(LineEnding::Mixed(0, 0, 0)) => (),
- filemagic::Mimetype::Text(LineEnding::Mixed(cr, lf, crlf)) => {
- //println!(">>>{}: {:?} {},{},{}", &dir_entry_str, ft, x, y, z);
- e0038!(&dir_entry_str, cr, lf, crlf);
- if ARGS.correct_le {
- fix_inconsistent_le(dir_entry_str);
- }
+ }
+ filemagic::Mimetype::Text(LineEnding::Mixed(0, 0, 0)) => (),
+ filemagic::Mimetype::Text(LineEnding::Mixed(cr, lf, crlf)) => {
+ e0038!(&dir_entry_str, cr, lf, crlf);
+ if ARGS.correct_le {
+ fix_inconsistent_le(dir_entry_str);
}
- filemagic::Mimetype::Text(LineEnding::Lf) => (),
- fmm => error!("Should not occur: {} has {:?}", dir_entry_str, fmm),
}
- }
+ filemagic::Mimetype::Text(LineEnding::Lf) => (),
+ fmm => error!("Should not occur: {} has {:?}", dir_entry_str, fmm),
+ },
}
}
filemagic::Mimetype::Bom(b) => {
- //println!("{}: {} with BOM detected", dir_entry_str, b.as_ref());
- w0004!(&dir_entry_str, b.as_ref());
+ w0004!(
+ &dir_entry_str,
+ <unicode_bom::Bom as std::convert::AsRef<str>>::as_ref(&b)
+ );
check_and_correct_perms(dir_entry_str, p);
}
filemagic::Mimetype::Binary | filemagic::Mimetype::Script(_) => {
@@ -1462,10 +1591,7 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
let hashref = &mut hashes;
scope.execute(move || {
for (size, path, hash) in rx.iter() {
- hashref
- .entry((size, hash))
- .or_insert_with(DupPath::new)
- .push(path);
+ hashref.entry((size, hash)).or_default().push(path);
}
});
@@ -1489,18 +1615,30 @@ fn check_package(root: &str, tds_zip: &Option<String>) -> Option<DupHashes> {
}
fn print_hardlinks(hashes: &FxHashMap<(u64, u64), Vec<String>>) {
- for ((_devid,inode), eles) in hashes.iter() {
+ for ((_devid, inode), eles) in hashes.iter() {
if eles.len() > 1 {
w0010!(inode);
for hfile in eles.iter() {
info!(" >>> {}", &hfile);
}
+ }
+ }
+}
+fn print_casefolding_tds(hashes: &FxHashMap<PathBuf, Vec<(PathBuf, FileKind)>>) {
+ for (k, eles) in hashes.iter() {
+ // println!("pcf_tds: {:?}, {:?}", k, &eles);
+ if eles.len() == 1 {
+ continue;
}
+ e0042!(k.display());
+
+ for (p, ty) in eles {
+ info!(" >>> {} ({})", p.display(), ty);
+ }
}
}
-
fn print_casefolding(hashes: &FxHashMap<PathBuf, Vec<(PathBuf, FileKind)>>) {
for (k, eles) in hashes.iter() {
//println!("pcf: {:?}, {:?}", k, &eles);
diff --git a/support/pkgcheck/src/messages/errorsd.rs b/support/pkgcheck/src/messages/errorsd.rs
index 39a8ccb357..532f0df371 100644
--- a/support/pkgcheck/src/messages/errorsd.rs
+++ b/support/pkgcheck/src/messages/errorsd.rs
@@ -528,3 +528,29 @@ fonts/map/dvips.
"#
)
}
+
+pub fn e0042d() {
+ error!(
+ r#"
+E0042 -- TDS zip archive: duplicate names when ignoring letter case for files or directories
+
+As there are operating systems which do not distinguish between myfile
+and MYFILE we don't want to have file names in a directory which are the
+same after converting to lower case.
+
+For more details refer to:
+http://mirror.ctan.org/help/ctan/CTAN-upload-addendum.html#filenames
+"#
+ )
+}
+
+pub fn e0043d() {
+ error!(
+ r#"
+E0043 -- Symlink found in TDS zip archive
+
+The TDS zip archive contained a symlink which is not allowed.
+"#
+ )
+}
+
diff --git a/support/pkgcheck/src/messages/fatald.rs b/support/pkgcheck/src/messages/fatald.rs
index 0558612c72..24ece06bad 100644
--- a/support/pkgcheck/src/messages/fatald.rs
+++ b/support/pkgcheck/src/messages/fatald.rs
@@ -107,4 +107,3 @@ rerun.
"#
)
}
-
diff --git a/support/pkgcheck/src/messages/informationd.rs b/support/pkgcheck/src/messages/informationd.rs
index 0eca9849a0..7a87378ad0 100644
--- a/support/pkgcheck/src/messages/informationd.rs
+++ b/support/pkgcheck/src/messages/informationd.rs
@@ -127,4 +127,3 @@ pkgcheck.yml config file like in the following example
"#
)
}
-
diff --git a/support/pkgcheck/src/messages/mod.rs b/support/pkgcheck/src/messages/mod.rs
index 721f27ff7b..265babc961 100644
--- a/support/pkgcheck/src/messages/mod.rs
+++ b/support/pkgcheck/src/messages/mod.rs
@@ -2,11 +2,11 @@ mod errorsd;
mod fatald;
mod informationd;
mod warningsd;
-use colored::Colorize;
use self::errorsd::*;
use self::fatald::*;
use self::informationd::*;
use self::warningsd::*;
+use colored::Colorize;
macro_rules! error_occurred {
() => {
@@ -14,7 +14,6 @@ macro_rules! error_occurred {
};
}
-
macro_rules! no_colors {
() => {
$crate::ARGS.no_colors
@@ -33,7 +32,10 @@ macro_rules! fatal {
macro_rules! f0001 {
() => {{
error_occurred!();
- eprintln!("{} Specify a directory to check (use option -d)", fatal!("F0001"),);
+ eprintln!(
+ "{} Specify a directory to check (use option -d)",
+ fatal!("F0001"),
+ );
}};
}
@@ -42,7 +44,8 @@ macro_rules! f0002 {
error_occurred!();
eprintln!(
"{} Specified directory {} does not exist. Exiting...",
- fatal!("F0002"), $fmt1
+ fatal!("F0002"),
+ $fmt1
);
}};
}
@@ -52,7 +55,8 @@ macro_rules! f0003 {
error_occurred!();
eprintln!(
"{} Specified TDS archive {} does not exist or is no file. Exiting...",
- fatal!("F0003"), $fmt1
+ fatal!("F0003"),
+ $fmt1
);
}};
}
@@ -62,7 +66,8 @@ macro_rules! f0004 {
error_occurred!();
eprintln!(
"{} File {} specified as TDS archive is no zip archive. Exiting...",
- fatal!("F0004"), $fmt1
+ fatal!("F0004"),
+ $fmt1
);
}};
}
@@ -72,7 +77,8 @@ macro_rules! f0005 {
error_occurred!();
eprintln!(
"{} Bad filename {} for the tds.zip archive. Exiting...",
- fatal!("F0005"), $fmt1
+ fatal!("F0005"),
+ $fmt1
);
}};
}
@@ -104,7 +110,8 @@ macro_rules! f0008 {
error_occurred!();
eprintln!(
"{} Config file {} doesn't exist. Exiting...",
- fatal!("F0007"), $fmt
+ fatal!("F0007"),
+ $fmt
);
}};
}
@@ -114,7 +121,9 @@ macro_rules! f0009 {
error_occurred!();
eprintln!(
"{} Error reading config file {}: {} Exiting...",
- fatal!("F0009"), $fmt1, $fmt2
+ fatal!("F0009"),
+ $fmt1,
+ $fmt2
);
}};
}
@@ -124,7 +133,8 @@ macro_rules! f0010 {
error_occurred!();
eprintln!(
"{} Config file's content could not be read properly: {} Exiting...",
- fatal!("F0010"), $fmt
+ fatal!("F0010"),
+ $fmt
);
}};
}
@@ -369,7 +379,10 @@ macro_rules! e0026 {
macro_rules! e0027 {
($fmt1:expr, $fmt2:expr) => {{
error_occurred!();
- error!("{} {}: An I/O error occurred -> {}", "E0027", $fmt1, $fmt2);
+ error!(
+ "{} {}: An I/O error occurred -> {}",
+ "E0027", $fmt1, $fmt2
+ );
}};
}
@@ -494,6 +507,22 @@ macro_rules! e0041 {
}};
}
+macro_rules! e0042 {
+ ($fmt:expr) => {{
+ error_occurred!();
+ error!(
+ "{} TDS zip archive: duplicate names when ignoring letter case for: {}",
+ "E0042", $fmt
+ );
+ }};
+}
+
+macro_rules! e0043 {
+ ($fmt:expr) => {{
+ error_occurred!();
+ error!("{} Symlink {} found in TDS zip archive", "E0043", $fmt);
+ }};
+}
macro_rules! w0001 {
($fmt:expr) => {{
@@ -569,9 +598,7 @@ macro_rules! w0009 {
error_occurred!();
warn!(
"{} Replacing `{} -> {}` with the same from config file",
- "W0009",
- $fmt1,
- $fmt2
+ "W0009", $fmt1, $fmt2
)
}};
}
@@ -582,6 +609,15 @@ macro_rules! w0010 {
}};
}
+macro_rules! w0011 {
+ ($fmt1:expr, $fmt2:expr, $fmt3:expr) => {{
+ warn!(
+ "{} {} has an mtime in the future by {} seconds, or {}",
+ "W0011", $fmt1, $fmt2, $fmt3
+ )
+ }};
+}
+
macro_rules! i0002 {
($fmt:expr) => {{
info!(
@@ -638,11 +674,7 @@ macro_rules! i0009 {
error_occurred!();
warn!(
"{} Updating entry `{} -> {}` with `{} -> {}` from config file",
- "I0009",
- $fmt1,
- $fmt2,
- $fmt1,
- $fmt3
+ "I0009", $fmt1, $fmt2, $fmt1, $fmt3
)
}};
}
@@ -701,6 +733,8 @@ pub fn explains(err: &str) {
"E0039" => e0039d(),
"E0040" => e0040d(),
"E0041" => e0041d(),
+ "E0042" => e0042d(),
+ "E0043" => e0043d(),
// "I0001" => i0001d!(),
"I0001" => i0001d(),
@@ -723,6 +757,7 @@ pub fn explains(err: &str) {
"W0008" => w0008d(),
"W0009" => w0009d(),
"W0010" => w0010d(),
+ "W0011" => w0011d(),
e => eprintln!(
"{} Unknown error code `{}` specified with option -e resp. --explain. Exiting...",
@@ -785,6 +820,8 @@ pub fn explains_all() {
explains("E0039");
explains("E0040");
explains("E0041");
+ explains("E0042");
+ explains("E0043");
explains("I0001");
explains("I0002");
@@ -806,4 +843,5 @@ pub fn explains_all() {
explains("W0008");
explains("W0009");
explains("W0010");
+ explains("W0011");
}
diff --git a/support/pkgcheck/src/messages/warningsd.rs b/support/pkgcheck/src/messages/warningsd.rs
index 6fe37835cf..176fc1d2de 100644
--- a/support/pkgcheck/src/messages/warningsd.rs
+++ b/support/pkgcheck/src/messages/warningsd.rs
@@ -111,7 +111,7 @@ We regard a file as a Windows file if its name ends with:
pub fn w0009d() {
warn!(
r#"
-W0009 -- Replacing -> ` with the same from config file",
+W0009 -- Replacing <pkgname> -> <tpkg> with the same from config file",
This message can only show up if pkgcheck got called with --config.
Indicates that an entry in the pkgcheck config file does the same as the
@@ -131,3 +131,20 @@ displayed
)
}
+pub fn w0011d() {
+ warn!(
+ r#"
+W0011 -- has an mtime in the future by seconds, or <hours, minutes, seconds>
+
+The file has a future modification time. This is most probably caused by
+the archiver tool which doesn't pay attention to the timezone when
+adding the file to the archive
+
+The future time will be displayed in
+
+- seconds
+- and hours, minutes and seconds
+"#
+ )
+}
+
diff --git a/support/pkgcheck/src/recode.rs b/support/pkgcheck/src/recode.rs
index 827da9587b..4d71ac5467 100644
--- a/support/pkgcheck/src/recode.rs
+++ b/support/pkgcheck/src/recode.rs
@@ -74,7 +74,7 @@ pub fn wrong_line_endings2crlf(fname: &str) -> Result<(), io::Error> {
};
// write back
- match hdl_out.write(&another_vec) {
+ match hdl_out.write_all(&another_vec) {
Ok(_) => Ok(()),
Err(e) => Err(e),
}
@@ -102,7 +102,7 @@ pub fn wrong_line_endings2lf(fname: &str) -> Result<(), io::Error> {
};
// write back
- match hdl_out.write(&another_vec) {
+ match hdl_out.write_all(&another_vec) {
Ok(_) => Ok(()),
Err(e) => Err(e),
}
diff --git a/support/pkgcheck/src/utils.rs b/support/pkgcheck/src/utils.rs
index b311453b66..e934c6d9d9 100644
--- a/support/pkgcheck/src/utils.rs
+++ b/support/pkgcheck/src/utils.rs
@@ -5,16 +5,17 @@ use std::fs::read_link;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
use std::path::PathBuf;
+use std::time::Duration;
use walkdir::DirEntry;
use once_cell::sync::Lazy; // 1.3.1
use regex::Regex;
use std::borrow::Cow;
-use std::io;
use std::fs::File;
+use std::io;
use std::process::Command;
-use zip::result::ZipResult;
use std::sync::atomic::Ordering;
+use zip::result::ZipResult;
use std::fs;
@@ -121,7 +122,6 @@ impl Utils {
}
pub fn check_for_temporary_file(&self, dir_entry_str: &str) {
-
static RE: Lazy<Regex> = Lazy::new(regex_temporary_file_endings);
if RE.is_match(dir_entry_str) {
@@ -269,7 +269,7 @@ pub fn temp_file_endings() -> Vec<(String, String)> {
(".xref", "htlatex related"),
(".xray", "dump of \\show output"),
("~", "a file name ending with ~ (tilde) is temporary anyway"),
- // ( ".lyx~", "LyX related backup file" ),
+ ("Thumbs.db", "thumbnails file in Windows"),
];
v.into_iter()
@@ -474,6 +474,16 @@ pub fn dirname(entry: &str) -> Option<&str> {
}
#[test]
+fn test_format_duration() {
+ assert!(format_duration(&Duration::new(5, 0)) == String::from("5sec"));
+ assert!(format_duration(&Duration::new(105, 0)) == String::from("1min 45sec"));
+ assert!(format_duration(&Duration::new(3801, 0)) == String::from("1h 3min 21sec"));
+ assert!(format_duration(&Duration::new(25449, 0)) == String::from("7h 4min 9sec"));
+ assert!(format_duration(&Duration::new(108245, 0)) == String::from("1d 6h 4min 5sec"));
+ assert!(format_duration(&Duration::new(0, 0)) == String::from("0sec"));
+}
+
+#[test]
fn test_filename() {
assert!(filename("/etc/fstab") == Some("fstab"));
assert!(filename("fstab") == Some("fstab"));
@@ -482,6 +492,7 @@ fn test_filename() {
assert!(filename("/") == None);
}
+// We return the right part of a path name if it does not end with a `/`
pub fn filename(entry: &str) -> Option<&str> {
if entry.ends_with('/') {
return None;
@@ -502,3 +513,30 @@ pub fn basename(path: &str) -> Cow<str> {
None => path.into(),
}
}
+
+pub fn format_duration(duration: &Duration) -> String {
+ let seconds = duration.as_secs();
+ let days = seconds / 86400;
+ let hours = (seconds % 86400) / 3600;
+ let minutes = (seconds % 3600) / 60;
+ let seconds = seconds % 60;
+
+ let mut result = String::new();
+ if days > 0 {
+ result.push_str(&format!("{}d ", days));
+ }
+
+ if hours > 0 {
+ result.push_str(&format!("{}h ", hours));
+ }
+
+ if minutes > 0 {
+ result.push_str(&format!("{}min ", minutes));
+ }
+
+ if seconds > 0 || result.is_empty() {
+ result.push_str(&format!("{}sec", seconds));
+ }
+
+ return result.trim().to_string();
+}