feat: add basic Inventory feature
parent 624e3457e1
commit 072466d285
5 changed files with 147 additions and 1 deletion
Cargo.lock (generated): 1 addition

@@ -198,6 +198,7 @@ dependencies = [
  "clap",
  "iana-time-zone",
  "indoc",
+ "regex",
  "serde",
  "toml",
 ]
Cargo.toml: 1 addition

@@ -9,5 +9,6 @@ chrono-tz = "0.10.0"
 clap = { version = "4.5.23", features = ["derive"] }
 iana-time-zone = "0.1.61"
 indoc = "2.0.5"
+regex = "1.11.1"
 serde = { version = "1.0.216", features = ["derive"] }
 toml = "0.8.19"
src/args.rs: 4 additions, 1 deletion

@@ -4,7 +4,8 @@ use clap::{Parser, Subcommand};
 use diaryrs::macros::UnwrapOrFatalAble;
 use serde::Deserialize;

-/// Overengineered and overly opinionated Rust-based diary keeper and processor
+/// Overengineered and overly opinionated Rust-based diary keeper and processor.
+/// Happy journaling!
 #[derive(Parser)]
 #[command(version, about)]
 pub struct Args {
@@ -18,6 +19,8 @@ pub enum Commands {
     Info,
     /// Write diary entry file for today
     Today,
+    /// Take inventory of the diary entries you have
+    Inventory,
 }

 #[derive(Debug, Deserialize)]
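Aside (not part of the commit): with clap's derive API, a unit variant like the new Inventory is exposed as a lowercase inventory subcommand, so the feature would be invoked as `diaryrs inventory`, assuming the binary keeps the crate name. A minimal, self-contained sketch of that mapping, using only names local to the sketch:

// Minimal sketch, not code from the repository: shows how a unit variant becomes a subcommand.
use clap::{Parser, Subcommand};

#[derive(Parser)]
struct Args {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    Today,
    Inventory,
}

fn main() {
    // `cargo run -- inventory` selects the Inventory variant
    match Args::parse().command {
        Commands::Today => println!("today"),
        Commands::Inventory => println!("taking inventory"),
    }
}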
src/main.rs: 26 additions

@@ -1,10 +1,12 @@
 mod args;
+mod mgmt;

 use args::{Args, Config};
 use chrono::Datelike;
 use clap::Parser;
 use diaryrs::{macros::UnwrapOrFatalAble, time::Time, util};
 use indoc::formatdoc;
+use mgmt::Inventory;
 use std::fs;
 use std::io::prelude::*;

@@ -57,6 +59,30 @@ fn main() {
                 Err(e) => eprintln!("Error: Failed to write diary entry: {e}"),
             };
         }
+        args::Commands::Inventory => {
+            let config = Config::read().unwrap_or_fatal("Failed to read config file");
+            let time = Time::build().unwrap_or_fatal("Failed to determine time information");
+            let year = time.datetime.format("%Y").to_string();
+            let month_name = time.datetime.format("%B").to_string();
+            let day_ordinal = util::ordinal(time.datetime.day());
+
+            let inventory =
+                Inventory::take(&config).unwrap_or_fatal("Failed to take inventory");
+
+            println!("*** Inventory ***");
+            println!(
+                "For the {day_ordinal} of {month_name}, {year} in {}",
+                time.timezone
+            );
+            println!();
+            println!("You have {} diary entries so far.", inventory.entries);
+            println!(
+                "You have written {} words so far, with an average of {} words per entry.",
+                inventory.word_count, inventory.avg_word_count
+            );
+            println!();
+            println!("*** Happy journaling! ***")
+        }
     }
 }

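For illustration only, the Inventory arm above prints a report of roughly this shape. The date, timezone and counts below are placeholder values, and the ordinal rendering assumes util::ordinal returns strings like "14th":

*** Inventory ***
For the 14th of December, 2024 in Europe/London

You have 42 diary entries so far.
You have written 12000 words so far, with an average of 286 words per entry.

*** Happy journaling! ***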
src/mgmt.rs (new file): 115 additions

@@ -0,0 +1,115 @@
+use diaryrs::{macros::UnwrapOrFatalAble, time::Time};
+use regex::Regex;
+
+use crate::args::Config;
+
+use std::{fs, path};
+
+// Management for existing diary entries
+
+/// Overall stats and information about existing diary entries
+///
+/// Call `::take` to construct an `Inventory` struct from diary entries on disk
+#[derive(Debug)]
+pub struct Inventory {
+    /// Number of entries in the diary
+    pub entries: usize,
+
+    /// Total word count of all entries
+    ///
+    /// Does not include YAML front matter or comments
+    pub word_count: u64,
+
+    /// Average word count: simply `word_count / entries` rounded to the nearest integer
+    pub avg_word_count: f64,
+}
+
+impl Inventory {
+    /// Take inventory of the diary entries
+    ///
+    /// Reads the diary entries from disk and produces a useful
+    /// `Inventory` struct with stats and information about the entries.
+    pub fn take(config: &Config) -> Result<Self, Box<dyn std::error::Error>> {
+        // Take directory listing
+        let dir = fs::read_dir(&config.base_path)?;
+
+        let md_paths: Vec<_> = dir
+            .filter_map(|entry| entry.ok())
+            .flat_map(|entry| recurse_paths_md(entry))
+            .flatten()
+            .collect();
+
+        let word_count = word_count_mds(&md_paths)?;
+        let entries_count = md_paths.len();
+
+        Ok(Self {
+            entries: entries_count,
+            word_count,
+            avg_word_count: (word_count as f64 / entries_count as f64).round(),
+        })
+    }
+}
+
+/// Recursively finds every Markdown (*.md) file in a directory
+/// and its subdirectories
+///
+/// Returns a vector of `PathBuf`s to each Markdown file or a boxed `Error`.
+fn recurse_paths_md(dir: fs::DirEntry) -> Result<Vec<path::PathBuf>, Box<dyn std::error::Error>> {
+    let mut paths = Vec::new();
+    let current_path = dir.path();
+    let path_type = dir.file_type()?;
+
+    if path_type.is_dir() {
+        // List dir, then run this function on each entry
+        // Add anything returned to the paths vector
+        let dir = fs::read_dir(&current_path)?;
+        paths.extend(
+            dir.filter_map(|entry| entry.ok())
+                .flat_map(|entry: fs::DirEntry| recurse_paths_md(entry))
+                .flatten(),
+        );
+    }
+
+    if path_type.is_file() {
+        // Check if file is an md file
+        if let Some(ext) = current_path.extension() {
+            if ext == "md" {
+                paths.push(current_path);
+            }
+        }
+    }
+
+    Ok(paths)
+}
+
+fn word_count_mds(paths: &Vec<path::PathBuf>) -> Result<u64, Box<dyn std::error::Error>> {
+    // temp regexes
+    let re_titles = Regex::new(r"(?m)^#{1,6} .+")
+        .unwrap_or_fatal("Failed to compile title regex. Something is very wrong!");
+    let re_comments = Regex::new(r"(?s-m)<!---?.+-->")
+        .unwrap_or_fatal("Failed to compile comment regex. Something is very wrong!");
+    let re_images = Regex::new(r"!\[.*\]\(.+\)")
+        .unwrap_or_fatal("Failed to compile image regex. Something is very wrong!");
+
+    let mut word_count = 0;
+
+    for path in paths {
+        let contents = fs::read_to_string(&path)?;
+
+        // Cut YAML header and comments
+        let contents = &re_titles.replace_all(
+            contents
+                .split("---")
+                .collect::<Vec<&str>>()
+                .pop()
+                .ok_or("No content found")?,
+            "",
+        );
+        let contents = &re_comments.replace_all(contents, "");
+        let contents = &re_images.replace_all(contents, "");
+
+        word_count += contents.split_whitespace().count();
+    }
+
+    Ok(word_count as u64)
+}
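Aside (not part of the commit): a standalone sketch of the filtering that word_count_mds performs before counting words, built from the same regexes used above. The sample diary entry here is made up, and `last()` stands in for the collect-and-pop in the commit; both keep only the text after the final "---" fence, then strip headings, HTML comments and image links.

// Illustrative sketch only, not repository code.
use regex::Regex;

fn main() {
    // Hypothetical diary entry with YAML front matter, a heading, a comment and an image.
    let entry = "---
title: Example
---
# 14th of January

Went for a walk. <!-- private note -->
![photo](walk.jpg)
";

    // Same patterns as word_count_mds above.
    let re_titles = Regex::new(r"(?m)^#{1,6} .+").unwrap();
    let re_comments = Regex::new(r"(?s-m)<!---?.+-->").unwrap();
    let re_images = Regex::new(r"!\[.*\]\(.+\)").unwrap();

    // Keep only the text after the last "---" fence, then strip the noise.
    let body = entry.split("---").last().unwrap_or("");
    let body = re_titles.replace_all(body, "");
    let body = re_comments.replace_all(&body, "");
    let body = re_images.replace_all(&body, "");

    // Only the prose survives: "Went for a walk." -> 4 words
    println!("{}", body.split_whitespace().count());
}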