Mirror of https://forge.katzen.cafe/schrottkatze/nix-configs.git, synced 2024-11-25 06:18:44 +01:00
jrnl: simplify (or complexify, if you dont like iterators and zero-copy) the parsing
parent 28bebd5aaa
commit 0bf5ed0c76
Cargo.lock (generated) | 7 +++++++

@@ -412,6 +412,7 @@ dependencies = [
  "owo-colors",
  "petgraph",
  "ratatui",
+ "temp-file",
  "termsize",
 ]
 
@@ -799,6 +800,12 @@ dependencies = [
  "unicode-ident",
 ]
 
+[[package]]
+name = "temp-file"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f210bda61d003f311d95611d1b68361df8fe8e732c3609f945441bde881321d"
+
 [[package]]
 name = "termion"
 version = "1.5.6"

Cargo.toml

@@ -13,4 +13,5 @@ markdown = "0.3.0"
 owo-colors = "4.0.0"
 petgraph = "0.6.4"
 ratatui = "0.26.2"
+temp-file = "0.1.8"
 termsize = "0.1.6"

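The only manifest change is the new temp-file dependency; none of the captured hunks call it yet. As a rough sketch of the crate's basic usage (the with_contents helper and path accessor come from temp-file's own documentation, not from this commit):

fn main() {
    // Writes the bytes to a fresh file under the system temp directory;
    // the file is deleted when `t` is dropped.
    // (Assumed API from the temp-file crate docs, not from this diff.)
    let t = temp_file::with_contents(b"journal draft");
    println!("draft written to {:?}", t.path());
}
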
@@ -5,8 +5,8 @@ use crate::md::Doc;
 
 pub fn list_entries(path: PathBuf) {
     let file = fs::read_to_string(path).unwrap();
-    let doc = Doc::new(&file);
 
-    for (i, entry) in doc.entries.into_iter().enumerate() {
-        let n = format!("{:>2}", i + 1);
-        let r = format!(". {}", entry.title,);
+    if let Some(doc) = Doc::new(&file) {
+        for (i, entry) in doc.entries.into_iter().enumerate() {
+            let n = format!("{:>2}", i + 1);
+            let r = format!(". {}", entry.title,);

@@ -17,4 +17,8 @@ pub fn list_entries(path: PathBuf) {
 
         println!("{}{r}{padding}{}", n.cyan(), l.white())
     }
+    } else {
+        eprintln!("Parsing error...");
+        std::process::exit(1);
+    }
 }

@@ -1,3 +1,4 @@
+#![feature(iter_collect_into)]
 use clap::{Parser, Subcommand};
 use std::{fs, path::PathBuf};
 

@@ -34,8 +35,7 @@ fn main() {
             // TODO: handle btter
             let file = fs::read_to_string(cli.s10e_jrnl_file_loc).unwrap();
 
-            let doc = Doc::new(&file);
-            dbg!(doc);
+            let doc = dbg!(Doc::new(&file));
         }
     }
 }

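The new crate attribute enables the nightly iter_collect_into feature. None of the captured hunks use it, but as a minimal sketch of what it turns on (Iterator::collect_into, which appends an iterator into an existing collection instead of allocating a new one):

#![feature(iter_collect_into)]

fn main() {
    let mut evens: Vec<i32> = Vec::new();
    // collect_into extends `evens` in place and returns &mut Vec<i32>.
    (1..=10).filter(|n| n % 2 == 0).collect_into(&mut evens);
    assert_eq!(evens, vec![2, 4, 6, 8, 10]);
}
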
@@ -1,59 +1,43 @@
 use chrono::{DateTime, FixedOffset};
 use markdown::{Block, Span};
+use std::convert::identity;
 
 #[derive(Debug)]
-pub struct Doc {
-    pub title: Vec<Span>,
-    pub entries: Vec<Entry>,
+pub struct Doc<'src> {
+    pub entries: Vec<Entry<'src>>,
 }
 
-impl Doc {
-    pub fn new(f: &str) -> Self {
-        let mut entries = Vec::new();
-        let mut doc_title = vec![Span::Text("Journal".to_owned())];
-        let toks = markdown::tokenize(f);
-        let mut current = None;
-
-        for tok in toks {
-            match tok {
-                Block::Header(title, 1) => doc_title = title,
-                Block::Header(entry_title, 2) => {
-                    if let Some(cur) = current.take() {
-                        entries.push(cur);
-                    }
-
-                    let Some(Span::Text(title)) = entry_title.first() else {
-                        eprintln!("Error: Titles should be text.");
-                        std::process::exit(1);
-                    };
-
-                    let (ts, entry_title) = title.split_once(": ").unwrap();
-                    let ts = DateTime::parse_from_rfc3339(ts).unwrap();
-                    // let ts = PrimitiveDateTime::parse(ts, &DT_FORMAT).unwrap();
-
-                    current = Some(Entry {
-                        timestamp: ts,
-                        title: entry_title.to_owned(),
-                        content: Vec::new(),
-                    });
-                }
-                other => current.as_mut().unwrap().content.push(other),
-            }
-        }
-        if let Some(cur) = current {
-            entries.push(cur);
-        }
-
-        Self {
-            title: doc_title,
-            entries,
-        }
+impl<'src> Doc<'src> {
+    // TODO: better parsing errors?
+    pub fn new(f: &'src str) -> Option<Self> {
+        let entries = f
+            .split("\n## ")
+            .map(|s| s.split_once("\n"))
+            .skip(1)
+            .filter_map(identity)
+            .map(|(title, content)| (title.split_once(": "), content))
+            .map(|(title, content)| {
+                if let Some((ts, title)) = title {
+                    Some(Entry {
+                        timestamp: DateTime::parse_from_rfc3339(ts).unwrap(),
+                        title,
+                        content: content.trim_matches('\n'),
+                    })
+                } else {
+                    None
+                }
+            })
+            .collect::<Vec<_>>();
+
+        entries.iter().all(|it| it.is_some()).then_some(Self {
+            entries: entries.into_iter().filter_map(identity).collect(),
+        })
     }
 }
 
-#[derive(Debug)]
-pub struct Entry {
+#[derive(Debug, Clone)]
+pub struct Entry<'src> {
     pub timestamp: DateTime<FixedOffset>,
-    pub title: String,
-    pub content: Vec<Block>,
+    pub title: &'src str,
+    pub content: &'src str,
 }
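
For reference, a hypothetical caller of the rewritten parser, assuming the Doc and Entry definitions from the hunk above are in scope; the sample journal string is illustrative only:

fn main() {
    let src = "# Journal\n\n## 2024-05-04T18:30:00+02:00: first entry\n\nsome text\n";
    match Doc::new(src) {
        // Titles and bodies now borrow from `src` instead of being copied.
        Some(doc) => {
            assert_eq!(doc.entries.len(), 1);
            assert_eq!(doc.entries[0].title, "first entry");
            assert_eq!(doc.entries[0].content, "some text");
        }
        // Mirrors the new error path in list_entries.
        None => eprintln!("Parsing error..."),
    }
}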