author     Alejandro Sior <aho@sior.be>    2022-09-11 12:17:58 +0200
committer  Alejandro Sior <aho@sior.be>    2022-09-11 12:17:58 +0200
commit     55bcd098059fca8d200abadc165ce04d6b54bc0a (patch)
tree       f2aa990ef5cacdc4315b2e03f5a4aba0f036860a /src/main.rs
add basic site
Diffstat (limited to 'src/main.rs')
-rw-r--r--    src/main.rs    147
1 file changed, 147 insertions(+), 0 deletions(-)
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..999cf9d
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,147 @@
+
+use core::convert::Infallible;
+
+use std::{
+    str::FromStr,
+    fs,
+    path::{Path, PathBuf},
+    ffi::OsStr
+};
+
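+/// One entry of the lexicon file: a word and the words it links to,
+/// parsed from a line of the form "word | related1 related2 ...".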
+pub struct Lexicon {
+    pub word: String,
+    pub related: Vec<String>
+}
+
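+// Parsing a lexicon line: the first token before the "|" separator becomes
+// the word, every token after the "|" becomes a related word.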
+impl FromStr for Lexicon {
+    type Err = Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let parts = s.split(' ');
+
+        let tag = parts.clone()
+            .take_while(|&s| s != "|")
+            .next()
+            .unwrap_or("")
+            .to_string();
+        let rest = parts
+            .skip_while(|&s| s != "|")
+            .skip(1)
+            .map(|s| s.to_string())
+            .collect::<Vec<String>>();
+
+        Ok(Lexicon {
+            word: tag,
+            related: rest
+        })
+    }
+}
+
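+// Finds the entry that lists `tag` among its related words, i.e. the entry
+// one level above `tag` when walking back towards the root.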
+fn backlink<'a>(v: &'a [Lexicon], tag: &str) -> Option<&'a Lexicon> {
+    v.iter().find(|l| l.related.iter().any(|s| s == tag))
+}
+
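+// Finds the entry whose word is exactly `tag`.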
+fn find<'a>(v: &'a [Lexicon], tag: &str) -> Option<&'a Lexicon> {
+    v.iter().find(|l| l.word == tag)
+}
+
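+// Resolves the related words of `tag` to their own entries, silently
+// skipping words that have no entry (currently unused).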
+fn links<'a>(v: &'a [Lexicon], tag: &str) -> Vec<&'a Lexicon> {
+    let l = match find(v, tag) {
+        Some(l) => l,
+        None => return Vec::new()
+    };
+
+    l.related.iter()
+        .filter_map(|g| find(v, g))
+        .collect::<Vec<&'a Lexicon>>()
+}
+
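+// Walks the backlink chain from `tag` up to the root and collects the
+// entries along the way, deepest entry first.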
+fn create_links<'a>(v: &'a [Lexicon], tag: &str) -> Vec<&'a Lexicon> {
+    let mut ret = Vec::<&'a Lexicon>::new();
+
+    let mut b = tag;
+
+    // do-while: push the current entry, then step to its backlink until no
+    // entry links to it anymore; panics if `b` has no lexicon entry
+    while {
+        let lex = find(v, b).unwrap();
+
+        ret.push(lex);
+        if let Some(prev) = backlink(v, b) {
+            b = prev.word.as_str();
+            true
+        } else {
+            false
+        }
+    } {}
+
+    ret
+}
+
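+// Renders the navigation: one <ul> per level of the chain, root first and at
+// most three levels deep; the link that leads towards the current page gets
+// the "nav-intended" class, all others get "nav-not-intended".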
+fn create_nav(v: &[&Lexicon]) -> String {
+    let mut html = Vec::<String>::new();
+
+    // keep at most three levels, starting from the root of the chain
+    let v = v.iter().rev().take(3).collect::<Vec<&&Lexicon>>();
+
+    let count = v.len();
+
+    for (i, level) in v.iter().enumerate() {
+        html.push("<ul>".into());
+
+        // word of the next level down, used to highlight the link that
+        // leads towards the current page
+        let next_word: &str = if i < count - 1 {
+            v[i + 1].word.as_str()
+        } else {
+            ""
+        };
+
+        for related in level.related.iter() {
+            let class = if related == next_word {
+                "nav-intended"
+            } else {
+                "nav-not-intended"
+            };
+
+            html.push(format!("<li><a href=\"/site/{}\" class=\"{}\">{}</a></li>", related, class, related));
+        }
+
+        html.push("</ul>".into());
+    }
+
+    html.join("")
+}
+
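+// Renders one source article: reads the file, builds its navigation from the
+// file stem and substitutes the "{{ nav }}" and "{{ article }}" placeholders
+// in the page template.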
+fn process(lexicon: &[Lexicon], template: &str, src: &Path) -> String {
+    let stem = src.file_stem().and_then(OsStr::to_str).unwrap();
+
+    let src = fs::read_to_string(src).unwrap();
+
+    let m = create_links(lexicon, stem);
+    let nav = create_nav(&m);
+
+    let dest = template.replace("{{ nav }}", &nav);
+    let dest = dest.replace("{{ article }}", &src);
+    dest
+}
+
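+// Reads the lexicon and the page template, then renders every file found in
+// "source" into the "site" directory, naming each output after its file stem.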
+fn main() {
+    let lexicon = fs::read_to_string("lexicon").expect("Missing lexicon");
+    let lexicon = lexicon.lines()
+        .filter(|s| !s.is_empty())
+        .map(|s| Lexicon::from_str(s).unwrap())
+        .collect::<Vec<Lexicon>>();
+
+    let template = fs::read_to_string("index.html").expect("Missing index.html");
+
+    let paths = fs::read_dir("source").unwrap();
+    for path in paths {
+        let mut dest = PathBuf::from("site");
+
+        // Get the path of the source file
+        let src = path.unwrap().path();
+        let stem = src.file_stem().and_then(OsStr::to_str).unwrap();
+
+        let site = process(&lexicon, &template, &src);
+
+        // Build dest: the output file is named after the source file's stem
+        dest.push(stem);
+
+        fs::write(&dest, &site).expect("Failed to write output file");
+    }
+}
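
For reference, the inputs main() expects to find in the working directory (the
file and directory names come from the code above; the lexicon contents are
made-up examples, and running via cargo assumes a standard Cargo layout):

    lexicon       one entry per line, e.g. "home | about projects"
    index.html    page template containing the "{{ nav }}" and "{{ article }}" placeholders
    source/       one article file per lexicon word (each file stem must appear in the lexicon)
    site/         output directory, which must already exist

    $ cargo run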