Commit 74e4473

list pages: _index.md
winks committed Jan 16, 2020
1 parent b4d8b4f commit 74e4473
Showing 1 changed file with 110 additions and 13 deletions.
src/main.rs (123 changes: 110 additions & 13 deletions)
@@ -4,12 +4,14 @@ extern crate serde;
extern crate tera;
extern crate walkdir;

use chrono::DateTime;
use pulldown_cmark::{Parser, html};
use serde::Deserialize;
use tera::{Context, Tera};
use toml::value::Datetime;
use toml::value;
use walkdir::WalkDir;

use std::collections::HashMap;
use std::fs;
use std::path::Path;
use std::io::prelude::*;
@@ -22,13 +24,24 @@ struct SiteConfig {

#[derive(Deserialize)]
struct FrontMatter {
date: Datetime,
date: value::Datetime,
description: Option<String>,
draft: Option<bool>,
title: String,
template: Option<String>,
}

struct ParsedPage {
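// Everything captured for a page so it can be listed on its section's index page later.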
content: String,
date: String,
link: String,
section: String,
section_index: bool,
template: String,
title: String,
vars: Context,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
// @TODO RSS
// @TODO sass and/or other stuff to preprocess
@@ -64,6 +77,8 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
}
};

let mut parsed = vec![];
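// Each parsed page is collected here and grouped into its section after this pass.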

// handle static files
for entry in WalkDir::new(dir_static)
.into_iter()
@@ -84,7 +99,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
}

// figure out which sections we have
let mut content_sections = vec![];
let mut content_sections = HashMap::new();
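// Maps a section name to the pages in it; the page lists are filled in after parsing.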
for entry in WalkDir::new(dir_content)
.into_iter()
.filter_map(Result::ok) {
@@ -95,7 +110,8 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
println!("d:d: {}", path.display());
fs::create_dir_all(pp0.join(path))?;
let x = path.to_str().unwrap();
content_sections.push(String::from(x));
let v : Vec<ParsedPage> = Vec::new();
content_sections.insert(String::from(x), v);
}
}

@@ -139,7 +155,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
if is_draft {
continue;
}
let dtx = chrono::DateTime::parse_from_rfc3339(&value.date.to_string()).unwrap();
let dtx = DateTime::parse_from_rfc3339(&value.date.to_string()).unwrap();
page_vars.insert("Date", &dtx.format("%a %b %d %Y").to_string());
match value.description {
None => (),
@@ -158,12 +174,12 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {

// convert to markdown
let parser = Parser::new(parts[2]);
let mut html = String::new();
html::push_html(&mut html, parser);
page_vars.insert("content", &html);
let mut html_from_md = String::new();
html::push_html(&mut html_from_md, parser);
page_vars.insert("content", &html_from_md);

// find out if a section template is needed
for sec in &content_sections {
for (sec, _) in content_sections.iter() {
let mut sc = String::from(sec);
sc.push_str("/");
if path.starts_with(&sc) {
Expand All @@ -182,24 +198,105 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {

let pf;
let pp1 = pp0.join(path);
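// A section's _index.md is not written here; it is rendered further down together with that section's post list.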
let mut skip_write = false;
let mut section_index = false;
if pp1.to_str().unwrap() == "./public/_index.md" {
// special case for the /index.html
pf = pp1.with_file_name("index.html");
page_tpl = "index.html".to_string();
page_section = "indexindex".to_string();
} else if path.to_str().unwrap().ends_with("/_index.md") {
// use _index.md for a section's /section/index.html
pf = pp1.with_file_name("index.html");
page_tpl = page_section;
page_tpl = page_section.clone();
page_tpl.push_str("_index.html");
skip_write = true;
section_index = true;
} else {
let pd = pp1.with_extension("");
pf = pd.join("index.html");
fs::create_dir_all(pd)?;
}
println!("d:f: {}", pf.strip_prefix(pp0).unwrap().display());
let rv = tera.render(&page_tpl, &page_vars)?;
let mut ofile = fs::File::create(pf)?;
println!("d:f: {} {}", pf.strip_prefix(pp0).unwrap().display(), !skip_write);
if !skip_write {
let rv = tera.render(&page_tpl, &page_vars)?;
let mut ofile = fs::File::create(pf.clone())?;
ofile.write_all(&rv.trim().as_bytes())?;
}

let parsed_page = ParsedPage {
title: value.title,
date: value.date.to_string(),
link: pf.strip_prefix(pp0).unwrap().with_file_name("").to_str().unwrap().to_string(),
content: html_from_md,
section: page_section,
template: page_tpl,
vars: page_vars,
section_index: section_index,
};
parsed.push(parsed_page);
}

for p in parsed {
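// "indexindex" is the site root index; pages with a section are collected into that section's list.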
if p.section == "indexindex" {
// @TODO templating
//println!("i {} {} {}", p.date, p.title, p.link);
} else if p.section.len() > 0 {
//if p.section_index { continue; }
//println!("{} _{}_ {} {}", p.date, p.title, p.link, p.template);
content_sections.get_mut(&p.section).unwrap().push(p);
} else {
//println!(" {} {} {}", p.date, p.section, p.title);
}
}

let mut prev_year = "0";
for (sec, pp) in content_sections.iter_mut() {
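// Build a year-grouped post list for this section, newest first, then render it through the section's _index template.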
if pp.len() < 1 { continue; }
let mut out = String::new();
(*pp).sort_by(|a, b| b.date.cmp(&a.date));
let mut idx = 0;
let mut pi_tpl = String::new();
let mut pi_vars = Context::new();
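// The section's own _index.md entry supplies the template and context for the listing page.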
for p in pp {
if p.section_index {
pi_tpl = p.template.clone();
pi_vars = p.vars.clone();
continue;
}
if idx > 0 {
out.push_str("</ul>\n");
}
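// Start a new year group with an <h3> heading whenever the post's year changes.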
let yr = &p.date[0..4];
if yr != prev_year {
out.push_str("\n<h3>");
out.push_str(yr);
out.push_str("</h3>\n");
out.push_str("<ul class=\"posts\">\n");
}
let dtx = DateTime::parse_from_rfc3339(&p.date.to_string()).unwrap();
out.push_str(" <li>\n <time class=\"pull-right post-list\">");
out.push_str(&dtx.format("%Y-%m-%d").to_string());
out.push_str("</time>\n <span><a href=\"");
out.push_str(&config.baseurl);
out.push_str("/");
out.push_str(&p.link);
out.push_str("\">");
out.push_str(&p.title);
out.push_str("</a></span>\n </li>\n");
prev_year = yr;
idx += 1;
}
out.push_str("</ul>");
//@TODO missing _index.md

pi_vars.insert("content", &out);
let rv = tera.render(&pi_tpl, &pi_vars)?;
let pf = pp0.join(sec).join("index.html");
let mut ofile = fs::File::create(pf.clone())?;
ofile.write_all(&rv.trim().as_bytes())?;
println!("d:f: {} i", pf.strip_prefix(pp0).unwrap().display());
}

Ok(())
}
