Initial commit

Pantonshire · 4 years ago · branch main · commit a41218e0d9

.gitignore

@@ -0,0 +1,3 @@
.DS_Store
/target/
/config.kdl

Cargo.lock (generated)

File diff suppressed because it is too large.

Cargo.toml

@@ -0,0 +1,6 @@
[workspace]
members = [
    "blog_server",
    "utils/css_gen"
]

blog_server/Cargo.toml

@@ -0,0 +1,32 @@
[package]
name = "blog_server"
version = "0.1.0"
edition = "2021"

[dependencies]
# My own utilities library
libshire = { git = "https://github.com/pantonshire/libshire" }
# Async runtime for Axum
tokio = { version = "1", features = ["full"] }
# Web server framework
axum = "0.5"
# Middleware for the web server
tower = { version = "0.4", features = ["limit"] }
tower-http = { version = "0.3", features = ["fs", "trace"] }
# Compile-time HTML templating
maud = "0.23"
# KDL parsing
knuffel = "2"
# CommonMark parsing
pulldown-cmark = "0.9"
# Syntax highlighting
syntect = "4"
# Filesystem event watcher
notify = "4"
# Time library
chrono = "0.4"
# Logging for observability
tracing = "0.1"
tracing-subscriber = "0.3"
# Pretty errors
miette = { version = "4", features = ["fancy"] }

blog_server/src/codeblock.rs

@@ -0,0 +1,67 @@
use maud::{html, Markup, PreEscaped};
use syntect::html::{ClassedHTMLGenerator, ClassStyle};
use syntect::parsing::SyntaxSet;
use syntect::util::LinesWithEndings;

const CLASS_STYLE: ClassStyle = ClassStyle::SpacedPrefixed { prefix: "cb_" };

pub struct CodeBlockRenderer {
    syntax_set: SyntaxSet,
}

impl CodeBlockRenderer {
    pub fn new() -> Self {
        // Load Syntect's default syntax set from Sublime syntax definitions embedded in the
        // binary.
        let default_syntax_set = SyntaxSet::load_defaults_newlines();
        Self::new_with_syntax_set(default_syntax_set)
    }

    pub fn new_with_syntax_set(syntax_set: SyntaxSet) -> Self {
        Self {
            syntax_set,
        }
    }

    pub fn render(&self, lang: &str, source: &str) -> Markup {
        const CONTEXT_DELIM: &str = "@@";

        // Grab the optional context information between @@s from the first line of the code
        // block.
        let (context, source) = source.split_once('\n')
            .and_then(|(context, source)| context
                .trim()
                .strip_prefix(CONTEXT_DELIM)
                .and_then(|context| context.strip_suffix(CONTEXT_DELIM))
                .map(|context| (Some(context.trim()), source)))
            .unwrap_or((None, source));

        // Search the syntax set for the syntax definition for the language specified for the
        // code block (after the triple backtick), and default to plaintext if no syntax
        // definition is found.
        let syntax = self.syntax_set
            .find_syntax_by_token(lang)
            .unwrap_or_else(|| self.syntax_set.find_syntax_plain_text());

        let mut html_gen = ClassedHTMLGenerator::new_with_class_style(
            syntax,
            &self.syntax_set,
            CLASS_STYLE
        );

        for line in LinesWithEndings::from(source) {
            html_gen.parse_html_for_line_which_includes_newline(line);
        }

        let html_out = html_gen.finalize();

        // TODO: show context & language
        html! {
            pre .codeblock {
                code {
                    (PreEscaped(html_out))
                }
            }
        }
    }
}
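
A usage sketch (hypothetical input): the first line of a fenced block may carry optional
context between @@ delimiters, which render() strips from the source before highlighting.
The context is parsed but not yet shown, per the TODO above.

    let renderer = CodeBlockRenderer::new();
    // The leading line between the @@s carries the context ("src/main.rs" here).
    let source = "@@ src/main.rs @@\nfn main() {\n    println!(\"hello\");\n}\n";
    // "rust" is looked up in the syntax set; unknown tokens fall back to plaintext.
    let markup = renderer.render("rust", source);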

blog_server/src/fs_watcher.rs

@@ -0,0 +1,35 @@
use std::{
    path::Path,
    sync::mpsc,
    time::Duration,
};

use miette::{IntoDiagnostic, WrapErr};
use notify::{
    DebouncedEvent,
    RecommendedWatcher,
    RecursiveMode,
    Watcher,
    watcher,
};
use tracing::info;

// `RecommendedWatcher` is notify's alias for the platform's native watcher implementation
// (FSEvents on macOS), so the same signature works on every platform.
pub fn start_watching(
    tx: mpsc::Sender<DebouncedEvent>,
    watch_path: &Path
) -> miette::Result<RecommendedWatcher>
{
    let mut watcher = watcher(tx, Duration::from_secs(2))
        .into_diagnostic()
        .wrap_err("Failed to create filesystem watcher")?;

    // Watch the path in non-recursive mode, so events are not generated for nodes in
    // sub-directories.
    watcher.watch(watch_path, RecursiveMode::NonRecursive)
        .into_diagnostic()
        .wrap_err_with(|| format!("Failed to watch directory {}", watch_path.to_string_lossy()))?;

    info!(path = %watch_path.to_string_lossy(), "Watching directory");

    Ok(watcher)
}

blog_server/src/html_response.rs

@@ -0,0 +1,211 @@
use std::borrow::Cow;
use std::fmt::{self, Write};

use axum::response::{IntoResponse, Response};
use axum::http::{self, StatusCode};
use maud::{html, Markup, Render, Escaper, DOCTYPE};

pub struct HtmlResponse {
    status: StatusCode,
    title: Cow<'static, str>,
    head: Option<Markup>,
    body: Option<Markup>,
    crawler_hints: CrawlerHints,
}

impl HtmlResponse {
    pub fn new() -> Self {
        Self {
            status: StatusCode::OK,
            title: Cow::Borrowed("untitled"),
            head: None,
            body: None,
            crawler_hints: CrawlerHints::restrictive(),
        }
    }

    pub fn with_status(self, status: StatusCode) -> Self {
        Self { status, ..self }
    }

    pub fn with_title(self, title: Cow<'static, str>) -> Self {
        Self { title, ..self }
    }

    pub fn with_title_static(self, title: &'static str) -> Self {
        self.with_title(Cow::Borrowed(title))
    }

    pub fn with_title_owned(self, title: String) -> Self {
        self.with_title(Cow::Owned(title))
    }

    pub fn with_head(self, head: Markup) -> Self {
        Self { head: Some(head), ..self }
    }

    pub fn with_body(self, body: Markup) -> Self {
        Self { body: Some(body), ..self }
    }

    pub fn with_crawler_hints(self, crawler_hints: CrawlerHints) -> Self {
        Self { crawler_hints, ..self }
    }

    pub fn with_crawler_restrictive(self) -> Self {
        self.with_crawler_hints(CrawlerHints::restrictive())
    }

    pub fn with_crawler_permissive(self) -> Self {
        self.with_crawler_hints(CrawlerHints::permissive())
    }
}

impl Default for HtmlResponse {
    fn default() -> Self {
        Self::new()
    }
}

impl IntoResponse for HtmlResponse {
    fn into_response(self) -> Response {
        let html_doc = html! {
            (DOCTYPE)
            html {
                head {
                    meta charset="utf-8";
                    meta name="robots" content=(self.crawler_hints);
                    title { (self.title) }
                    @if let Some(head) = self.head {
                        (head)
                    }
                }
                body {
                    @if let Some(body) = self.body {
                        (body)
                    }
                }
            }
        };

        let mut response = (self.status, html_doc.into_string())
            .into_response();

        response.headers_mut()
            .append("Content-Type", http::HeaderValue::from_static("text/html; charset=utf-8"));

        response
    }
}

#[derive(Clone, Copy, Debug)]
pub struct CrawlerHints {
    index: bool,
    follow: bool,
    archive: bool,
    snippet: bool,
    image_index: bool,
}

impl CrawlerHints {
    pub const fn restrictive() -> Self {
        Self {
            index: false,
            follow: false,
            archive: false,
            snippet: false,
            image_index: false,
        }
    }

    pub const fn permissive() -> Self {
        Self {
            index: true,
            follow: true,
            archive: true,
            snippet: true,
            image_index: true,
        }
    }

    pub const fn with_index(self, index: bool) -> Self {
        Self { index, ..self }
    }

    pub const fn with_follow(self, follow: bool) -> Self {
        Self { follow, ..self }
    }

    pub const fn with_archive(self, archive: bool) -> Self {
        Self { archive, ..self }
    }

    pub const fn with_snippet(self, snippet: bool) -> Self {
        Self { snippet, ..self }
    }

    pub const fn with_image_index(self, image_index: bool) -> Self {
        Self { image_index, ..self }
    }

    fn index_str(self) -> &'static str {
        if self.index {
            "index"
        } else {
            "noindex"
        }
    }

    fn follow_str(self) -> &'static str {
        if self.follow {
            "follow"
        } else {
            "nofollow"
        }
    }

    fn archive_strs(self) -> Option<[&'static str; 2]> {
        if self.archive {
            None
        } else {
            Some(["noarchive", "nocache"])
        }
    }

    fn snippet_str(self) -> Option<&'static str> {
        if self.snippet {
            None
        } else {
            Some("nosnippet")
        }
    }

    fn image_index_str(self) -> Option<&'static str> {
        if self.image_index {
            None
        } else {
            Some("noimageindex")
        }
    }

    fn write_meta_list_to<W: Write>(self, mut buf: W) -> fmt::Result {
        write!(buf, "{},{}", self.index_str(), self.follow_str())?;

        if let Some([archive_str, cache_str]) = self.archive_strs() {
            write!(buf, ",{},{}", archive_str, cache_str)?;
        }

        if let Some(snippet_str) = self.snippet_str() {
            write!(buf, ",{}", snippet_str)?;
        }

        if let Some(image_index_str) = self.image_index_str() {
            write!(buf, ",{}", image_index_str)?;
        }

        Ok(())
    }
}

impl Render for CrawlerHints {
    fn render_to(&self, buf: &mut String) {
        let escaper = Escaper::new(buf);
        let _result = self.write_meta_list_to(escaper);
    }
}
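
As a sketch of the builder in use (hypothetical page content): the restrictive default
renders the robots meta content as noindex,nofollow,noarchive,nocache,nosnippet,noimageindex,
while the permissive preset yields index,follow.

    let page = HtmlResponse::new()
        .with_title_static("About")
        .with_crawler_permissive()
        .with_body(html! {
            p { "Hello" }
        });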

blog_server/src/main.rs

@@ -0,0 +1,221 @@
mod codeblock;
mod fs_watcher;
mod handlers;
mod html_response;
mod post;
mod posts_store;
mod render;

use std::{env, fs, io, path::PathBuf, thread};

use axum::{
    routing::{get, get_service},
    Router,
    extract::{Extension, Path},
    response::{IntoResponse, Response},
    handler::Handler,
    http::StatusCode
};
use libshire::convert::infallible_elim;
use maud::html;
use miette::{IntoDiagnostic, Context};
use tower::{
    limit::ConcurrencyLimitLayer,
    ServiceExt,
};
use tower_http::{services::ServeDir, trace::TraceLayer};
use tracing::info;

use codeblock::CodeBlockRenderer;
use html_response::HtmlResponse;
use posts_store::ConcurrentPostsStore;
use render::Renderer;

#[derive(knuffel::Decode)]
struct Config {
    #[knuffel(child, unwrap(argument))]
    bind: String,
    #[knuffel(child, unwrap(argument))]
    posts_dir: PathBuf,
    #[knuffel(child, unwrap(argument))]
    static_dir: PathBuf,
    #[knuffel(child, unwrap(argument))]
    concurrency_limit: usize,
}
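
// For reference, a matching config.kdl might look like this (illustrative values, assuming
// knuffel's default snake_case to kebab-case node naming):
//
//     bind "127.0.0.1:8080"
//     posts-dir "posts"
//     static-dir "static"
//     concurrency-limit 512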

fn main() -> miette::Result<()> {
    tracing_subscriber::fmt::init();

    // Load the configuration from the KDL config file specified by the first command-line
    // argument.
    let config = {
        let config_path = env::args().nth(1)
            .ok_or_else(|| miette::Error::msg("No config file specified"))?;

        info!(path = %config_path, "Loading config");

        let contents = fs::read_to_string(&config_path)
            .into_diagnostic()
            .wrap_err_with(|| format!("Failed to read config file {}", config_path))?;

        knuffel::parse::<Config>(&config_path, &contents)
            .wrap_err_with(|| format!("Failed to parse config file {}", config_path))?
    };

    // Create the data structure used to store the rendered posts. This uses an `Arc` internally,
    // so clones will point to the same underlying data.
    let posts_store = ConcurrentPostsStore::new();

    let code_renderer = CodeBlockRenderer::new();

    // Create the post renderer and the mpsc channel that will be used to communicate with it.
    let (renderer, tx) = Renderer::new(
        posts_store.clone(),
        code_renderer,
        config.posts_dir.clone()
    );

    // Dropping the watcher stops its thread, so keep it alive until `main` returns.
    let _watcher = fs_watcher::start_watching(tx, &config.posts_dir)?;

    thread::spawn(move || {
        renderer.handle_events();
    });

    info!("Started renderer thread");

    // To run the web server, we need to be in an async context, so create a new Tokio runtime and
    // pass control to it.
    tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .build()
        .into_diagnostic()
        .wrap_err("Failed to create async runtime")?
        .block_on(run(config, posts_store))
}

async fn run(config: Config, posts_store: ConcurrentPostsStore) -> miette::Result<()> {
    let static_service = get_service(ServeDir::new(&config.static_dir)
        .fallback(handle_fallback
            .into_service()
            .map_err(infallible_elim::<io::Error>)))
        .handle_error(handle_static_io_error);

    let router = Router::new()
        .route("/", get(handle_index))
        .route("/posts/:post_id", get(handle_post_page))
        .nest("/static", static_service)
        .fallback(handle_fallback.into_service())
        .layer(ConcurrencyLimitLayer::new(config.concurrency_limit))
        .layer(TraceLayer::new_for_http())
        .layer(Extension(posts_store));

    let bind_address = &config.bind
        .parse()
        .into_diagnostic()
        .wrap_err_with(|| format!("Failed to parse socket address \"{}\"", config.bind))?;

    info!(address = %bind_address, "Starting server");

    axum::Server::try_bind(bind_address)
        .into_diagnostic()
        .wrap_err_with(|| format!("Failed to bind {}", bind_address))?
        .serve(router.into_make_service())
        .await
        .into_diagnostic()
        .wrap_err("Fatal error while running the server")
}

async fn handle_fallback() -> Error {
    Error::NotFound
}

async fn handle_static_io_error(_err: io::Error) -> Error {
    Error::Internal
}

async fn handle_index(Extension(posts): Extension<ConcurrentPostsStore>) -> HtmlResponse {
    HtmlResponse::new()
        .with_title_static("Placeholder title")
        .with_crawler_permissive()
        .with_body(html! {
            h1 { "Here is my great heading" }
            p { "Hello world" }
            ul {
                @for post in posts.read().await.iter_by_created().rev() {
                    li {
                        a href={ "/posts/" (post.id_str()) } {
                            (post.title())
                        };
                    }
                }
            }
        })
}

async fn handle_post_page(
    Path(post_id): Path<String>,
    Extension(posts): Extension<ConcurrentPostsStore>
) -> Result<HtmlResponse, Error>
{
    let post = posts.get(&post_id)
        .await
        .ok_or(Error::NotFound)?;

    Ok(HtmlResponse::new()
        .with_crawler_permissive()
        .with_title_owned(post.title().to_owned())
        .with_head(html! {
            link href="/static/style/code.css" rel="stylesheet";
        })
        .with_body(html! {
            h1 { (post.title()) }
            p { "by " (post.author()) }
            article {
                (post.html())
            }
        }))
}

// TODO: store diagnostic information in Error struct which is output to trace
#[derive(Debug)]
enum Error {
    Internal,
    NotFound,
}

impl Error {
    fn status_code(&self) -> StatusCode {
        match self {
            Error::Internal => StatusCode::INTERNAL_SERVER_ERROR,
            Error::NotFound => StatusCode::NOT_FOUND,
        }
    }
}

impl IntoResponse for Error {
    fn into_response(self) -> Response {
        let status_code = self.status_code();

        // Create a string buffer containing the full error text, e.g. "404 Not Found".
        let status_text = {
            let status_code_str = status_code.as_str();
            let reason = status_code.canonical_reason();

            let mut buf = String::with_capacity(
                status_code_str.len() + reason.map(|reason| reason.len() + 1).unwrap_or(0));

            buf.push_str(status_code_str);

            if let Some(reason) = reason {
                buf.push(' ');
                buf.push_str(reason);
            }

            buf
        };

        HtmlResponse::new()
            .with_status(status_code)
            .with_body(html! {
                p { (status_text) }
            })
            .with_title_owned(status_text)
            .into_response()
    }
}

blog_server/src/post.rs

@@ -0,0 +1,295 @@
use std::{borrow, error, fmt, ops};

use chrono::{DateTime, Utc};
use libshire::strings::ShString22;
use maud::{Markup, PreEscaped};

use crate::codeblock::CodeBlockRenderer;

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct PostId(ShString22);

impl PostId {
    pub fn from_file_name(file_name: &str) -> Option<Self> {
        const POST_FILE_EXTENSION: &str = ".kdl.md";

        fn is_invalid_char(c: char) -> bool {
            c == '/' || c == '\\' || c == '.'
        }

        let prefix = file_name
            .strip_suffix(POST_FILE_EXTENSION)?;

        if prefix.contains(is_invalid_char) {
            return None;
        }

        Some(Self(ShString22::new_from_str(prefix)))
    }
}

impl ops::Deref for PostId {
    type Target = str;

    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}

impl ops::DerefMut for PostId {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut *self.0
    }
}

impl AsRef<str> for PostId {
    fn as_ref(&self) -> &str {
        self
    }
}

impl AsMut<str> for PostId {
    fn as_mut(&mut self) -> &mut str {
        self
    }
}

impl borrow::Borrow<str> for PostId {
    fn borrow(&self) -> &str {
        self
    }
}

impl borrow::BorrowMut<str> for PostId {
    fn borrow_mut(&mut self) -> &mut str {
        self
    }
}

impl fmt::Display for PostId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

pub struct Post {
    id: PostId,
    title: String,
    author: String,
    html: Markup,
    tags: Vec<ShString22>,
    created: DateTime<Utc>,
    updated: DateTime<Utc>,
}

impl Post {
    pub fn id_str(&self) -> &str {
        &self.id
    }

    pub fn id(&self) -> &PostId {
        &self.id
    }

    pub fn title(&self) -> &str {
        &self.title
    }

    pub fn author(&self) -> &str {
        &self.author
    }

    pub fn html(&self) -> PreEscaped<&str> {
        PreEscaped(&self.html.0)
    }

    pub fn tags(&self) -> &[ShString22] {
        &self.tags
    }

    pub fn created(&self) -> DateTime<Utc> {
        self.created
    }

    pub fn updated(&self) -> DateTime<Utc> {
        self.updated
    }

    pub fn parse(
        code_renderer: &CodeBlockRenderer,
        post_id: PostId,
        file_name: &str,
        created: DateTime<Utc>,
        updated: DateTime<Utc>,
        source: &str,
    ) -> Result<Self, ParseError>
    {
        let mdpost = MdPost::parse(file_name, source)?;
        Ok(Self::from_mdpost(post_id, code_renderer, created, updated, mdpost))
    }

    fn from_mdpost(
        id: PostId,
        code_renderer: &CodeBlockRenderer,
        created: DateTime<Utc>,
        updated: DateTime<Utc>,
        mdpost: MdPost,
    ) -> Self
    {
        use pulldown_cmark::{Options, Parser, html::push_html};

        const PARSER_OPTIONS: Options = Options::ENABLE_TABLES
            .union(Options::ENABLE_FOOTNOTES)
            .union(Options::ENABLE_STRIKETHROUGH);

        let mut parser = PostMdParser::new(
            Parser::new_ext(&mdpost.markdown, PARSER_OPTIONS),
            code_renderer
        );

        let mut html_buf = String::new();
        push_html(&mut html_buf, parser.by_ref());

        Self {
            id,
            title: mdpost.title,
            author: mdpost.author,
            html: PreEscaped(html_buf),
            tags: mdpost.tags,
            created,
            updated,
        }
    }
}

/// Iterator struct which wraps another event iterator in order to render code blocks, collect
/// the links encountered and generate a summary of the text content.
struct PostMdParser<'p, I> {
    iter: I,
    code_renderer: &'p CodeBlockRenderer,
    links: Vec<String>,
    summary: String,
}

impl<'p, I> PostMdParser<'p, I> {
    fn new(iter: I, code_renderer: &'p CodeBlockRenderer) -> Self {
        Self {
            iter,
            code_renderer,
            links: Vec::new(),
            summary: String::new(),
        }
    }
}

impl<'e, 'p, I> Iterator for PostMdParser<'p, I>
where
    I: Iterator<Item = pulldown_cmark::Event<'e>>
{
    type Item = pulldown_cmark::Event<'e>;

    fn next(&mut self) -> Option<Self::Item> {
        use pulldown_cmark::{CodeBlockKind, CowStr, Event, LinkType, Tag};

        self.iter.next().map(|event| match event {
            // When we reach a code block, we want to collect the text content until the code
            // block finishes and have the `CodeBlockRenderer` render it.
            Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(lang))) => {
                let mut code_buf = String::new();

                for event in self.iter.by_ref() {
                    match event {
                        // The code block has finished, so break out of the loop.
                        Event::End(Tag::CodeBlock(_)) => break,
                        // All text events until the end of the code block should be considered
                        // as code, so add the text to the `code_buf` to be rendered later.
                        Event::Text(text) => code_buf.push_str(&text),
                        // Ignore all other events.
                        _ => (),
                    }
                }

                let highlighted = self.code_renderer.render(&lang, &code_buf);
                Event::Html(CowStr::Boxed(highlighted.into_string().into_boxed_str()))
            },

            event => {
                match &event {
                    Event::Start(Tag::Link(LinkType::Inline | LinkType::Autolink, destination, _title)) => {
                        self.links.push(destination.clone().into_string());
                    },
                    // TODO: better way of generating a summary
                    Event::Text(text) => {
                        if self.summary.is_empty() {
                            self.summary = text.clone().into_string();
                        }
                    },
                    _ => (),
                }

                event
            },
        })
    }
}

#[derive(knuffel::Decode)]
struct HeaderNode {
    #[knuffel(child, unwrap(argument))]
    title: String,
    #[knuffel(child, unwrap(argument))]
    author: String,
    #[knuffel(children(name="tag"))]
    tags: Vec<TagNode>,
}

#[derive(knuffel::Decode)]
struct TagNode {
    #[knuffel(argument)]
    tag: String,
}

#[derive(Debug)]
struct MdPost {
    markdown: String,
    title: String,
    author: String,
    tags: Vec<ShString22>,
}

impl MdPost {
    fn parse(file_name: &str, source: &str) -> Result<Self, ParseError> {
        const END_OF_HEADER_DELIM: &str = "\n---\n";

        let (header, md) = source.split_once(END_OF_HEADER_DELIM)
            .ok_or(ParseError::MissingHeader)?;

        let header = knuffel::parse::<HeaderNode>(file_name, header)
            .map_err(|err| ParseError::InvalidHeader(Box::new(err)))?;

        let md = md.trim_start();

        Ok(Self {
            markdown: md.to_owned(),
            title: header.title,
            author: header.author,
            tags: header.tags.into_iter().map(|tag| tag.tag.into()).collect(),
        })
    }
}
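
// For reference, a post source file such as "hello-world.kdl.md" pairs a KDL header with a
// CommonMark body, split at the first "\n---\n" (illustrative content):
//
//     title "Hello, world"
//     author "Pantonshire"
//     tag "rust"
//     tag "web"
//     ---
//
//     The post body, written in CommonMark.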

#[derive(Debug)]
pub enum ParseError {
    MissingHeader,
    InvalidHeader(Box<knuffel::Error>),
}

impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ParseError::MissingHeader => write!(f, "Post file has no header"),
            ParseError::InvalidHeader(err) => fmt::Display::fmt(err, f),
        }
    }
}

impl error::Error for ParseError {}

blog_server/src/posts_store.rs

@@ -0,0 +1,159 @@
use std::{
    collections::{BTreeSet, hash_map, HashMap, HashSet},
    iter::FusedIterator,
    sync::Arc,
};

use chrono::{DateTime, Utc};
use libshire::strings::ShString22;
use tokio::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard};

use crate::post::{Post, PostId};

#[derive(Clone)]
pub struct ConcurrentPostsStore {
    inner: Arc<RwLock<PostsStore>>,
}

impl ConcurrentPostsStore {
    pub fn new() -> Self {
        Self { inner: Arc::new(RwLock::new(PostsStore::new())) }
    }

    pub async fn read(&self) -> RwLockReadGuard<'_, PostsStore> {
        self.inner.read().await
    }

    pub fn write_blocking(&self) -> RwLockWriteGuard<'_, PostsStore> {
        self.inner.blocking_write()
    }

    pub async fn get(&self, id: &str) -> Option<Arc<Post>> {
        self.read().await.get(id)
    }
}

impl Default for ConcurrentPostsStore {
    fn default() -> Self {
        Self::new()
    }
}

pub struct PostsStore {
    posts: HashMap<PostId, Arc<Post>>,
    created_ix: BTreeSet<CreatedIxEntry>,
    tags_ix: HashMap<ShString22, HashSet<PostId>>,
}

// TODO: shrink the various collections on removal to deallocate unneeded space
impl PostsStore {
    pub fn new() -> Self {
        Self {
            posts: HashMap::new(),
            created_ix: BTreeSet::new(),
            tags_ix: HashMap::new(),
        }
    }

    pub fn get(&self, id: &str) -> Option<Arc<Post>> {
        self.posts.get(id).cloned()
    }

    pub fn insert(&mut self, post: Post) -> Option<Arc<Post>> {
        let old_post = self.remove(post.id_str());

        // Insert the post into each of the tag indexes.
        for tag in post.tags() {
            // First, get the existing `HashSet` for the tag, or create a new one if one does
            // not already exist. Then, insert the post's ID into the `HashSet`.
            match self.tags_ix.entry(tag.clone()) {
                hash_map::Entry::Occupied(entry) => entry.into_mut(),
                hash_map::Entry::Vacant(entry) => entry.insert(HashSet::new()),
            }.insert(post.id().clone());
        }

        // Insert the post into the correct position of the created BTree index.
        self.created_ix.insert(CreatedIxEntry::new(&post));

        // Wrap the post with an atomic reference counter and insert it into the main posts
        // `HashMap`.
        self.posts.insert(post.id().clone(), Arc::new(post));

        old_post
    }

    pub fn remove(&mut self, id: &str) -> Option<Arc<Post>> {
        match self.posts.remove(id) {
            Some(post) => {
                // Remove the post's entry in the created index.
                self.created_ix
                    .remove(&CreatedIxEntry::new(&post));

                // Remove every occurrence of the post from the tags index.
                for tag in post.tags() {
                    if let Some(tag_ix) = self.tags_ix.get_mut(tag) {
                        tag_ix.remove(id);
                    }
                }

                Some(post)
            },
            None => None,
        }
    }

    pub fn clear(&mut self) {
        self.tags_ix.clear();
        self.created_ix.clear();
        self.posts.clear();
    }

    pub fn iter(&self)
        -> impl '_
        + Iterator<Item = Arc<Post>>
        + ExactSizeIterator
        + FusedIterator
        + Clone
    {
        self.posts.values().cloned()
    }

    pub fn iter_by_created(&self)
        -> impl '_
        + Iterator<Item = Arc<Post>>
        + DoubleEndedIterator
        + ExactSizeIterator
        + FusedIterator
        + Clone
    {
        // For each entry of the created index, look up the corresponding post in the posts map
        // and return the post. Every entry in the created index should contain the ID of a post
        // in the posts map, so the `expect` should never fail.
        self.created_ix
            .iter()
            .map(|entry| self.get(&entry.id)
                .expect("invalid entry in `created_ix` pointing to a post that does not exist"))
    }
}

impl Default for PostsStore {
    fn default() -> Self {
        Self::new()
    }
}

#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct CreatedIxEntry {
    created: DateTime<Utc>,
    id: PostId,
}

impl CreatedIxEntry {
    fn new(post: &Post) -> Self {
        Self {
            created: post.created(),
            id: post.id().clone(),
        }
    }
}
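
A usage sketch (assuming a `post` value produced by Post::parse): writers go through the
blocking lock from the renderer thread, while async request handlers read.

    let store = ConcurrentPostsStore::new();

    // On the renderer thread, outside the async runtime:
    store.write_blocking().insert(post);

    // In an async handler; clones of the store share the same underlying data:
    if let Some(post) = store.get("hello-world").await {
        // render the post page
    }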

blog_server/src/render.rs

@@ -0,0 +1,262 @@
use std::{
    fmt,
    fs,
    io::{self, Read},
    path::PathBuf,
    sync::mpsc,
};

use chrono::{DateTime, Utc};
use notify::DebouncedEvent;
use tracing::{info, warn, error};

use crate::codeblock::CodeBlockRenderer;
use crate::post::{ParseError, Post, PostId};
use crate::posts_store::ConcurrentPostsStore;

pub struct Renderer {
    posts: ConcurrentPostsStore,
    code_renderer: CodeBlockRenderer,
    posts_dir_path: PathBuf,
    rx: mpsc::Receiver<DebouncedEvent>,
}

impl Renderer {
    pub fn new(
        posts: ConcurrentPostsStore,
        code_renderer: CodeBlockRenderer,
        posts_dir_path: PathBuf,
    ) -> (Self, mpsc::Sender<DebouncedEvent>)
    {
        let (tx, rx) = mpsc::channel();

        // Buffer a rescan event here so that it will be the first event received when
        // `handle_events` is called. This will cause the `Renderer` to perform an "initial
        // scan" of the post files.
        tx.send(DebouncedEvent::Rescan).unwrap();

        (Self {
            posts,
            code_renderer,
            posts_dir_path,
            rx,
        }, tx)
    }

    #[tracing::instrument(skip(self))]
    pub fn handle_events(self) {
        while let Ok(notify_event) = self.rx.recv() {
            let fs_event = match notify_event {
                // Convert create & write events for valid post file names to update events.
                DebouncedEvent::Create(path) | DebouncedEvent::Write(path) => {
                    EventTarget::from_path(path)
                        .map(Event::Update)
                },

                // Convert remove events for valid post file names.
                DebouncedEvent::Remove(path) => {
                    EventTarget::from_path(path)
                        .map(Event::Remove)
                },

                // Convert rename events depending on whether the old / new paths are valid post
                // file names.
                DebouncedEvent::Rename(old_path, new_path) => {
                    match (EventTarget::from_path(old_path), EventTarget::from_path(new_path)) {
                        (Some(old_target), Some(new_target)) => Some(Event::Rename(old_target, new_target)),
                        (None, Some(new_target)) => Some(Event::Update(new_target)),
                        (Some(old_target), None) => Some(Event::Remove(old_target)),
                        (None, None) => None,
                    }
                },

                // Convert rescan events, where it is necessary to read the directory's contents.
                DebouncedEvent::Rescan => Some(Event::Scan),

                // Ignore all other events.
                _ => None,
            };

            if let Some(fs_event) = fs_event {
                self.handle_event(&fs_event);
            }
        }

        info!("Filesystem events channel closed, exiting");
    }

    fn handle_event(&self, event: &Event) {
        info!(event = ?event);

        match event {
            Event::Update(target) => self.update(target),
            Event::Rename(old_target, new_target) => self.rename(old_target, new_target),
            Event::Remove(target) => self.remove(target),
            Event::Scan => self.scan(),
        }
    }

    #[tracing::instrument(skip(self))]
    fn update(&self, target: &EventTarget) {
        match self.parse_post_from_target(target) {
            Ok(post) => {
                let mut guard = self.posts.write_blocking();
                guard.insert(post);
            },
            Err(err) => {
                err.log();
            }
        };
    }

    #[tracing::instrument(skip(self))]
    fn rename(&self, old_target: &EventTarget, new_target: &EventTarget) {
        let post_res = self.parse_post_from_target(new_target);

        let mut guard = self.posts.write_blocking();
        guard.remove(&old_target.id);

        match post_res {
            Ok(post) => {
                guard.insert(post);
            },
            Err(err) => {
                err.log();
            },
        }
    }

    #[tracing::instrument(skip(self))]
    fn remove(&self, target: &EventTarget) {
        self.posts.write_blocking().remove(&target.id);
    }

    #[tracing::instrument(skip(self))]
    fn scan(&self) {
        let posts_dir = match fs::read_dir(&self.posts_dir_path) {
            Ok(posts_dir) => posts_dir,
            Err(err) => {
                Error::Io(Box::new(err)).log();
                return;
            },
        };

        let mut posts = Vec::new();

        for dir_entry in posts_dir {
            let dir_entry = match dir_entry {
                Ok(dir_entry) => dir_entry,
                Err(err) => {
                    Error::Io(Box::new(err)).log();
                    continue;
                },
            };

            if let Some(target) = EventTarget::from_path(dir_entry.path()) {
                posts.push(match self.parse_post_from_target(&target) {
                    Ok(post) => post,
                    Err(err) => {
                        err.log();
                        continue;
                    },
                });
            }
        }

        let mut guard = self.posts.write_blocking();

        guard.clear();
        for post in posts {
            guard.insert(post);
        }
    }

    fn parse_post_from_target(&self, target: &EventTarget) -> Result<Post, Error> {
        let mut fd = fs::OpenOptions::new()
            .read(true)
            .open(&target.path)
            .map_err(|err| Error::Io(Box::new(err)))?;

        let metadata = fd.metadata()
            .map_err(|err| Error::Io(Box::new(err)))?;

        if !metadata.file_type().is_file() {
            return Err(Error::NotAFile);
        }

        let (created, updated) = metadata.created()
            .and_then(|created| metadata.modified()
                .map(|modified| (DateTime::<Utc>::from(created), DateTime::<Utc>::from(modified))))
            .unwrap_or_else(|_| {
                let now = Utc::now();
                (now, now)
            });

        let contents = {
            let mut buf = String::new();
            fd.read_to_string(&mut buf)
                .map_err(|err| Error::Io(Box::new(err)))?;
            buf
        };

        drop(fd);

        Post::parse(
            &self.code_renderer,
            target.id.clone(),
            &target.path.to_string_lossy(),
            created,
            updated,
            &contents
        ).map_err(|err| Error::Parsing(Box::new(err)))
    }
}

#[derive(Debug)]
enum Event {
    Update(EventTarget),
    Rename(EventTarget, EventTarget),
    Remove(EventTarget),
    Scan,
}

struct EventTarget {
    pub path: PathBuf,
    pub id: PostId,
}

impl fmt::Debug for EventTarget {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}@{}", self.id, self.path.to_string_lossy())
    }
}

impl EventTarget {
    pub fn from_path(path: PathBuf) -> Option<Self> {
        path.file_name()
            .and_then(|file_name| file_name.to_str())
            .and_then(PostId::from_file_name)
            .map(|id| Self {
                path,
                id,
            })
    }
}

pub enum Error {
    Io(Box<io::Error>),
    NotAFile,
    Parsing(Box<ParseError>),
}

impl Error {
    fn log(&self) {
        match self {
            Error::Io(err) => {
                error!(error = %err, "IO error while processing event");
            },
            Error::NotAFile => {
                warn!("Event target is not a regular file");
            },
            Error::Parsing(err) => {
                warn!(error = %err, "Parsing error while processing event");
            },
        }
    }
}

static/style/code.css (generated by css_gen)

@@ -0,0 +1,344 @@
.cb_code {
  color: #323232;
  background-color: #ffffff;
}

.cb_comment {
  color: #969896;
  font-style: italic;
}

.cb_string {
  color: #183691;
}

.cb_regexp-operator {
  color: #a71d5d;
}

.cb_string.cb_regexp.cb_characterclass .cb_punctuation.cb_definition.cb_string.cb_begin,
.cb_string.cb_regexp.cb_characterclass .cb_punctuation.cb_definition.cb_string.cb_end {
  color: #a71d5d;
}

.cb_constant.cb_numeric {
  color: #0086b3;
}

.cb_constant.cb_language {
  color: #0086b3;
}

.cb_constant.cb_character,
.cb_constant.cb_other,
.cb_variable.cb_other.cb_constant {
  color: #0086b3;
}

.cb_variable {
  color: #323232;
}

.cb_keyword {
  color: #a71d5d;
  font-weight: bold;
}

.cb_bitwise-operator {
  color: #a71d5d;
  font-weight: bold;
}

.cb_storage {
  color: #a71d5d;
  font-weight: bold;
}

.cb_storage.cb_type {
  color: #a71d5d;
  font-weight: bold;
}

.cb_entity.cb_name.cb_class {
  color: #0086b3;
}

.cb_entity.cb_other.cb_inherited-class {
  color: #0086b3;
}

.cb_entity.cb_name.cb_function {
  color: #795da3;
  font-weight: bold;
}

.cb_variable.cb_parameter {
  color: #323232;
}

.cb_entity.cb_name.cb_tag {
  color: #63a35c;
}

.cb_entity.cb_other.cb_attribute-name {
  color: #795da3;
}

.cb_support.cb_function {
  color: #62a35c;
}

.cb_support.cb_constant {
  color: #0086b3;
}

.cb_support.cb_type,
.cb_support.cb_class {
  color: #0086b3;
}

.cb_support.cb_other.cb_variable {
  color: #323232;
}

.cb_invalid,
.cb_invalid.cb_illegal,
.cb_invalid.cb_deprecated {
  color: #b52a1d;
  background-color: #f5f5f5;
  font-weight: bold;
}

.cb_entity.cb_name.cb_filename.cb_find-in-files {
  color: #323232;
  font-weight: bold;
}

.cb_constant.cb_numeric.cb_line-number.cb_find-in-files,
.cb_constant.cb_numeric.cb_line-number.cb_match.cb_find-in-files {
  color: #b3b3b3;
}

.cb_meta.cb_diff.cb_header {
  color: #969896;
  background-color: #ffffff;
  font-style: italic;
}

.cb_meta.cb_diff.cb_header .cb_punctuation.cb_definition.cb_from-file.cb_diff {
  color: #bd2c00;
  background-color: #ffecec;
  font-weight: bold;
  font-style: italic;
}

.cb_meta.cb_diff.cb_header .cb_punctuation.cb_definition.cb_to-file.cb_diff {
  color: #55a532;
  background-color: #eaffea;
  font-weight: bold;
  font-style: italic;
}

.cb_meta.cb_diff.cb_range {
  color: #969896;
  font-weight: bold;
  font-style: italic;
}

.cb_markup.cb_deleted {
  background-color: #ffecec;
}

.cb_markup.cb_deleted .cb_punctuation.cb_definition.cb_inserted {
  color: #bd2c00;
  font-weight: bold;
}

.cb_markup.cb_inserted {
  background-color: #eaffea;
}

.cb_markup.cb_inserted .cb_punctuation.cb_definition.cb_inserted {
  color: #55a532;
  font-weight: bold;
}

.cb_markup.cb_deleted.cb_git_gutter {
  color: #bd2c00;
}

.cb_markup.cb_inserted.cb_git_gutter {
  color: #55a532;
}

.cb_markup.cb_changed.cb_git_gutter {
  color: #0086b3;
}

.cb_markup.cb_ignored.cb_git_gutter {
  color: #b3b3b3;
}

.cb_markup.cb_untracked.cb_git_gutter {
  color: #b3b3b3;
}

.cb_source.cb_css .cb_punctuation.cb_definition.cb_entity {
  color: #323232;
}

.cb_source.cb_css .cb_entity.cb_other.cb_attribute-name.cb_pseudo-class,
.cb_source.cb_css .cb_entity.cb_other.cb_attribute-name.cb_pseudo-element {
  color: #a71d5d;
}

.cb_source.cb_css .cb_meta.cb_value,
.cb_source.cb_css .cb_support.cb_constant,
.cb_source.cb_css .cb_support.cb_function {
  color: #323232;
}

.cb_source.cb_css .cb_constant.cb_other.cb_color {
  color: #ed6a43;
}

.cb_source.cb_scss .cb_punctuation.cb_definition.cb_entity {
  color: #323232;
}

.cb_source.cb_scss .cb_entity.cb_other.cb_attribute-name.cb_pseudo-class,
.cb_source.cb_scss .cb_entity.cb_other.cb_attribute-name.cb_pseudo-element {
  color: #a71d5d;
}

.cb_source.cb_scss .cb_support.cb_constant.cb_property-value,
.cb_source.cb_scss .cb_support.cb_function {
  color: #323232;
}

.cb_source.cb_scss .cb_variable {
  color: #a71d5d;
}

.cb_variable.cb_language.cb_this.cb_js {
  color: #ed6a43;
}

.cb_source.cb_js .cb_entity.cb_name.cb_function {
  color: #323232;
}

.cb_source.cb_js .cb_meta.cb_function .cb_entity.cb_name.cb_function,
.cb_source.cb_js .cb_entity.cb_name.cb_function .cb_meta.cb_function {
  color: #795da3;
  font-weight: bold;
}

.cb_entity.cb_name.cb_type.cb_new.cb_js {
  color: #795da3;
}

.cb_variable.cb_language.cb_prototype.cb_js {
  color: #0086b3;
}

.cb_source.cb_js .cb_support.cb_function {
  color: #0086b3;
}

.cb_support.cb_type.cb_object.cb_console.cb_js {
  color: #795da3;
}

.cb_source.cb_python .cb_keyword {
  font-weight: bold;
}

.cb_source.cb_python .cb_storage {
  font-weight: bold;
}

.cb_source.cb_python .cb_storage.cb_type {
  font-weight: bold;
}

.cb_source.cb_python .cb_entity.cb_name.cb_function {
  color: #323232;
  font-weight: bold;
}

.cb_source.cb_php .cb_entity.cb_name.cb_type.cb_class {
  color: #323232;
  font-weight: bold;
}

.cb_variable.cb_language.cb_ruby {
  color: #ed6a43;
}

.cb_entity.cb_name.cb_type.cb_module.cb_ruby {
  color: #795da3;
  font-weight: bold;
}

.cb_entity.cb_name.cb_type.cb_class.cb_ruby {
  color: #795da3;
  font-weight: bold;
}

.cb_entity.cb_other.cb_inherited-class.cb_ruby {
  color: #795da3;
  font-weight: bold;
}

.cb_text.cb_html.cb_markdown .cb_punctuation.cb_definition {
  color: #a71d5d;
}

.cb_text.cb_html.cb_markdown .cb_meta.cb_separator {
  color: #b3b3b3;
}

.cb_text.cb_html.cb_markdown .cb_markup.cb_heading {
  font-weight: bold;
}

.cb_text.cb_html.cb_markdown .cb_markup.cb_raw.cb_block {
  color: #323232;
}

.cb_text.cb_html.cb_markdown .cb_markup.cb_raw.cb_inline {
  color: #323232;
}

.cb_text.cb_html.cb_markdown .cb_meta.cb_link,
.cb_text.cb_html.cb_markdown .cb_meta.cb_image {
  color: #4183c4;
}

.cb_text.cb_html.cb_markdown .cb_markup.cb_underline.cb_link,
.cb_text.cb_html.cb_markdown .cb_constant.cb_other.cb_reference {
  font-style: italic;
}

.cb_text.cb_html.cb_markdown .cb_markup.cb_list {
  color: #ed6a43;
}

.cb_text.cb_html.cb_markdown .cb_markup.cb_bold {
  font-weight: bold;
}

.cb_text.cb_html.cb_markdown .cb_markup.cb_italic {
  font-style: italic;
}

.cb_text.cb_html.cb_markdown .cb_markup.cb_bold .cb_markup.cb_italic {
  font-weight: bold;
  font-style: italic;
}

.cb_text.cb_html.cb_markdown .cb_markup.cb_italic .cb_markup.cb_bold {
  font-weight: bold;
  font-style: italic;
}

utils/css_gen/Cargo.toml

@@ -0,0 +1,7 @@
[package]
name = "css_gen"
version = "0.1.0"
edition = "2021"

[dependencies]
syntect = "4"

utils/css_gen/src/main.rs

@@ -0,0 +1,30 @@
use std::env;
use std::process;

use syntect::highlighting::ThemeSet;
use syntect::html::{css_for_theme_with_class_style, ClassStyle};

const CLASS_STYLE: ClassStyle = ClassStyle::SpacedPrefixed { prefix: "cb_" };

fn main() {
    let theme_set = ThemeSet::load_defaults();

    let theme_name = env::args().nth(1).unwrap_or_else(|| {
        eprintln!("No theme specified");
        eprint_available_themes(&theme_set);
        process::exit(1)
    });

    let theme = theme_set.themes.get(&theme_name).unwrap_or_else(|| {
        eprintln!("Theme not found: {}", theme_name);
        eprint_available_themes(&theme_set);
        process::exit(1)
    });

    let css = css_for_theme_with_class_style(theme, CLASS_STYLE);
    println!("{}", css);
}

fn eprint_available_themes(theme_set: &ThemeSet) {
    eprintln!("Available themes:");
    for key in theme_set.themes.keys() {
        eprintln!("  {}", key);
    }
}
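
Presumably the stylesheet above was produced by running this tool with one of syntect's
default themes and redirecting stdout, along the lines of (hypothetical output path):

    cargo run --package css_gen -- InspiredGitHub > static/style/code.css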