Atom feed, GUID for RSS feed

parent aa06d2c5ef
commit 1b4be0862c
@@ -0,0 +1,82 @@

use std::sync::Arc;

use atom_syndication as atom;
use axum::{
    body::Bytes,
    extract::Extension,
};

use super::response::Atom;
use crate::{
    Config,
    posts_store::ConcurrentPostsStore,
    time::unix_epoch,
};

// Builds the Atom feed from the most recent posts and returns the serialised
// XML as the response body.
pub async fn handle(
    Extension(config): Extension<Arc<Config>>,
    Extension(posts): Extension<ConcurrentPostsStore>,
) -> Atom<Bytes> {
    let (atom_entries, updated) = {
        let guard = posts.read().await;

        // One <entry> per post, newest first, capped at the configured count.
        let atom_entries = guard.iter_by_created()
            .take(config.atom.num_posts)
            .map(|post| {
                atom::EntryBuilder::default()
                    .id(format!("urn:uuid:{}", post.uuid()))
                    .title(post.title().to_owned())
                    .updated(post.updated())
                    .links(vec![
                        atom::LinkBuilder::default()
                            .href(format!(
                                "{}://{}/articles/{}",
                                config.self_ref.protocol,
                                config.self_ref.domain,
                                post.id()
                            ))
                            .rel("alternate".to_owned())
                            .mime_type(Some("text/html".to_owned()))
                            .build()
                    ])
                    .author(atom::PersonBuilder::default()
                        .name(post.author().to_owned())
                        .build())
                    .build()
            })
            .collect::<Vec<atom::Entry>>();

        // Fall back to the Unix epoch when there are no posts to supply an
        // "updated" timestamp.
        let updated = guard.last_updated()
            .unwrap_or_else(unix_epoch);

        (atom_entries, updated)
    };

    // Feed-level metadata: a "self" link to the feed itself and an "alternate"
    // link to the HTML article index.
    Atom(atom::FeedBuilder::default()
        .id(format!("urn:uuid:{}", *config.namespace_uuid))
        .title(config.atom.title.clone())
        .updated(updated)
        .links(vec![
            atom::LinkBuilder::default()
                .href(format!(
                    "{}://{}/atom.xml",
                    config.self_ref.protocol,
                    config.self_ref.domain
                ))
                .rel("self".to_owned())
                .build(),
            atom::LinkBuilder::default()
                .href(format!(
                    "{}://{}/articles/",
                    config.self_ref.protocol,
                    config.self_ref.domain
                ))
                .rel("alternate".to_owned())
                .mime_type(Some("text/html".to_owned()))
                .build()
        ])
        .entries(atom_entries)
        .build()
        .to_string()
        .into())
}
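For context, a minimal sketch of how this handler might be mounted; the route path, function name, and router setup below are assumptions for illustration and are not part of this commit:

// Hypothetical wiring: register the feed handler and provide the shared state
// it extracts via axum Extension layers. The "/atom.xml" path is assumed to
// match the "self" link the handler builds.
use std::sync::Arc;
use axum::{routing::get, Extension, Router};

fn app(config: Arc<Config>, posts: ConcurrentPostsStore) -> Router {
    Router::new()
        .route("/atom.xml", get(handle))
        .layer(Extension(config))
        .layer(Extension(posts))
}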
@@ -0,0 +1,5 @@

use chrono::{DateTime, NaiveDateTime, Utc};

/// Returns the Unix epoch (1970-01-01T00:00:00Z); used as the feed's fallback
/// "updated" timestamp when there are no posts.
pub fn unix_epoch() -> DateTime<Utc> {
    DateTime::from_utc(NaiveDateTime::from_timestamp(0, 0), Utc)
}
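A side note on this helper: more recent chrono releases deprecate NaiveDateTime::from_timestamp and DateTime::from_utc. Assuming chrono 0.4.31 or later, an equivalent sketch would be:

// Sketch only, assuming chrono >= 0.4.31: same value without the deprecated
// constructors. from_timestamp(0, 0) is the epoch itself, so expect() is safe.
use chrono::{DateTime, Utc};

pub fn unix_epoch() -> DateTime<Utc> {
    DateTime::from_timestamp(0, 0).expect("the Unix epoch is a valid timestamp")
}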
@@ -0,0 +1,60 @@

use std::ops;

use knuffel::{
    ast::{Literal, TypeName},
    decode::{Context, Kind},
    errors::{DecodeError, ExpectedType},
    span::Spanned,
    traits::ErrorSpan,
    DecodeScalar,
};

#[derive(Clone, Copy, Default, Debug)]
#[repr(transparent)]
pub struct Uuid(pub libshire::uuid::Uuid);

impl Uuid {
    pub fn as_inner(&self) -> &libshire::uuid::Uuid {
        &self.0
    }
}

impl ops::Deref for Uuid {
    type Target = libshire::uuid::Uuid;

    fn deref(&self) -> &Self::Target {
        self.as_inner()
    }
}

impl<S: ErrorSpan> DecodeScalar<S> for Uuid {
    fn type_check(type_name: &Option<Spanned<TypeName, S>>, ctx: &mut Context<S>) {
        // Reject any KDL type annotation; a UUID scalar should be a plain string.
        if let Some(type_name) = type_name {
            ctx.emit_error(DecodeError::TypeName {
                span: type_name.span().clone(),
                found: Some((&**type_name).clone()),
                expected: ExpectedType::no_type(),
                rust_type: "Uuid",
            });
        }
    }

    fn raw_decode(
        value: &Spanned<Literal, S>,
        ctx: &mut Context<S>,
    ) -> Result<Self, DecodeError<S>> {
        match &**value {
            Literal::String(s) => match s.parse() {
                Ok(uuid) => Ok(Self(uuid)),
                Err(err) => {
                    // Report the parse failure but return a default value so
                    // decoding can continue and collect further errors.
                    ctx.emit_error(DecodeError::conversion(value, err));
                    Ok(Default::default())
                }
            },
            _ => {
                // Non-string literals (numbers, booleans, null) are not valid UUIDs.
                ctx.emit_error(DecodeError::scalar_kind(Kind::String, value));
                Ok(Default::default())
            }
        }
    }
}
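To illustrate where this wrapper is likely consumed, a hypothetical knuffel config fragment; the struct, field, and node names are assumptions, not taken from this commit:

// Hypothetical usage: with DecodeScalar implemented, the wrapper can be used
// directly as a scalar field in a knuffel-decoded config struct, populated
// from a KDL child node carrying a single string argument.
// (The exact KDL node name follows knuffel's default field-name mapping.)
#[derive(knuffel::Decode)]
pub struct FeedConfig {
    #[knuffel(child, unwrap(argument))]
    pub namespace_uuid: Uuid,
}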