Many improvements around bandwidth use

- Use ETags for RSS/Atom feeds
- Use cache-control headers
- Update to rust nightly (for rust-analyzer and faster builds)
- Limit feeds to the last 20 posts:
  https://twitter.com/theprincessxena/status/1349891678857998339
- Use if-none-match to limit bandwidth further (see the sketch below)

Also does this:

- bump go_vanity to 0.2.0 and let users customize the branch name
- fix formatting on jsonfeed
- remove the last vestige of kubernetes/docker support
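
The flow below is a minimal, self-contained sketch of the ETag / If-None-Match / Cache-Control approach this commit wires into the feed handlers shown further down. It assumes warp and tokio as dependencies; the route name, the fixed ETag value, and the port are made up for illustration and are not the site's actual code.

    // Minimal sketch: one warp route that serves a feed with an ETag and a
    // Cache-Control header, and answers If-None-Match revalidations with 304.
    use std::convert::Infallible;
    use warp::{http::StatusCode, Filter};

    const BODY: &str = "<feed>pretend this is an Atom feed</feed>";
    // The real handlers derive this from a UUID at startup; a constant is enough here.
    const ETAG: &str = "W/\"deadbeef\"";

    async fn feed(if_none_match: Option<String>) -> Result<impl warp::Reply, Infallible> {
        if if_none_match.as_deref() == Some(ETAG) {
            // The client already has this version, so skip re-sending the body.
            return Ok(warp::http::Response::builder()
                .status(StatusCode::NOT_MODIFIED)
                .header("ETag", ETAG)
                .body(String::new())
                .unwrap());
        }
        Ok(warp::http::Response::builder()
            .status(StatusCode::OK)
            .header("Content-Type", "application/atom+xml")
            .header("ETag", ETAG)
            .header("Cache-Control", "public, max-age=86400, stale-if-error=60")
            .body(BODY.to_string())
            .unwrap())
    }

    #[tokio::main]
    async fn main() {
        let route = warp::path("blog.atom")
            // warp::header::optional hands the handler the client's If-None-Match, if any.
            .and(warp::header::optional::<String>("if-none-match"))
            .and_then(feed);
        warp::serve(route).run(([127, 0, 0, 1], 3030)).await;
    }

A client that remembers the ETag from its first fetch only pays for response headers on later polls, which is where most of the bandwidth savings come from; the real handlers generate the ETag from a UUID at process start, so it changes whenever the site is redeployed.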

Signed-off-by: Christine Dodrill <me@christine.website>
Cadey Ratio 2021-01-14 21:28:23 -05:00
parent 49a4d7cbea
commit 31cad90e0a
21 changed files with 249 additions and 221 deletions

Cargo.lock (generated)

@@ -665,6 +665,17 @@ dependencies = [
  "wasi 0.9.0+wasi-snapshot-preview1",
 ]
 
+[[package]]
+name = "getrandom"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4060f4657be78b8e766215b02b18a2e862d83745545de804638e2b545e81aee6"
+dependencies = [
+ "cfg-if 1.0.0",
+ "libc",
+ "wasi 0.10.0+wasi-snapshot-preview1",
+]
+
 [[package]]
 name = "gimli"
 version = "0.23.0"
@@ -679,7 +690,7 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
 
 [[package]]
 name = "go_vanity"
-version = "0.1.0"
+version = "0.2.0"
 dependencies = [
  "mime",
  "ructe",
@@ -1585,7 +1596,7 @@ version = "0.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
 dependencies = [
- "getrandom",
+ "getrandom 0.1.15",
  "libc",
  "rand_chacha 0.2.2",
  "rand_core 0.5.1",
@@ -1633,7 +1644,7 @@ version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
 dependencies = [
- "getrandom",
+ "getrandom 0.1.15",
 ]
 
 [[package]]
@@ -2515,6 +2526,16 @@ version = "0.7.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "05e42f7c18b8f902290b009cde6d651262f956c98bc51bca4cd1d511c9cd85c7"
 
+[[package]]
+name = "uuid"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
+dependencies = [
+ "getrandom 0.2.1",
+ "serde",
+]
+
 [[package]]
 name = "vcpkg"
 version = "0.2.10"
@@ -2801,6 +2822,7 @@ dependencies = [
  "tracing-futures",
  "tracing-subscriber",
  "url",
+ "uuid",
  "warp",
  "xml-rs",
 ]


@@ -34,6 +34,7 @@ tracing-subscriber = { version = "0.2", features = ["fmt"] }
 warp = "0.2"
 xml-rs = "0.8"
 url = "2"
+uuid = { version = "0.8", features = ["serde", "v4"] }
 
 # workspace dependencies
 go_vanity = { path = "./lib/go_vanity" }


@@ -2,12 +2,17 @@
 with pkgs;
 let
+  rust = pkgs.callPackage ./nix/rust.nix { };
+
   srcNoTarget = dir:
     builtins.filterSource
     (path: type: type != "directory" || builtins.baseNameOf path != "target")
     dir;
-  naersk = pkgs.callPackage sources.naersk { };
+  naersk = pkgs.callPackage sources.naersk {
+    rustc = rust;
+    cargo = rust;
+  };
   dhallpkgs = import sources.easy-dhall-nix { inherit pkgs; };
   src = srcNoTarget ./.;


@@ -1,23 +0,0 @@
-{ system ? builtins.currentSystem }:
-let
-  sources = import ./nix/sources.nix;
-  pkgs = import sources.nixpkgs { inherit system; };
-  callPackage = pkgs.lib.callPackageWith pkgs;
-
-  site = callPackage ./default.nix { };
-
-  dockerImage = pkg:
-    pkgs.dockerTools.buildLayeredImage {
-      name = "xena/christinewebsite";
-      tag = "latest";
-
-      contents = [ pkgs.cacert pkg ];
-
-      config = {
-        Cmd = [ "${pkg}/bin/xesite" ];
-        Env = [ "CONFIG_FNAME=${pkg}/config.dhall" "RUST_LOG=info" ];
-        WorkingDir = "/";
-      };
-    };
-in dockerImage site


@@ -1,6 +1,6 @@
 [package]
 name = "go_vanity"
-version = "0.1.0"
+version = "0.2.0"
 authors = ["Christine Dodrill <me@christine.website>"]
 edition = "2018"
 build = "src/build.rs"


@@ -1,12 +1,12 @@
+use crate::templates::RenderRucte;
 use warp::{http::Response, Rejection, Reply};
-use crate::templates::{RenderRucte};
 
 include!(concat!(env!("OUT_DIR"), "/templates.rs"));
 
-pub async fn gitea(pkg_name: &str, git_repo: &str) -> Result<impl Reply, Rejection> {
-    Response::builder().html(|o| templates::gitea_html(o, pkg_name, git_repo))
+pub async fn gitea(pkg_name: &str, git_repo: &str, branch: &str) -> Result<impl Reply, Rejection> {
+    Response::builder().html(|o| templates::gitea_html(o, pkg_name, git_repo, branch))
 }
 
-pub async fn github(pkg_name: &str, git_repo: &str) -> Result<impl Reply, Rejection> {
-    Response::builder().html(|o| templates::github_html(o, pkg_name, git_repo))
+pub async fn github(pkg_name: &str, git_repo: &str, branch: &str) -> Result<impl Reply, Rejection> {
+    Response::builder().html(|o| templates::github_html(o, pkg_name, git_repo, branch))
 }


@@ -1,11 +1,11 @@
-@(pkg_name: &str, git_repo: &str)
+@(pkg_name: &str, git_repo: &str, branch: &str)
 
 <!DOCTYPE html>
 <html>
 <head>
 <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
 <meta name="go-import" content="@pkg_name git @git_repo">
-<meta name="go-source" content="@pkg_name @git_repo @git_repo/src/master@{/dir@} @git_repo/src/master@{/dir@}/@{file@}#L@{line@}">
+<meta name="go-source" content="@pkg_name @git_repo @git_repo/src/@branch@{/dir@} @git_repo/src/@branch@{/dir@}/@{file@}#L@{line@}">
 <meta http-equiv="refresh" content="0; url=https://godoc.org/@pkg_name">
 </head>
 <body>


@@ -1,11 +1,11 @@
-@(pkg_name: &str, git_repo: &str)
+@(pkg_name: &str, git_repo: &str, branch: &str)
 
 <!DOCTYPE html>
 <html>
 <head>
 <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
 <meta name="go-import" content="@pkg_name git @git_repo">
-<meta name="go-source" content="@pkg_name @git_repo @git_repo/tree/master@{/dir@} @git_repo/blob/master@{/dir@}/@{file@}#L@{line@}">
+<meta name="go-source" content="@pkg_name @git_repo @git_repo/tree/@branch@{/dir@} @git_repo/blob/@branch@{/dir@}/@{file@}#L@{line@}">
 <meta http-equiv="refresh" content="0; url=https://godoc.org/@pkg_name">
 </head>
 <body>


@@ -1,7 +1,7 @@
 use std::default::Default;
 
 use errors::*;
-use feed::{Feed, Author, Attachment};
+use feed::{Attachment, Author, Feed};
 use item::{Content, Item};
 
 /// Feed Builder
@@ -160,7 +160,7 @@ impl ItemBuilder {
         match self.content {
             Some(Content::Text(t)) => {
                 self.content = Some(Content::Both(i.into(), t));
-            },
+            }
             _ => {
                 self.content = Some(Content::Html(i.into()));
             }
@@ -172,10 +172,10 @@ impl ItemBuilder {
         match self.content {
             Some(Content::Html(s)) => {
                 self.content = Some(Content::Both(s, i.into()));
-            },
+            }
             _ => {
                 self.content = Some(Content::Text(i.into()));
-            },
+            }
         }
         self
     }
@@ -197,8 +197,7 @@ impl ItemBuilder {
             date_modified: self.date_modified,
             author: self.author,
             tags: self.tags,
-            attachments: self.attachments
+            attachments: self.attachments,
         })
     }
 }


@@ -1,7 +1,6 @@
 use serde_json;
 
-error_chain!{
+error_chain! {
     foreign_links {
         Serde(serde_json::Error);
     }
 }


@@ -1,7 +1,7 @@
 use std::default::Default;
 
-use item::Item;
 use builder::Builder;
+use item::Item;
 
 const VERSION_1: &'static str = "https://jsonfeed.org/version/1";
 
@@ -145,9 +145,9 @@ pub struct Hub {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use serde_json;
     use std::default::Default;
+    use super::*;
 
     #[test]
     fn serialize_feed() {
@@ -168,18 +168,16 @@ mod tests {
 
     #[test]
     fn deserialize_feed() {
-        let json = r#"{"version":"https://jsonfeed.org/version/1","title":"some title","items":[]}"#;
+        let json =
+            r#"{"version":"https://jsonfeed.org/version/1","title":"some title","items":[]}"#;
         let feed: Feed = serde_json::from_str(&json).unwrap();
         let expected = Feed {
             version: "https://jsonfeed.org/version/1".to_string(),
             title: "some title".to_string(),
             items: vec![],
             ..Default::default()
         };
-        assert_eq!(
-            feed,
-            expected
-        );
+        assert_eq!(feed, expected);
     }
 
     #[test]
@@ -208,10 +206,7 @@ mod tests {
             size_in_bytes: Some(1),
             duration_in_seconds: Some(1),
         };
-        assert_eq!(
-            attachment,
-            expected
-        );
+        assert_eq!(attachment, expected);
     }
 
     #[test]
@@ -229,17 +224,15 @@ mod tests {
     #[test]
     fn deserialize_author() {
-        let json = r#"{"name":"bob jones","url":"http://example.com","avatar":"http://img.com/blah"}"#;
+        let json =
+            r#"{"name":"bob jones","url":"http://example.com","avatar":"http://img.com/blah"}"#;
         let author: Author = serde_json::from_str(&json).unwrap();
         let expected = Author {
             name: Some("bob jones".to_string()),
             url: Some("http://example.com".to_string()),
             avatar: Some("http://img.com/blah".to_string()),
         };
-        assert_eq!(
-            author,
-            expected
-        );
+        assert_eq!(author, expected);
     }
 
     #[test]
@@ -262,10 +255,7 @@ mod tests {
             type_: "some-type".to_string(),
             url: "http://example.com".to_string(),
         };
-        assert_eq!(
-            hub,
-            expected
-        );
+        assert_eq!(hub, expected);
     }
 
     #[test]


@@ -1,11 +1,11 @@
-use std::fmt;
 use std::default::Default;
+use std::fmt;
 
-use feed::{Author, Attachment};
 use builder::ItemBuilder;
+use feed::{Attachment, Author};
 
-use serde::ser::{Serialize, Serializer, SerializeStruct};
-use serde::de::{self, Deserialize, Deserializer, Visitor, MapAccess};
+use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor};
+use serde::ser::{Serialize, SerializeStruct, Serializer};
 
 /// Represents the `content_html` and `content_text` attributes of an item
 #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
@@ -61,7 +61,8 @@ impl Default for Item {
 impl Serialize for Item {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-        where S: Serializer
+    where
+        S: Serializer,
     {
         let mut state = serializer.serialize_struct("Item", 14)?;
         state.serialize_field("id", &self.id)?;
@@ -78,15 +79,15 @@ impl Serialize for Item {
             Content::Html(ref s) => {
                 state.serialize_field("content_html", s)?;
                 state.serialize_field("content_text", &None::<Option<&str>>)?;
-            },
+            }
             Content::Text(ref s) => {
                 state.serialize_field("content_html", &None::<Option<&str>>)?;
                 state.serialize_field("content_text", s)?;
-            },
+            }
             Content::Both(ref s, ref t) => {
                 state.serialize_field("content_html", s)?;
                 state.serialize_field("content_text", t)?;
-            },
+            }
         };
         if self.summary.is_some() {
             state.serialize_field("summary", &self.summary)?;
@@ -118,7 +119,8 @@ impl Serialize for Item {
 impl<'de> Deserialize<'de> for Item {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-        where D: Deserializer<'de>
+    where
+        D: Deserializer<'de>,
     {
         enum Field {
             Id,
@@ -135,11 +137,12 @@ impl<'de> Deserialize<'de> for Item {
             Author,
             Tags,
             Attachments,
-        };
+        }
 
         impl<'de> Deserialize<'de> for Field {
             fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-                where D: Deserializer<'de>
+            where
+                D: Deserializer<'de>,
             {
                 struct FieldVisitor;
@@ -151,7 +154,8 @@ impl<'de> Deserialize<'de> for Item {
                 }
 
                 fn visit_str<E>(self, value: &str) -> Result<Field, E>
-                    where E: de::Error
+                where
+                    E: de::Error,
                 {
                     match value {
                         "id" => Ok(Field::Id),
@@ -186,7 +190,8 @@ impl<'de> Deserialize<'de> for Item {
             }
 
             fn visit_map<V>(self, mut map: V) -> Result<Item, V::Error>
-                where V: MapAccess<'de>
+            where
+                V: MapAccess<'de>,
             {
                 let mut id = None;
                 let mut url = None;
@@ -210,99 +215,93 @@ impl<'de> Deserialize<'de> for Item {
                                return Err(de::Error::duplicate_field("id"));
                             }
                             id = Some(map.next_value()?);
-                        },
+                        }
                         Field::Url => {
                             if url.is_some() {
                                 return Err(de::Error::duplicate_field("url"));
                             }
                             url = map.next_value()?;
-                        },
+                        }
                         Field::ExternalUrl => {
                             if external_url.is_some() {
                                 return Err(de::Error::duplicate_field("external_url"));
                             }
                             external_url = map.next_value()?;
-                        },
+                        }
                         Field::Title => {
                             if title.is_some() {
                                 return Err(de::Error::duplicate_field("title"));
                             }
                             title = map.next_value()?;
-                        },
+                        }
                         Field::ContentHtml => {
                             if content_html.is_some() {
                                 return Err(de::Error::duplicate_field("content_html"));
                             }
                             content_html = map.next_value()?;
-                        },
+                        }
                        Field::ContentText => {
                             if content_text.is_some() {
                                 return Err(de::Error::duplicate_field("content_text"));
                             }
                             content_text = map.next_value()?;
-                        },
+                        }
                         Field::Summary => {
                             if summary.is_some() {
                                 return Err(de::Error::duplicate_field("summary"));
                             }
                             summary = map.next_value()?;
-                        },
+                        }
                         Field::Image => {
                             if image.is_some() {
                                 return Err(de::Error::duplicate_field("image"));
                             }
                             image = map.next_value()?;
-                        },
+                        }
                         Field::BannerImage => {
                             if banner_image.is_some() {
                                 return Err(de::Error::duplicate_field("banner_image"));
                             }
                             banner_image = map.next_value()?;
-                        },
+                        }
                         Field::DatePublished => {
                             if date_published.is_some() {
                                 return Err(de::Error::duplicate_field("date_published"));
                             }
                             date_published = map.next_value()?;
-                        },
+                        }
                         Field::DateModified => {
                             if date_modified.is_some() {
                                 return Err(de::Error::duplicate_field("date_modified"));
                             }
                             date_modified = map.next_value()?;
-                        },
+                        }
                         Field::Author => {
                             if author.is_some() {
                                 return Err(de::Error::duplicate_field("author"));
                             }
                             author = map.next_value()?;
-                        },
+                        }
                         Field::Tags => {
                             if tags.is_some() {
                                 return Err(de::Error::duplicate_field("tags"));
                             }
                             tags = map.next_value()?;
-                        },
+                        }
                         Field::Attachments => {
                             if attachments.is_some() {
                                 return Err(de::Error::duplicate_field("attachments"));
                             }
                             attachments = map.next_value()?;
-                        },
+                        }
                     }
                 }
                 let id = id.ok_or_else(|| de::Error::missing_field("id"))?;
                 let content = match (content_html, content_text) {
-                    (Some(s), Some(t)) => {
-                        Content::Both(s.to_string(), t.to_string())
-                    },
-                    (Some(s), _) => {
-                        Content::Html(s.to_string())
-                    },
-                    (_, Some(t)) => {
-                        Content::Text(t.to_string())
-                    },
+                    (Some(s), Some(t)) => Content::Both(s.to_string(), t.to_string()),
+                    (Some(s), _) => Content::Html(s.to_string()),
+                    (_, Some(t)) => Content::Text(t.to_string()),
                     _ => return Err(de::Error::missing_field("content_html or content_text")),
                 };
@@ -363,7 +362,12 @@ mod tests {
             banner_image: Some("http://img.com/blah".into()),
             date_published: Some("2017-01-01 10:00:00".into()),
             date_modified: Some("2017-01-01 10:00:00".into()),
-            author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+            author: Some(
+                Author::new()
+                    .name("bob jones")
+                    .url("http://example.com")
+                    .avatar("http://img.com/blah"),
+            ),
             tags: Some(vec!["json".into(), "feed".into()]),
             attachments: Some(vec![]),
         };
@@ -387,7 +391,12 @@ mod tests {
             banner_image: Some("http://img.com/blah".into()),
             date_published: Some("2017-01-01 10:00:00".into()),
             date_modified: Some("2017-01-01 10:00:00".into()),
-            author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+            author: Some(
+                Author::new()
+                    .name("bob jones")
+                    .url("http://example.com")
+                    .avatar("http://img.com/blah"),
+            ),
             tags: Some(vec!["json".into(), "feed".into()]),
             attachments: Some(vec![]),
         };
@@ -411,7 +420,12 @@ mod tests {
             banner_image: Some("http://img.com/blah".into()),
             date_published: Some("2017-01-01 10:00:00".into()),
             date_modified: Some("2017-01-01 10:00:00".into()),
-            author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+            author: Some(
+                Author::new()
+                    .name("bob jones")
+                    .url("http://example.com")
+                    .avatar("http://img.com/blah"),
+            ),
             tags: Some(vec!["json".into(), "feed".into()]),
             attachments: Some(vec![]),
         };
@@ -437,7 +451,12 @@ mod tests {
             banner_image: Some("http://img.com/blah".into()),
             date_published: Some("2017-01-01 10:00:00".into()),
             date_modified: Some("2017-01-01 10:00:00".into()),
-            author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+            author: Some(
+                Author::new()
+                    .name("bob jones")
+                    .url("http://example.com")
+                    .avatar("http://img.com/blah"),
+            ),
             tags: Some(vec!["json".into(), "feed".into()]),
             attachments: Some(vec![]),
         };
@@ -460,7 +479,12 @@ mod tests {
             banner_image: Some("http://img.com/blah".into()),
             date_published: Some("2017-01-01 10:00:00".into()),
             date_modified: Some("2017-01-01 10:00:00".into()),
-            author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+            author: Some(
+                Author::new()
+                    .name("bob jones")
+                    .url("http://example.com")
+                    .avatar("http://img.com/blah"),
+            ),
             tags: Some(vec!["json".into(), "feed".into()]),
             attachments: Some(vec![]),
         };
@@ -483,11 +507,15 @@ mod tests {
             banner_image: Some("http://img.com/blah".into()),
             date_published: Some("2017-01-01 10:00:00".into()),
             date_modified: Some("2017-01-01 10:00:00".into()),
-            author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+            author: Some(
+                Author::new()
+                    .name("bob jones")
+                    .url("http://example.com")
+                    .avatar("http://img.com/blah"),
+            ),
             tags: Some(vec!["json".into(), "feed".into()]),
             attachments: Some(vec![]),
         };
 
         assert_eq!(item, expected);
     }
 }


@@ -40,18 +40,20 @@
 //! ```
 
 extern crate serde;
-#[macro_use] extern crate error_chain;
-#[macro_use] extern crate serde_derive;
+#[macro_use]
+extern crate error_chain;
+#[macro_use]
+extern crate serde_derive;
 extern crate serde_json;
 
-mod errors;
-mod item;
-mod feed;
 mod builder;
+mod errors;
+mod feed;
+mod item;
 
 pub use errors::*;
+pub use feed::{Attachment, Author, Feed};
 pub use item::*;
-pub use feed::{Feed, Author, Attachment};
 
 use std::io::Write;
@@ -116,14 +118,16 @@ pub fn to_vec_pretty(value: &Feed) -> Result<Vec<u8>> {
 /// Serialize a Feed to JSON and output to an IO stream
 pub fn to_writer<W>(writer: W, value: &Feed) -> Result<()>
-    where W: Write
+where
+    W: Write,
 {
     Ok(serde_json::to_writer(writer, value)?)
 }
 
 /// Serialize a Feed to pretty-printed JSON and output to an IO stream
 pub fn to_writer_pretty<W>(writer: W, value: &Feed) -> Result<()>
-    where W: Write
+where
+    W: Write,
 {
     Ok(serde_json::to_writer_pretty(writer, value)?)
 }
@@ -137,10 +141,7 @@ mod tests {
     fn from_str() {
         let feed = r#"{"version": "https://jsonfeed.org/version/1","title":"","items":[]}"#;
         let expected = Feed::default();
-        assert_eq!(
-            super::from_str(&feed).unwrap(),
-            expected
-        );
+        assert_eq!(super::from_str(&feed).unwrap(), expected);
     }
     #[test]
     fn from_reader() {
@@ -148,39 +149,27 @@ mod tests {
         let feed = feed.as_bytes();
         let feed = Cursor::new(feed);
         let expected = Feed::default();
-        assert_eq!(
-            super::from_reader(feed).unwrap(),
-            expected
-        );
+        assert_eq!(super::from_reader(feed).unwrap(), expected);
     }
     #[test]
     fn from_slice() {
         let feed = r#"{"version": "https://jsonfeed.org/version/1","title":"","items":[]}"#;
         let feed = feed.as_bytes();
         let expected = Feed::default();
-        assert_eq!(
-            super::from_slice(&feed).unwrap(),
-            expected
-        );
+        assert_eq!(super::from_slice(&feed).unwrap(), expected);
     }
     #[test]
     fn from_value() {
         let feed = r#"{"version": "https://jsonfeed.org/version/1","title":"","items":[]}"#;
         let feed: serde_json::Value = serde_json::from_str(&feed).unwrap();
         let expected = Feed::default();
-        assert_eq!(
-            super::from_value(feed).unwrap(),
-            expected
-        );
+        assert_eq!(super::from_value(feed).unwrap(), expected);
     }
     #[test]
     fn to_string() {
         let feed = Feed::default();
         let expected = r#"{"version":"https://jsonfeed.org/version/1","title":"","items":[]}"#;
-        assert_eq!(
-            super::to_string(&feed).unwrap(),
-            expected
-        );
+        assert_eq!(super::to_string(&feed).unwrap(), expected);
     }
     #[test]
     fn to_string_pretty() {
@@ -190,28 +179,19 @@ mod tests {
   "title": "",
   "items": []
 }"#;
-        assert_eq!(
-            super::to_string_pretty(&feed).unwrap(),
-            expected
-        );
+        assert_eq!(super::to_string_pretty(&feed).unwrap(), expected);
     }
     #[test]
     fn to_value() {
         let feed = r#"{"version":"https://jsonfeed.org/version/1","title":"","items":[]}"#;
         let expected: serde_json::Value = serde_json::from_str(&feed).unwrap();
-        assert_eq!(
-            super::to_value(Feed::default()).unwrap(),
-            expected
-        );
+        assert_eq!(super::to_value(Feed::default()).unwrap(), expected);
     }
     #[test]
     fn to_vec() {
         let feed = r#"{"version":"https://jsonfeed.org/version/1","title":"","items":[]}"#;
         let expected = feed.as_bytes();
-        assert_eq!(
-            super::to_vec(&Feed::default()).unwrap(),
-            expected
-        );
+        assert_eq!(super::to_vec(&Feed::default()).unwrap(), expected);
    }
     #[test]
     fn to_vec_pretty() {
@@ -221,10 +201,7 @@ mod tests {
   "items": []
 }"#;
         let expected = feed.as_bytes();
-        assert_eq!(
-            super::to_vec_pretty(&Feed::default()).unwrap(),
-            expected
-        );
+        assert_eq!(super::to_vec_pretty(&Feed::default()).unwrap(), expected);
     }
     #[test]
     fn to_writer() {
@@ -249,4 +226,3 @@ mod tests {
         assert_eq!(result, feed);
     }
 }


@@ -4,7 +4,7 @@ let
   pkgs =
     import sources.nixpkgs { overlays = [ (import sources.nixpkgs-mozilla) ]; };
   channel = "nightly";
-  date = "2020-11-25";
+  date = "2021-01-14";
   targets = [ ];
-  chan = pkgs.latest.rustChannels.stable.rust;
+  chan = pkgs.rustChannelOfTargets channel date targets;
 in chan


@@ -5,7 +5,7 @@ let
   dhall-yaml = dhallpkgs.dhall-yaml-simple;
   dhall = dhallpkgs.dhall-simple;
   xepkgs = import sources.xepkgs { inherit pkgs; };
-  rust = import ./nix/rust.nix { };
+  rust = pkgs.callPackage ./nix/rust.nix { };
 in with pkgs;
 with xepkgs;
 mkShell {


@@ -1,45 +0,0 @@
-let kms = https://tulpa.dev/cadey/kubermemes/raw/branch/master/k8s/package.dhall
-
-let kubernetes =
-      https://raw.githubusercontent.com/dhall-lang/dhall-kubernetes/master/1.15/package.dhall
-
-let tag = env:GITHUB_SHA as Text ? "latest"
-
-let image = "ghcr.io/xe/site:${tag}"
-
-let vars
-    : List kubernetes.EnvVar.Type
-    = [ kubernetes.EnvVar::{ name = "PORT", value = Some "3030" }
-      , kubernetes.EnvVar::{ name = "RUST_LOG", value = Some "info" }
-      , kubernetes.EnvVar::{
-        , name = "PATREON_CLIENT_ID"
-        , value = Some env:PATREON_CLIENT_ID as Text
-        }
-      , kubernetes.EnvVar::{
-        , name = "PATREON_CLIENT_SECRET"
-        , value = Some env:PATREON_CLIENT_SECRET as Text
-        }
-      , kubernetes.EnvVar::{
-        , name = "PATREON_ACCESS_TOKEN"
-        , value = Some env:PATREON_ACCESS_TOKEN as Text
-        }
-      , kubernetes.EnvVar::{
-        , name = "PATREON_REFRESH_TOKEN"
-        , value = Some env:PATREON_REFRESH_TOKEN as Text
-        }
-      , kubernetes.EnvVar::{
-        , name = "MI_TOKEN"
-        , value = Some env:MI_TOKEN as Text
-        }
-      ]
-
-in  kms.app.make
-      kms.app.Config::{
-      , name = "christinewebsite"
-      , appPort = 3030
-      , image
-      , replicas = 2
-      , domain = "christine.website"
-      , leIssuer = "prod"
-      , envVars = vars
-      }


@@ -86,6 +86,8 @@ pub async fn init(cfg: PathBuf) -> Result<State> {
     everything.sort();
     everything.reverse();
 
+    let everything: Vec<Post> = everything.into_iter().take(20).collect();
+
     let mut jfb = jsonfeed::Feed::builder()
         .title("Christine Dodrill's Blog")
         .description("My blog posts and rants about various technology things.")


@@ -11,10 +11,11 @@ lazy_static! {
         &["kind"]
     )
     .unwrap();
+    pub static ref ETAG: String = format!(r#"W/"{}""#, uuid::Uuid::new_v4().to_simple());
 }
 
 #[instrument(skip(state))]
-pub async fn jsonfeed(state: Arc<State>) -> Result<impl Reply, Rejection> {
+pub async fn jsonfeed(state: Arc<State>, since: Option<String>) -> Result<impl Reply, Rejection> {
     HIT_COUNTER.with_label_values(&["json"]).inc();
     let state = state.clone();
     Ok(warp::reply::json(&state.jf))
@@ -29,7 +30,22 @@ pub enum RenderError {
 impl warp::reject::Reject for RenderError {}
 
 #[instrument(skip(state))]
-pub async fn atom(state: Arc<State>) -> Result<impl Reply, Rejection> {
+pub async fn atom(state: Arc<State>, since: Option<String>) -> Result<impl Reply, Rejection> {
+    if let Some(etag) = since {
+        if etag == ETAG.clone() {
+            return Response::builder()
+                .status(304)
+                .header("Content-Type", "text/plain")
+                .body(
+                    "You already have the newest version of this feed."
+                        .to_string()
+                        .into_bytes(),
+                )
+                .map_err(RenderError::Build)
+                .map_err(warp::reject::custom);
+        }
+    }
+
     HIT_COUNTER.with_label_values(&["atom"]).inc();
     let state = state.clone();
     let mut buf = Vec::new();
@@ -39,13 +55,29 @@ pub async fn atom(state: Arc<State>) -> Result<impl Reply, Rejection> {
     Response::builder()
         .status(200)
         .header("Content-Type", "application/atom+xml")
+        .header("ETag", ETAG.clone())
         .body(buf)
         .map_err(RenderError::Build)
         .map_err(warp::reject::custom)
 }
 
 #[instrument(skip(state))]
-pub async fn rss(state: Arc<State>) -> Result<impl Reply, Rejection> {
+pub async fn rss(state: Arc<State>, since: Option<String>) -> Result<impl Reply, Rejection> {
+    if let Some(etag) = since {
+        if etag == ETAG.clone() {
+            return Response::builder()
+                .status(304)
+                .header("Content-Type", "text/plain")
+                .body(
+                    "You already have the newest version of this feed."
+                        .to_string()
+                        .into_bytes(),
+                )
+                .map_err(RenderError::Build)
+                .map_err(warp::reject::custom);
+        }
+    }
+
     HIT_COUNTER.with_label_values(&["rss"]).inc();
     let state = state.clone();
     let mut buf = Vec::new();
@@ -55,6 +87,7 @@ pub async fn rss(state: Arc<State>) -> Result<impl Reply, Rejection> {
     Response::builder()
         .status(200)
         .header("Content-Type", "application/rss+xml")
+        .header("ETag", ETAG.clone())
         .body(buf)
         .map_err(RenderError::Build)
         .map_err(warp::reject::custom)


@@ -98,20 +98,39 @@ async fn main() -> Result<()> {
         .and(with_state(state.clone()))
         .and_then(handlers::patrons);
 
-    let files = warp::path("static").and(warp::fs::dir("./static"));
-    let css = warp::path("css").and(warp::fs::dir("./css"));
+    let files = warp::path("static")
+        .and(warp::fs::dir("./static"))
+        .map(|reply| {
+            warp::reply::with_header(
+                reply,
+                "Cache-Control",
+                "public, max-age=86400, stale-if-error=60",
+            )
+        });
+
+    let css = warp::path("css").and(warp::fs::dir("./css")).map(|reply| {
+        warp::reply::with_header(
+            reply,
+            "Cache-Control",
+            "public, max-age=86400, stale-if-error=60",
+        )
+    });
+
     let sw = warp::path("sw.js").and(warp::fs::file("./static/js/sw.js"));
     let robots = warp::path("robots.txt").and(warp::fs::file("./static/robots.txt"));
     let favicon = warp::path("favicon.ico").and(warp::fs::file("./static/favicon/favicon.ico"));
 
     let jsonfeed = warp::path("blog.json")
         .and(with_state(state.clone()))
+        .and(warp::header::optional("if-none-match"))
         .and_then(handlers::feeds::jsonfeed);
     let atom = warp::path("blog.atom")
         .and(with_state(state.clone()))
+        .and(warp::header::optional("if-none-match"))
         .and_then(handlers::feeds::atom);
     let rss = warp::path("blog.rss")
         .and(with_state(state.clone()))
+        .and(warp::header::optional("if-none-match"))
         .and_then(handlers::feeds::rss);
     let sitemap = warp::path("sitemap.xml")
         .and(with_state(state.clone()))
@@ -120,6 +139,7 @@ async fn main() -> Result<()> {
     let go_vanity_jsonfeed = warp::path("jsonfeed")
         .and(warp::any().map(move || "christine.website/jsonfeed"))
        .and(warp::any().map(move || "https://tulpa.dev/Xe/jsonfeed"))
+        .and(warp::any().map(move || "master"))
         .and_then(go_vanity::gitea);
 
     let metrics_endpoint = warp::path("metrics").and(warp::path::end()).map(move || {
@@ -134,14 +154,37 @@ async fn main() -> Result<()> {
             .unwrap()
     });
 
-    let site = index
-        .or(contact.or(feeds).or(resume.or(signalboost)).or(patrons))
-        .or(blog_index.or(series.or(series_view).or(post_view)))
+    let static_pages = index
+        .or(feeds)
+        .or(resume.or(signalboost))
+        .or(patrons)
+        .or(jsonfeed.or(atom.or(sitemap)).or(rss))
+        .or(favicon.or(robots).or(sw))
+        .or(contact)
+        .map(|reply| {
+            warp::reply::with_header(
+                reply,
+                "Cache-Control",
+                "public, max-age=86400, stale-if-error=60",
+            )
+        });
+
+    let dynamic_pages = blog_index
+        .or(series.or(series_view).or(post_view))
         .or(gallery_index.or(gallery_post_view))
         .or(talk_index.or(talk_post_view))
-        .or(jsonfeed.or(atom).or(rss.or(sitemap)))
-        .or(files.or(css).or(favicon).or(sw.or(robots)))
+        .map(|reply| {
+            warp::reply::with_header(
+                reply,
+                "Cache-Control",
+                "public, max-age=3600, stale-if-error=60",
+            )
+        });
+
+    let site = static_pages
+        .or(dynamic_pages)
         .or(healthcheck.or(metrics_endpoint).or(go_vanity_jsonfeed))
+        .or(files.or(css))
         .map(|reply| {
             warp::reply::with_header(
                 reply,


@@ -1,7 +1,6 @@
 /// This code was borrowed from @fasterthanlime.
-
-use color_eyre::eyre::{Result};
-use serde::{Serialize, Deserialize};
+use color_eyre::eyre::Result;
+use serde::{Deserialize, Serialize};
 
 #[derive(Eq, PartialEq, Deserialize, Default, Debug, Serialize, Clone)]
 pub struct Data {
@@ -81,7 +80,7 @@ impl Data {
                         };
                     }
                 }
-                _ => panic!("Expected newline, got {:?}",),
+                _ => panic!("Expected newline, got {:?}", ch),
             },
             State::ReadingFrontMatter { buf, line_start } => match ch {
                 '-' if *line_start => {


@@ -20,7 +20,6 @@ impl Into<jsonfeed::Item> for Post {
         let mut result = jsonfeed::Item::builder()
             .title(self.front_matter.title)
             .content_html(self.body_html)
-            .content_text(self.body)
             .id(format!("https://christine.website/{}", self.link))
             .url(format!("https://christine.website/{}", self.link))
             .date_published(self.date.to_rfc3339())