diff --git a/Cargo.lock b/Cargo.lock index 4370ea7..5a1e9e9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -644,7 +644,7 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "go_vanity" -version = "0.1.0" +version = "0.2.0" dependencies = [ "mime", "ructe", @@ -2364,6 +2364,16 @@ version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05e42f7c18b8f902290b009cde6d651262f956c98bc51bca4cd1d511c9cd85c7" +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom 0.2.1", + "serde", +] + [[package]] name = "vcpkg" version = "0.2.11" @@ -2626,6 +2636,7 @@ dependencies = [ "tracing-futures", "tracing-subscriber", "url", + "uuid", "warp", "xml-rs", ] diff --git a/Cargo.toml b/Cargo.toml index 48b0eee..f398181 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "xesite" -version = "2.1.0" +version = "2.2.0" authors = ["Christine Dodrill "] edition = "2018" build = "src/build.rs" @@ -34,6 +34,7 @@ tracing-subscriber = { version = "0.2", features = ["fmt"] } warp = "0.2" xml-rs = "0.8" url = "2" +uuid = { version = "0.8", features = ["serde", "v4"] } # workspace dependencies go_vanity = { path = "./lib/go_vanity" } diff --git a/blog/site-update-rss-bandwidth-2021-01-14.markdown b/blog/site-update-rss-bandwidth-2021-01-14.markdown new file mode 100644 index 0000000..ce68c48 --- /dev/null +++ b/blog/site-update-rss-bandwidth-2021-01-14.markdown @@ -0,0 +1,69 @@ +--- +title: "Site Update: RSS Bandwidth Fixes" +date: 2021-01-14 +tags: + - devops + - optimization +--- + +# Site Update: RSS Bandwidth Fixes + +Well, so I think I found out where my Kubernetes cluster cost came from. For +context, this blog gets a lot of traffic. Since the last deploy, my blog has +served its RSS feed over 19,000 times. 
I have some pretty naive code powering
+the RSS feed. It basically looked something like this:
+
+- Write RSS feed content-type and beginning of feed
+- For every post I have ever made, include its metadata and content
+- Write end of RSS feed
+
+This code was _fantastically simple_ to develop, however it was very expensive
+in terms of bandwidth. When you add all this up, my RSS feed used to be more
+than a _one megabyte_ response. It was also only getting larger as I posted more
+content.
+
+This is unsustainable, so I have taken multiple actions to try and fix this from
+several angles.
+
+

Rationale: this is my +most commonly hit and largest endpoint. I want to try and cut down its size. +

current feed (everything): 1356706 bytes
20 posts: 177931 bytes
10 +posts: 53004 bytes
5 posts: 29318 bytes pic.twitter.com/snjnn8RFh8

— Cadey +A. Ratio (@theprincessxena) January +15, 2021
+ +[Yes, that graph is showing in _gigabytes_. We're so lucky that bandwidth is +free on Hetzner.](conversation://Mara/hacker) + +First I finally set up the site to run behind Cloudflare. The Cloudflare +settings are set very permissively, so your RSS feed reading bots or whatever +should NOT be affected by this change. If you run into any side effects as a +result of this change, [contact me](/contact) and I can fix it. + +Second, I also now set cache control headers on every response. By default the +"static" pages are cached for a day and the "dynamic" pages are cached for 5 +minutes. This should allow new posts to show up quickly as they have previously. + +Thirdly, I set up +[ETags](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag) for the +feeds. Each of my feeds will send an ETag in a response header. Please use this +tag in future requests to ensure that you don't ask for content you already +have. From what I recall most RSS readers should already support this, however +I'll monitor the situation as reality demands. + +Lastly, I adjusted the +[ttl](https://cyber.harvard.edu/rss/rss.html#ltttlgtSubelementOfLtchannelgt) of +the RSS feed so that compliant feed readers should only check once per day. I've +seen some feed readers request the feed up to every 5 minutes, which is very +excessive. Hopefully this setting will gently nudge them into behaving. + +As a nice side effect I should have slightly lower ram usage on the blog server +too! Right now it's sitting at about 58 and a half MB of ram, however with fewer +copies of my posts sitting in memory this should fall by a significant amount. + +If you have any feedback about this, please [contact me](/contact) or mention me +on Twitter. I read my email frequently and am notified about Twitter mentions +very quickly. 
diff --git a/default.nix b/default.nix index 68536a1..0fa38c5 100644 --- a/default.nix +++ b/default.nix @@ -2,12 +2,17 @@ with pkgs; let + rust = pkgs.callPackage ./nix/rust.nix { }; + srcNoTarget = dir: builtins.filterSource (path: type: type != "directory" || builtins.baseNameOf path != "target") dir; - naersk = pkgs.callPackage sources.naersk { }; + naersk = pkgs.callPackage sources.naersk { + rustc = rust; + cargo = rust; + }; dhallpkgs = import sources.easy-dhall-nix { inherit pkgs; }; src = srcNoTarget ./.; diff --git a/docker.nix b/docker.nix deleted file mode 100644 index a49bb18..0000000 --- a/docker.nix +++ /dev/null @@ -1,23 +0,0 @@ -{ system ? builtins.currentSystem }: - -let - sources = import ./nix/sources.nix; - pkgs = import sources.nixpkgs { inherit system; }; - callPackage = pkgs.lib.callPackageWith pkgs; - site = callPackage ./default.nix { }; - - dockerImage = pkg: - pkgs.dockerTools.buildLayeredImage { - name = "xena/christinewebsite"; - tag = "latest"; - - contents = [ pkgs.cacert pkg ]; - - config = { - Cmd = [ "${pkg}/bin/xesite" ]; - Env = [ "CONFIG_FNAME=${pkg}/config.dhall" "RUST_LOG=info" ]; - WorkingDir = "/"; - }; - }; - -in dockerImage site diff --git a/lib/go_vanity/Cargo.toml b/lib/go_vanity/Cargo.toml index 90fa4f2..f4e5432 100644 --- a/lib/go_vanity/Cargo.toml +++ b/lib/go_vanity/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "go_vanity" -version = "0.1.0" +version = "0.2.0" authors = ["Christine Dodrill "] edition = "2018" build = "src/build.rs" diff --git a/lib/go_vanity/src/lib.rs b/lib/go_vanity/src/lib.rs index e4a11e2..756c555 100644 --- a/lib/go_vanity/src/lib.rs +++ b/lib/go_vanity/src/lib.rs @@ -1,12 +1,12 @@ +use crate::templates::RenderRucte; use warp::{http::Response, Rejection, Reply}; -use crate::templates::{RenderRucte}; include!(concat!(env!("OUT_DIR"), "/templates.rs")); -pub async fn gitea(pkg_name: &str, git_repo: &str) -> Result { - Response::builder().html(|o| templates::gitea_html(o, pkg_name, git_repo)) +pub 
async fn gitea(pkg_name: &str, git_repo: &str, branch: &str) -> Result { + Response::builder().html(|o| templates::gitea_html(o, pkg_name, git_repo, branch)) } -pub async fn github(pkg_name: &str, git_repo: &str) -> Result { - Response::builder().html(|o| templates::github_html(o, pkg_name, git_repo)) +pub async fn github(pkg_name: &str, git_repo: &str, branch: &str) -> Result { + Response::builder().html(|o| templates::github_html(o, pkg_name, git_repo, branch)) } diff --git a/lib/go_vanity/templates/gitea.rs.html b/lib/go_vanity/templates/gitea.rs.html index b20985b..f062d91 100644 --- a/lib/go_vanity/templates/gitea.rs.html +++ b/lib/go_vanity/templates/gitea.rs.html @@ -1,11 +1,11 @@ -@(pkg_name: &str, git_repo: &str) +@(pkg_name: &str, git_repo: &str, branch: &str) - + diff --git a/lib/go_vanity/templates/github.rs.html b/lib/go_vanity/templates/github.rs.html index 61f42e5..9782b1c 100644 --- a/lib/go_vanity/templates/github.rs.html +++ b/lib/go_vanity/templates/github.rs.html @@ -1,11 +1,11 @@ -@(pkg_name: &str, git_repo: &str) +@(pkg_name: &str, git_repo: &str, branch: &str) - + diff --git a/lib/jsonfeed/src/builder.rs b/lib/jsonfeed/src/builder.rs index f17740f..640a280 100644 --- a/lib/jsonfeed/src/builder.rs +++ b/lib/jsonfeed/src/builder.rs @@ -1,7 +1,7 @@ use std::default::Default; use errors::*; -use feed::{Feed, Author, Attachment}; +use feed::{Attachment, Author, Feed}; use item::{Content, Item}; /// Feed Builder @@ -160,7 +160,7 @@ impl ItemBuilder { match self.content { Some(Content::Text(t)) => { self.content = Some(Content::Both(i.into(), t)); - }, + } _ => { self.content = Some(Content::Html(i.into())); } @@ -172,10 +172,10 @@ impl ItemBuilder { match self.content { Some(Content::Html(s)) => { self.content = Some(Content::Both(s, i.into())); - }, + } _ => { self.content = Some(Content::Text(i.into())); - }, + } } self } @@ -197,8 +197,7 @@ impl ItemBuilder { date_modified: self.date_modified, author: self.author, tags: self.tags, - attachments: 
self.attachments + attachments: self.attachments, }) } } - diff --git a/lib/jsonfeed/src/errors.rs b/lib/jsonfeed/src/errors.rs index 936b7ec..b94779c 100644 --- a/lib/jsonfeed/src/errors.rs +++ b/lib/jsonfeed/src/errors.rs @@ -1,7 +1,6 @@ use serde_json; -error_chain!{ +error_chain! { foreign_links { Serde(serde_json::Error); } } - diff --git a/lib/jsonfeed/src/feed.rs b/lib/jsonfeed/src/feed.rs index 8b5b5ce..320feb6 100644 --- a/lib/jsonfeed/src/feed.rs +++ b/lib/jsonfeed/src/feed.rs @@ -1,7 +1,7 @@ use std::default::Default; -use item::Item; use builder::Builder; +use item::Item; const VERSION_1: &'static str = "https://jsonfeed.org/version/1"; @@ -145,9 +145,9 @@ pub struct Hub { #[cfg(test)] mod tests { + use super::*; use serde_json; use std::default::Default; - use super::*; #[test] fn serialize_feed() { @@ -168,18 +168,16 @@ mod tests { #[test] fn deserialize_feed() { - let json = r#"{"version":"https://jsonfeed.org/version/1","title":"some title","items":[]}"#; + let json = + r#"{"version":"https://jsonfeed.org/version/1","title":"some title","items":[]}"#; let feed: Feed = serde_json::from_str(&json).unwrap(); let expected = Feed { version: "https://jsonfeed.org/version/1".to_string(), - title: "some title".to_string(), - items: vec![], - ..Default::default() + title: "some title".to_string(), + items: vec![], + ..Default::default() }; - assert_eq!( - feed, - expected - ); + assert_eq!(feed, expected); } #[test] @@ -208,10 +206,7 @@ mod tests { size_in_bytes: Some(1), duration_in_seconds: Some(1), }; - assert_eq!( - attachment, - expected - ); + assert_eq!(attachment, expected); } #[test] @@ -229,17 +224,15 @@ mod tests { #[test] fn deserialize_author() { - let json = r#"{"name":"bob jones","url":"http://example.com","avatar":"http://img.com/blah"}"#; + let json = + r#"{"name":"bob jones","url":"http://example.com","avatar":"http://img.com/blah"}"#; let author: Author = serde_json::from_str(&json).unwrap(); let expected = Author { name: Some("bob 
jones".to_string()), url: Some("http://example.com".to_string()), avatar: Some("http://img.com/blah".to_string()), }; - assert_eq!( - author, - expected - ); + assert_eq!(author, expected); } #[test] @@ -262,10 +255,7 @@ mod tests { type_: "some-type".to_string(), url: "http://example.com".to_string(), }; - assert_eq!( - hub, - expected - ); + assert_eq!(hub, expected); } #[test] diff --git a/lib/jsonfeed/src/item.rs b/lib/jsonfeed/src/item.rs index 605525b..0f7d6ab 100644 --- a/lib/jsonfeed/src/item.rs +++ b/lib/jsonfeed/src/item.rs @@ -1,11 +1,11 @@ -use std::fmt; use std::default::Default; +use std::fmt; -use feed::{Author, Attachment}; use builder::ItemBuilder; +use feed::{Attachment, Author}; -use serde::ser::{Serialize, Serializer, SerializeStruct}; -use serde::de::{self, Deserialize, Deserializer, Visitor, MapAccess}; +use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor}; +use serde::ser::{Serialize, SerializeStruct, Serializer}; /// Represents the `content_html` and `content_text` attributes of an item #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] @@ -61,7 +61,8 @@ impl Default for Item { impl Serialize for Item { fn serialize(&self, serializer: S) -> Result - where S: Serializer + where + S: Serializer, { let mut state = serializer.serialize_struct("Item", 14)?; state.serialize_field("id", &self.id)?; @@ -78,15 +79,15 @@ impl Serialize for Item { Content::Html(ref s) => { state.serialize_field("content_html", s)?; state.serialize_field("content_text", &None::>)?; - }, + } Content::Text(ref s) => { state.serialize_field("content_html", &None::>)?; state.serialize_field("content_text", s)?; - }, + } Content::Both(ref s, ref t) => { state.serialize_field("content_html", s)?; state.serialize_field("content_text", t)?; - }, + } }; if self.summary.is_some() { state.serialize_field("summary", &self.summary)?; @@ -117,8 +118,9 @@ impl Serialize for Item { } impl<'de> Deserialize<'de> for Item { - fn deserialize(deserializer: D) -> 
Result - where D: Deserializer<'de> + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, { enum Field { Id, @@ -135,11 +137,12 @@ impl<'de> Deserialize<'de> for Item { Author, Tags, Attachments, - }; + } impl<'de> Deserialize<'de> for Field { fn deserialize(deserializer: D) -> Result - where D: Deserializer<'de> + where + D: Deserializer<'de>, { struct FieldVisitor; @@ -151,7 +154,8 @@ impl<'de> Deserialize<'de> for Item { } fn visit_str(self, value: &str) -> Result - where E: de::Error + where + E: de::Error, { match value { "id" => Ok(Field::Id), @@ -186,7 +190,8 @@ impl<'de> Deserialize<'de> for Item { } fn visit_map(self, mut map: V) -> Result - where V: MapAccess<'de> + where + V: MapAccess<'de>, { let mut id = None; let mut url = None; @@ -210,99 +215,93 @@ impl<'de> Deserialize<'de> for Item { return Err(de::Error::duplicate_field("id")); } id = Some(map.next_value()?); - }, + } Field::Url => { if url.is_some() { return Err(de::Error::duplicate_field("url")); } url = map.next_value()?; - }, + } Field::ExternalUrl => { if external_url.is_some() { return Err(de::Error::duplicate_field("external_url")); } external_url = map.next_value()?; - }, + } Field::Title => { if title.is_some() { return Err(de::Error::duplicate_field("title")); } title = map.next_value()?; - }, + } Field::ContentHtml => { if content_html.is_some() { return Err(de::Error::duplicate_field("content_html")); } content_html = map.next_value()?; - }, + } Field::ContentText => { if content_text.is_some() { return Err(de::Error::duplicate_field("content_text")); } content_text = map.next_value()?; - }, + } Field::Summary => { if summary.is_some() { return Err(de::Error::duplicate_field("summary")); } summary = map.next_value()?; - }, + } Field::Image => { if image.is_some() { return Err(de::Error::duplicate_field("image")); } image = map.next_value()?; - }, + } Field::BannerImage => { if banner_image.is_some() { return Err(de::Error::duplicate_field("banner_image")); } 
banner_image = map.next_value()?; - }, + } Field::DatePublished => { if date_published.is_some() { return Err(de::Error::duplicate_field("date_published")); } date_published = map.next_value()?; - }, + } Field::DateModified => { if date_modified.is_some() { return Err(de::Error::duplicate_field("date_modified")); } date_modified = map.next_value()?; - }, + } Field::Author => { if author.is_some() { return Err(de::Error::duplicate_field("author")); } author = map.next_value()?; - }, + } Field::Tags => { if tags.is_some() { return Err(de::Error::duplicate_field("tags")); } tags = map.next_value()?; - }, + } Field::Attachments => { if attachments.is_some() { return Err(de::Error::duplicate_field("attachments")); } attachments = map.next_value()?; - }, + } } } let id = id.ok_or_else(|| de::Error::missing_field("id"))?; let content = match (content_html, content_text) { - (Some(s), Some(t)) => { - Content::Both(s.to_string(), t.to_string()) - }, - (Some(s), _) => { - Content::Html(s.to_string()) - }, - (_, Some(t)) => { - Content::Text(t.to_string()) - }, + (Some(s), Some(t)) => Content::Both(s.to_string(), t.to_string()), + (Some(s), _) => Content::Html(s.to_string()), + (_, Some(t)) => Content::Text(t.to_string()), _ => return Err(de::Error::missing_field("content_html or content_text")), }; @@ -363,7 +362,12 @@ mod tests { banner_image: Some("http://img.com/blah".into()), date_published: Some("2017-01-01 10:00:00".into()), date_modified: Some("2017-01-01 10:00:00".into()), - author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")), + author: Some( + Author::new() + .name("bob jones") + .url("http://example.com") + .avatar("http://img.com/blah"), + ), tags: Some(vec!["json".into(), "feed".into()]), attachments: Some(vec![]), }; @@ -387,7 +391,12 @@ mod tests { banner_image: Some("http://img.com/blah".into()), date_published: Some("2017-01-01 10:00:00".into()), date_modified: Some("2017-01-01 10:00:00".into()), - author: 
Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")), + author: Some( + Author::new() + .name("bob jones") + .url("http://example.com") + .avatar("http://img.com/blah"), + ), tags: Some(vec!["json".into(), "feed".into()]), attachments: Some(vec![]), }; @@ -411,7 +420,12 @@ mod tests { banner_image: Some("http://img.com/blah".into()), date_published: Some("2017-01-01 10:00:00".into()), date_modified: Some("2017-01-01 10:00:00".into()), - author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")), + author: Some( + Author::new() + .name("bob jones") + .url("http://example.com") + .avatar("http://img.com/blah"), + ), tags: Some(vec!["json".into(), "feed".into()]), attachments: Some(vec![]), }; @@ -437,7 +451,12 @@ mod tests { banner_image: Some("http://img.com/blah".into()), date_published: Some("2017-01-01 10:00:00".into()), date_modified: Some("2017-01-01 10:00:00".into()), - author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")), + author: Some( + Author::new() + .name("bob jones") + .url("http://example.com") + .avatar("http://img.com/blah"), + ), tags: Some(vec!["json".into(), "feed".into()]), attachments: Some(vec![]), }; @@ -460,7 +479,12 @@ mod tests { banner_image: Some("http://img.com/blah".into()), date_published: Some("2017-01-01 10:00:00".into()), date_modified: Some("2017-01-01 10:00:00".into()), - author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")), + author: Some( + Author::new() + .name("bob jones") + .url("http://example.com") + .avatar("http://img.com/blah"), + ), tags: Some(vec!["json".into(), "feed".into()]), attachments: Some(vec![]), }; @@ -483,11 +507,15 @@ mod tests { banner_image: Some("http://img.com/blah".into()), date_published: Some("2017-01-01 10:00:00".into()), date_modified: Some("2017-01-01 10:00:00".into()), - author: Some(Author::new().name("bob 
jones").url("http://example.com").avatar("http://img.com/blah")), + author: Some( + Author::new() + .name("bob jones") + .url("http://example.com") + .avatar("http://img.com/blah"), + ), tags: Some(vec!["json".into(), "feed".into()]), attachments: Some(vec![]), }; assert_eq!(item, expected); } } - diff --git a/lib/jsonfeed/src/lib.rs b/lib/jsonfeed/src/lib.rs index bc1d94e..812083e 100644 --- a/lib/jsonfeed/src/lib.rs +++ b/lib/jsonfeed/src/lib.rs @@ -2,7 +2,7 @@ //! instead of XML //! //! This crate can serialize and deserialize between JSON Feed strings -//! and Rust data structures. It also allows for programmatically building +//! and Rust data structures. It also allows for programmatically building //! a JSON Feed //! //! Example: @@ -40,18 +40,20 @@ //! ``` extern crate serde; -#[macro_use] extern crate error_chain; -#[macro_use] extern crate serde_derive; +#[macro_use] +extern crate error_chain; +#[macro_use] +extern crate serde_derive; extern crate serde_json; -mod errors; -mod item; -mod feed; mod builder; +mod errors; +mod feed; +mod item; pub use errors::*; +pub use feed::{Attachment, Author, Feed}; pub use item::*; -pub use feed::{Feed, Author, Attachment}; use std::io::Write; @@ -116,14 +118,16 @@ pub fn to_vec_pretty(value: &Feed) -> Result> { /// Serialize a Feed to JSON and output to an IO stream pub fn to_writer(writer: W, value: &Feed) -> Result<()> - where W: Write +where + W: Write, { Ok(serde_json::to_writer(writer, value)?) } /// Serialize a Feed to pretty-printed JSON and output to an IO stream pub fn to_writer_pretty(writer: W, value: &Feed) -> Result<()> - where W: Write +where + W: Write, { Ok(serde_json::to_writer_pretty(writer, value)?) 
} @@ -137,10 +141,7 @@ mod tests { fn from_str() { let feed = r#"{"version": "https://jsonfeed.org/version/1","title":"","items":[]}"#; let expected = Feed::default(); - assert_eq!( - super::from_str(&feed).unwrap(), - expected - ); + assert_eq!(super::from_str(&feed).unwrap(), expected); } #[test] fn from_reader() { @@ -148,39 +149,27 @@ mod tests { let feed = feed.as_bytes(); let feed = Cursor::new(feed); let expected = Feed::default(); - assert_eq!( - super::from_reader(feed).unwrap(), - expected - ); + assert_eq!(super::from_reader(feed).unwrap(), expected); } #[test] fn from_slice() { let feed = r#"{"version": "https://jsonfeed.org/version/1","title":"","items":[]}"#; let feed = feed.as_bytes(); let expected = Feed::default(); - assert_eq!( - super::from_slice(&feed).unwrap(), - expected - ); + assert_eq!(super::from_slice(&feed).unwrap(), expected); } #[test] fn from_value() { let feed = r#"{"version": "https://jsonfeed.org/version/1","title":"","items":[]}"#; let feed: serde_json::Value = serde_json::from_str(&feed).unwrap(); let expected = Feed::default(); - assert_eq!( - super::from_value(feed).unwrap(), - expected - ); + assert_eq!(super::from_value(feed).unwrap(), expected); } #[test] fn to_string() { let feed = Feed::default(); let expected = r#"{"version":"https://jsonfeed.org/version/1","title":"","items":[]}"#; - assert_eq!( - super::to_string(&feed).unwrap(), - expected - ); + assert_eq!(super::to_string(&feed).unwrap(), expected); } #[test] fn to_string_pretty() { @@ -190,28 +179,19 @@ mod tests { "title": "", "items": [] }"#; - assert_eq!( - super::to_string_pretty(&feed).unwrap(), - expected - ); + assert_eq!(super::to_string_pretty(&feed).unwrap(), expected); } #[test] fn to_value() { let feed = r#"{"version":"https://jsonfeed.org/version/1","title":"","items":[]}"#; let expected: serde_json::Value = serde_json::from_str(&feed).unwrap(); - assert_eq!( - super::to_value(Feed::default()).unwrap(), - expected - ); + 
assert_eq!(super::to_value(Feed::default()).unwrap(), expected); } #[test] fn to_vec() { let feed = r#"{"version":"https://jsonfeed.org/version/1","title":"","items":[]}"#; let expected = feed.as_bytes(); - assert_eq!( - super::to_vec(&Feed::default()).unwrap(), - expected - ); + assert_eq!(super::to_vec(&Feed::default()).unwrap(), expected); } #[test] fn to_vec_pretty() { @@ -221,10 +201,7 @@ mod tests { "items": [] }"#; let expected = feed.as_bytes(); - assert_eq!( - super::to_vec_pretty(&Feed::default()).unwrap(), - expected - ); + assert_eq!(super::to_vec_pretty(&Feed::default()).unwrap(), expected); } #[test] fn to_writer() { @@ -249,4 +226,3 @@ mod tests { assert_eq!(result, feed); } } - diff --git a/nix/rust.nix b/nix/rust.nix index 725f042..8215513 100644 --- a/nix/rust.nix +++ b/nix/rust.nix @@ -4,7 +4,7 @@ let pkgs = import sources.nixpkgs { overlays = [ (import sources.nixpkgs-mozilla) ]; }; channel = "nightly"; - date = "2020-11-25"; + date = "2021-01-14"; targets = [ ]; - chan = pkgs.latest.rustChannels.stable.rust; + chan = pkgs.rustChannelOfTargets channel date targets; in chan diff --git a/shell.nix b/shell.nix index 1d1c111..af22215 100644 --- a/shell.nix +++ b/shell.nix @@ -5,7 +5,7 @@ let dhall-yaml = dhallpkgs.dhall-yaml-simple; dhall = dhallpkgs.dhall-simple; xepkgs = import sources.xepkgs { inherit pkgs; }; - rust = import ./nix/rust.nix { }; + rust = pkgs.callPackage ./nix/rust.nix { }; in with pkgs; with xepkgs; mkShell { diff --git a/src/app/mod.rs b/src/app/mod.rs index 7cb0044..e763792 100644 --- a/src/app/mod.rs +++ b/src/app/mod.rs @@ -86,6 +86,8 @@ pub async fn init(cfg: PathBuf) -> Result { everything.sort(); everything.reverse(); + let everything: Vec = everything.into_iter().take(20).collect(); + let mut jfb = jsonfeed::Feed::builder() .title("Christine Dodrill's Blog") .description("My blog posts and rants about various technology things.") diff --git a/src/handlers/feeds.rs b/src/handlers/feeds.rs index 2022393..5b5987d 100644 --- 
a/src/handlers/feeds.rs +++ b/src/handlers/feeds.rs @@ -11,10 +11,11 @@ lazy_static! { &["kind"] ) .unwrap(); + pub static ref ETAG: String = format!(r#"W/"{}""#, uuid::Uuid::new_v4().to_simple()); } #[instrument(skip(state))] -pub async fn jsonfeed(state: Arc) -> Result { +pub async fn jsonfeed(state: Arc, since: Option) -> Result { HIT_COUNTER.with_label_values(&["json"]).inc(); let state = state.clone(); Ok(warp::reply::json(&state.jf)) @@ -29,7 +30,22 @@ pub enum RenderError { impl warp::reject::Reject for RenderError {} #[instrument(skip(state))] -pub async fn atom(state: Arc) -> Result { +pub async fn atom(state: Arc, since: Option) -> Result { + if let Some(etag) = since { + if etag == ETAG.clone() { + return Response::builder() + .status(304) + .header("Content-Type", "text/plain") + .body( + "You already have the newest version of this feed." + .to_string() + .into_bytes(), + ) + .map_err(RenderError::Build) + .map_err(warp::reject::custom); + } + } + HIT_COUNTER.with_label_values(&["atom"]).inc(); let state = state.clone(); let mut buf = Vec::new(); @@ -39,13 +55,29 @@ pub async fn atom(state: Arc) -> Result { Response::builder() .status(200) .header("Content-Type", "application/atom+xml") + .header("ETag", ETAG.clone()) .body(buf) .map_err(RenderError::Build) .map_err(warp::reject::custom) } #[instrument(skip(state))] -pub async fn rss(state: Arc) -> Result { +pub async fn rss(state: Arc, since: Option) -> Result { + if let Some(etag) = since { + if etag == ETAG.clone() { + return Response::builder() + .status(304) + .header("Content-Type", "text/plain") + .body( + "You already have the newest version of this feed." 
+ .to_string() + .into_bytes(), + ) + .map_err(RenderError::Build) + .map_err(warp::reject::custom); + } + } + HIT_COUNTER.with_label_values(&["rss"]).inc(); let state = state.clone(); let mut buf = Vec::new(); @@ -55,6 +87,7 @@ pub async fn rss(state: Arc) -> Result { Response::builder() .status(200) .header("Content-Type", "application/rss+xml") + .header("ETag", ETAG.clone()) .body(buf) .map_err(RenderError::Build) .map_err(warp::reject::custom) diff --git a/src/main.rs b/src/main.rs index c05ac49..285bb93 100644 --- a/src/main.rs +++ b/src/main.rs @@ -113,20 +113,39 @@ async fn main() -> Result<()> { .and(with_state(state.clone())) .and_then(handlers::patrons); - let files = warp::path("static").and(warp::fs::dir("./static")); - let css = warp::path("css").and(warp::fs::dir("./css")); + let files = warp::path("static") + .and(warp::fs::dir("./static")) + .map(|reply| { + warp::reply::with_header( + reply, + "Cache-Control", + "public, max-age=86400, stale-if-error=60", + ) + }); + + let css = warp::path("css").and(warp::fs::dir("./css")).map(|reply| { + warp::reply::with_header( + reply, + "Cache-Control", + "public, max-age=86400, stale-if-error=60", + ) + }); + let sw = warp::path("sw.js").and(warp::fs::file("./static/js/sw.js")); let robots = warp::path("robots.txt").and(warp::fs::file("./static/robots.txt")); let favicon = warp::path("favicon.ico").and(warp::fs::file("./static/favicon/favicon.ico")); let jsonfeed = warp::path("blog.json") .and(with_state(state.clone())) + .and(warp::header::optional("if-none-match")) .and_then(handlers::feeds::jsonfeed); let atom = warp::path("blog.atom") .and(with_state(state.clone())) + .and(warp::header::optional("if-none-match")) .and_then(handlers::feeds::atom); let rss = warp::path("blog.rss") .and(with_state(state.clone())) + .and(warp::header::optional("if-none-match")) .and_then(handlers::feeds::rss); let sitemap = warp::path("sitemap.xml") .and(with_state(state.clone())) @@ -135,6 +154,7 @@ async fn main() -> 
Result<()> { let go_vanity_jsonfeed = warp::path("jsonfeed") .and(warp::any().map(move || "christine.website/jsonfeed")) .and(warp::any().map(move || "https://tulpa.dev/Xe/jsonfeed")) + .and(warp::any().map(move || "master")) .and_then(go_vanity::gitea); let metrics_endpoint = warp::path("metrics").and(warp::path::end()).map(move || { @@ -149,14 +169,37 @@ async fn main() -> Result<()> { .unwrap() }); - let site = index - .or(contact.or(feeds).or(resume.or(signalboost)).or(patrons)) - .or(blog_index.or(series.or(series_view).or(post_view))) + let static_pages = index + .or(feeds) + .or(resume.or(signalboost)) + .or(patrons) + .or(jsonfeed.or(atom.or(sitemap)).or(rss)) + .or(favicon.or(robots).or(sw)) + .or(contact) + .map(|reply| { + warp::reply::with_header( + reply, + "Cache-Control", + "public, max-age=86400, stale-if-error=60", + ) + }); + + let dynamic_pages = blog_index + .or(series.or(series_view).or(post_view)) .or(gallery_index.or(gallery_post_view)) .or(talk_index.or(talk_post_view)) - .or(jsonfeed.or(atom).or(rss.or(sitemap))) - .or(files.or(css).or(favicon).or(sw.or(robots))) + .map(|reply| { + warp::reply::with_header( + reply, + "Cache-Control", + "public, max-age=600, stale-if-error=60", + ) + }); + + let site = static_pages + .or(dynamic_pages) .or(healthcheck.or(metrics_endpoint).or(go_vanity_jsonfeed)) + .or(files.or(css)) .map(|reply| { warp::reply::with_header( reply, diff --git a/src/post/frontmatter.rs b/src/post/frontmatter.rs index 615f2c5..f2c1587 100644 --- a/src/post/frontmatter.rs +++ b/src/post/frontmatter.rs @@ -1,7 +1,6 @@ /// This code was borrowed from @fasterthanlime. 
- -use color_eyre::eyre::{Result}; -use serde::{Serialize, Deserialize}; +use color_eyre::eyre::Result; +use serde::{Deserialize, Serialize}; #[derive(Eq, PartialEq, Deserialize, Default, Debug, Serialize, Clone)] pub struct Data { @@ -81,7 +80,7 @@ impl Data { }; } } - _ => panic!("Expected newline, got {:?}",), + _ => panic!("Expected newline, got {:?}", ch), }, State::ReadingFrontMatter { buf, line_start } => match ch { '-' if *line_start => { diff --git a/src/post/mod.rs b/src/post/mod.rs index 79affbc..eb8ee54 100644 --- a/src/post/mod.rs +++ b/src/post/mod.rs @@ -20,7 +20,6 @@ impl Into for Post { let mut result = jsonfeed::Item::builder() .title(self.front_matter.title) .content_html(self.body_html) - .content_text(self.body) .id(format!("https://christine.website/{}", self.link)) .url(format!("https://christine.website/{}", self.link)) .date_published(self.date.to_rfc3339()) diff --git a/templates/blog_rss.rs.xml b/templates/blog_rss.rs.xml index 15e3c82..0e87ad1 100644 --- a/templates/blog_rss.rs.xml +++ b/templates/blog_rss.rs.xml @@ -9,6 +9,7 @@ https://christine.website/blog Tech, philosophy and more @APP https://github.com/Xe/site + 1440 @for post in posts { https://christine.website/@post.link diff --git a/templates/footer.rs.html b/templates/footer.rs.html index a7540e8..6ae18e6 100644 --- a/templates/footer.rs.html +++ b/templates/footer.rs.html @@ -7,7 +7,7 @@
Copyright 2020 Christine Dodrill. Any and all opinions listed here are my own and not representative of my employers; future, past and present.

Looking for someone for your team? Take a look here.

-

Served by @APP running @env!("out")/bin/xesite, see source code here.

+

Served by @env!("out")/bin/xesite, see source code here.