Rewrite site backend in Rust (#178)
* add shell.nix changes for Rust #176
* set up base crate layout
* add first set of dependencies
* start adding basic app modules
* start html templates
* serve index page
* add contact and feeds pages
* add resume rendering support
* resume cleanups
* get signalboost page working
* rewrite config to be in dhall
* more work
* basic generic post loading
* more tests
* initial blog index support
* fix routing?
* render blogposts
* X-Clacks-Overhead
* split blog handlers into blog.rs
* gallery index
* gallery posts
* fix hashtags
* remove instantpage (it messes up the metrics)
* talk support + prometheus
* Create rust.yml
* Update rust.yml
* Update codeql-analysis.yml
* add jsonfeed library
* jsonfeed support
* rss/atom
* go mod tidy
* atom: add posted date
* rss: add publishing date
* nix: build rust program
* rip out go code
* rip out go templates
* prepare for serving in docker
* create kubernetes deployment
* create automagic deployment
* build docker images on non-master
* more fixes
* fix timestamps
* fix RSS/Atom/JSONFeed validation errors
* add go vanity import redirecting
* templates/header: remove this
* atom feed: fixes
* fix?
* fix??
* fix rust tests
* Update rust.yml
* automatically show snow during the winter
* fix dates
* show commit link in footer
* sitemap support
* fix compiler warning
* start basic patreon client
* integrate kankyo
* fix patreon client
* add patrons page
* remove this
* handle patron errors better
* fix build
* clean up deploy
* sort envvars for deploy
* remove deps.nix
* shell.nix: remove go
* update README
* fix envvars for tests
* nice
* blog: add rewrite in rust post
* blog/site-update: more words
This commit is contained in: parent 449e934246, commit 385d25c9f9.
`.gitattributes` (@@ -1,2 +1 @@; `nix/deps.nix` was removed in this commit):

```
nix/deps.nix linguist-vendored
nix/sources.nix linguist-vendored
```
CodeQL workflow (deleted, @@ -1,39 +0,0 @@):

```yaml
name: "Code scanning - action"

on:
  push:
  pull_request:
  schedule:
    - cron: '0 18 * * 6'

jobs:
  CodeQL-Build:
    runs-on: ubuntu-latest

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2
      with:
        # We must fetch at least the immediate parents so that if this is
        # a pull request then we can checkout the head.
        fetch-depth: 2

    # If this run was triggered by a pull request event, then checkout
    # the head of the pull request instead of the merge commit.
    - run: git checkout HEAD^2
      if: ${{ github.event_name == 'pull_request' }}

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      # Override language selection by uncommenting this and choosing your languages
      with:
        languages: go

    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v1

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1
```
Go CI workflow (deleted, @@ -1,21 +0,0 @@):

```yaml
name: Go
on:
  - push
  - pull_request
jobs:
  build:
    name: Build
    runs-on: ubuntu-latest
    steps:
    - name: Set up Go 1.14
      uses: actions/setup-go@v1
      with:
        go-version: 1.14
      id: go
    - name: Check out code into the Go module directory
      uses: actions/checkout@v1
    - name: Test
      run: go test -v ./...
      env:
        GO111MODULE: on
        GOPROXY: https://cache.greedo.xeserv.us
```
CI/CD deploy workflow (deleted, @@ -1,80 +0,0 @@):

```yaml
name: "CI/CD"
on:
  push:
    branches:
      - master
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v1
    - name: Build container image
      run: |
        docker build -t xena/christinewebsite:$(echo $GITHUB_SHA | head -c7) .
        echo $DOCKER_PASSWORD | docker login -u $DOCKER_USERNAME --password-stdin
        docker push xena/christinewebsite
      env:
        DOCKER_USERNAME: "xena"
        DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
    - name: Download secrets/Install/Configure/Use Dyson
      run: |
        mkdir ~/.ssh
        echo $FILE_DATA | base64 -d > ~/.ssh/id_rsa
        md5sum ~/.ssh/id_rsa
        chmod 600 ~/.ssh/id_rsa
        git clone git@ssh.tulpa.dev:cadey/within-terraform-secret
        curl https://xena.greedo.xeserv.us/files/dyson-linux-amd64-0.1.0.tgz | tar xz
        cp ./dyson-linux-amd64-0.1.1/dyson .
        rm -rf dyson-linux-amd64-0.1.1
        mkdir -p ~/.config/dyson

        echo '[DigitalOcean]
        Token = ""

        [Cloudflare]
        Email = ""
        Token = ""

        [Secrets]
        GitCheckout = "./within-terraform-secret"' > ~/.config/dyson/dyson.ini

        ./dyson manifest \
          --name=christinewebsite \
          --domain=christine.website \
          --dockerImage=xena/christinewebsite:$(echo $GITHUB_SHA | head -c7) \
          --containerPort=5000 \
          --replicas=2 \
          --useProdLE=true > $GITHUB_WORKSPACE/deploy.yml
      env:
        FILE_DATA: ${{ secrets.SSH_PRIVATE_KEY }}
        GIT_SSH_COMMAND: "ssh -i ~/.ssh/id_rsa -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
    - name: Save DigitalOcean kubeconfig
      uses: digitalocean/action-doctl@master
      env:
        DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_TOKEN }}
      with:
        args: kubernetes cluster kubeconfig show kubermemes > $GITHUB_WORKSPACE/.kubeconfig
    - name: Deploy to DigitalOcean Kubernetes
      uses: docker://lachlanevenson/k8s-kubectl
      with:
        args: --kubeconfig=/github/workspace/.kubeconfig apply -n apps -f /github/workspace/deploy.yml
    - name: Verify deployment
      uses: docker://lachlanevenson/k8s-kubectl
      with:
        args: --kubeconfig=/github/workspace/.kubeconfig rollout status -n apps deployment/christinewebsite
    - name: Ping Google
      uses: docker://lachlanevenson/k8s-kubectl
      with:
        args: --kubeconfig=/github/workspace/.kubeconfig apply -f /github/workspace/k8s/job.yml
    - name: Sleep
      run: |
        sleep 5
    - name: Don't Ping Google
      uses: docker://lachlanevenson/k8s-kubectl
      with:
        args: --kubeconfig=/github/workspace/.kubeconfig delete -f /github/workspace/k8s/job.yml
    - name: POSSE
      env:
        MI_TOKEN: ${{ secrets.MI_TOKEN }}
      run: |
        curl -H "Authorization: $MI_TOKEN" --data "https://christine.website/blog.json" https://mi.within.website/blog/refresh
```
Nix workflow (@@ -1,16 +1,42 @@; this hunk replaces the old `tests` job with new `docker-build` and `release` jobs, and the removed and added lines are shown interleaved):

```yaml
name: "Nix"
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
jobs:
  tests:
  docker-build:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v1
    - uses: cachix/install-nix-action@v6
    - uses: cachix/cachix-action@v3
      with:
        name: xe
    - run: |
        nix-build docker.nix
        docker load -i result
        docker tag xena/christinewebsite:latest xena/christinewebsite:$(echo $GITHUB_SHA | head -c7)
    - uses: actions/checkout@v1
    - uses: cachix/install-nix-action@v6
    - uses: cachix/cachix-action@v3
      with:
        name: xe
        signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
    - run: |
        docker load -i result
        docker tag xena/christinewebsite:latest xena/christinewebsite:$GITHUB_SHA
        echo $DOCKER_PASSWORD | docker login -u $DOCKER_USERNAME --password-stdin
        docker push xena/christinewebsite
      env:
        DOCKER_USERNAME: "xena"
        DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
  release:
    runs-on: ubuntu-latest
    needs: docker-build
    if: github.ref == 'refs/heads/master'
    steps:
    - uses: cachix/install-nix-action@v6
    - name: deploy
      run: ./scripts/release.sh
      env:
        DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_TOKEN }}
        MI_TOKEN: ${{ secrets.MI_TOKEN }}
        PATREON_ACCESS_TOKEN: ${{ secrets.PATREON_ACCESS_TOKEN }}
        PATREON_CLIENT_ID: ${{ secrets.PATREON_CLIENT_ID }}
        PATREON_CLIENT_SECRET: ${{ secrets.PATREON_CLIENT_SECRET }}
        PATREON_REFRESH_TOKEN: ${{ secrets.PATREON_REFRESH_TOKEN }}
```
`.github/workflows/rust.yml` (added, @@ -0,0 +1,25 @@):

```yaml
name: Rust

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

env:
  CARGO_TERM_COLOR: always

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - name: Build
      run: cargo build --all
    - name: Run tests
      run: |
        cargo test
        (cd lib/jsonfeed && cargo test)
        (cd lib/patreon && cargo test)
      env:
        PATREON_ACCESS_TOKEN: ${{ secrets.PATREON_ACCESS_TOKEN }}
        PATREON_CLIENT_ID: ${{ secrets.PATREON_CLIENT_ID }}
        PATREON_CLIENT_SECRET: ${{ secrets.PATREON_CLIENT_SECRET }}
        PATREON_REFRESH_TOKEN: ${{ secrets.PATREON_REFRESH_TOKEN }}
```
`.gitignore` (@@ -5,4 +5,4 @@ cw.tar):

```
/result-*
/result
.#*

/target
```
File diff suppressed because it is too large.
`Cargo.toml` (added, @@ -0,0 +1,48 @@):

```toml
[package]
name = "xesite"
version = "2.0.0"
authors = ["Christine Dodrill <me@christine.website>"]
edition = "2018"
build = "src/build.rs"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
anyhow = "1"
atom_syndication = { version = "0.9", features = ["with-serde"] }
chrono = "0.4"
comrak = "0.8"
envy = "0.4"
glob = "0.3"
hyper = "0.13"
kankyo = "0.3"
lazy_static = "1.4"
log = "0"
mime = "0.3.0"
pretty_env_logger = "0"
prometheus = { version = "0.9", default-features = false, features = ["process"] }
rand = "0"
rss = "1"
serde_dhall = "0.5.3"
serde = { version = "1", features = ["derive"] }
serde_yaml = "0.8"
sitemap = "0.4"
thiserror = "1"
tokio = { version = "0.2", features = ["macros"] }
warp = "0.2"
xml-rs = "0.8"

# workspace dependencies
go_vanity = { path = "./lib/go_vanity" }
jsonfeed = { path = "./lib/jsonfeed" }
patreon = { path = "./lib/patreon" }

[build-dependencies]
ructe = { version = "0.11", features = ["warp02"] }

[workspace]
members = [
  "./lib/go_vanity",
  "./lib/jsonfeed",
  "./lib/patreon"
]
```
`Dockerfile` (deleted, 20 lines, @@ -1,20 +0,0 @@):

```dockerfile
FROM xena/go:1.14 AS build
ENV GOPROXY https://cache.greedo.xeserv.us
COPY . /site
WORKDIR /site
RUN CGO_ENABLED=0 go test -v ./...
RUN CGO_ENABLED=0 GOBIN=/root go install -v ./cmd/site

FROM xena/alpine
EXPOSE 5000
WORKDIR /site
COPY --from=build /root/site .
COPY ./static /site/static
COPY ./templates /site/templates
COPY ./blog /site/blog
COPY ./talks /site/talks
COPY ./gallery /site/gallery
COPY ./css /site/css
COPY ./signalboost.dhall /site/signalboost.dhall
HEALTHCHECK CMD wget --spider http://127.0.0.1:5000/.within/health || exit 1
CMD ./site
```
`LICENSE` (@@ -1,4 +1,4 @@; the copyright line changes from 2017 to 2017-2020):

```
Copyright (c) 2017 Christine Dodrill <me@christine.website>
Copyright (c) 2017-2020 Christine Dodrill <me@christine.website>

This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
```
`README.md` (@@ -1,5 +1,8 @@):

```markdown
# site

My personal/portfolio website.

[![built with
nix](https://builtwithnix.org/badge.svg)](https://builtwithnix.org)
![Nix](https://github.com/Xe/site/workflows/Nix/badge.svg)
![Rust](https://github.com/Xe/site/workflows/Rust/badge.svg)

![https://puu.sh/vWnJx/57cda175d8.png](https://puu.sh/vWnJx/57cda175d8.png)
My personal/portfolio website.
```
New blog post (added, @@ -0,0 +1,189 @@):

---
title: "Site Update: Rewrite in Rust"
date: 2020-07-16
tags:
  - rust
---

# Site Update: Rewrite in Rust

Hello there! You are reading this post thanks to a lot of effort, research and consultation that has resulted in a complete from-scratch rewrite of this website in [Rust](https://rust-lang.org). The original implementation in Go is available [here](https://github.com/Xe/site/releases/tag/v1.5.0) should anyone want to reference that for any reason.

If you find any issues with the [RSS feed](/blog.rss), [Atom feed](/blog.atom) or [JSONFeed](/blog.json), please let me know as soon as possible so I can fix them.

This website stands on the shoulders of giants. Here are just a few of those and how they add up into this whole package.
## comrak

All of my posts are written in [markdown](https://github.com/Xe/site/blob/master/blog/all-there-is-is-now-2019-05-25.markdown). [comrak](https://github.com/kivikakk/comrak) is a markdown parser written by a friend of mine that is as fast and as correct as possible. comrak does the job of turning all of that markdown (over 150 files at the time of writing this post) into the HTML that you are reading right now. It also supports a lot of common markdown extensions, which I use heavily in my posts.
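To give a feel for what that looks like, here is a minimal sketch of rendering a post body with comrak. This is not the site's exact code: it uses comrak's default options, while the real site turns on several extensions on top of them.

```rust
use comrak::{markdown_to_html, ComrakOptions};

// Render a markdown post body into an HTML string. The real site layers
// extra extension options on top of the defaults; this sketch keeps them stock.
fn render_markdown(input: &str) -> String {
    markdown_to_html(input, &ComrakOptions::default())
}

fn main() {
    let html = render_markdown("# Hello\n\nThis is *markdown* with a [link](https://example.com).");
    println!("{}", html);
}
```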
## warp

[warp](https://github.com/seanmonstar/warp) is the web framework I use for Rust. It gives users a set of filters that add up into entire web applications. As an example, here is the hello-world program from its readme:

```rust
use warp::Filter;

#[tokio::main]
async fn main() {
    // GET /hello/warp => 200 OK with body "Hello, warp!"
    let hello = warp::path!("hello" / String)
        .map(|name| format!("Hello, {}!", name));

    warp::serve(hello)
        .run(([127, 0, 0, 1], 3030))
        .await;
}
```

This can then be built up into something like this:

```rust
let site = index
    .or(contact.or(feeds).or(resume.or(signalboost)).or(patrons))
    .or(blog_index.or(series.or(series_view).or(post_view)))
    .or(gallery_index.or(gallery_post_view))
    .or(talk_index.or(talk_post_view))
    .or(jsonfeed.or(atom).or(rss.or(sitemap)))
    .or(files.or(css).or(favicon).or(sw.or(robots)))
    .or(healthcheck.or(metrics_endpoint).or(go_vanity_jsonfeed))
    // ...
```

which is the actual routing setup for this website!
## ructe

In the previous version of this site, I used Go's [html/template](https://godoc.org/html/template). Rust does not have an equivalent of html/template in its standard library. After some research, I settled on [ructe](https://github.com/kaj/ructe) for the HTML templates. ructe works by preprocessing templates written in a little domain-specific language and compiling them down to Rust source code. This means the templates are optimized together with the rest of the program, which lets my website render most pages in less than 100 microseconds. Here is an example template (the one for [/patrons](/patrons)):

```html
@use patreon::Users;
@use super::{header_html, footer_html};

@(users: Users)

@:header_html(Some("Patrons"), None)

<h1>Patrons</h1>

<p>These awesome people donate to me on <a href="https://patreon.com/cadey">Patreon</a>.
If you would like to show up in this list, please donate to me on Patreon. This
is refreshed every time the site is deployed.</p>

<p>
  <ul>
    @for user in users {
      <li>@user.attributes.full_name</li>
    }
  </ul>
</p>

@:footer_html()
```

The templates compile down to Rust, which lets me include other parts of the program into the templates. Here I use that to take a list of users from the incredibly hacky Patreon API client I wrote for this website and iterate over it, making a list of every patron by name.
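As a rough illustration of how the generated code gets used, here is a sketch (not the site's exact code) of rendering that patrons template into a string that warp can serve with `warp::reply::html`. The function name `templates::patrons_html` is an assumption based on ructe's convention of deriving the name from the template file (a hypothetical `patrons.rs.html` here); `Users` comes from the `patreon` workspace crate.

```rust
// Sketch only: assumes the ructe-generated `templates` module from the build
// script and a `patrons.rs.html` template that takes a `Users` argument.
fn render_patrons(users: patreon::Users) -> anyhow::Result<String> {
    let mut buf: Vec<u8> = Vec::new();
    templates::patrons_html(&mut buf, users)?; // the generated function writes HTML into buf
    Ok(String::from_utf8(buf)?)
}
```

A handler built this way can then be mounted on a `warp::path("patrons")` filter with `.and_then`, just like the filters in the routing example above.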
## Build Process

As a nice side effect of this rewrite, my website is now completely built using [Nix](https://nixos.org/). This allows the website to be built reproducibly, and it gives anyone who checks out the repo and runs `nix-shell` a full development environment for free. Check out [naersk](https://github.com/nmattia/naersk) for the secret sauce that enables my docker image build. See [this blogpost](/blog/drone-kubernetes-cd-2020-07-10) for more information about this build process (though my site uses GitHub Actions instead of Drone).
## `jsonfeed` Go package

I used to have a [JSONFeed](https://www.jsonfeed.org/) package publicly visible at the go import path `christine.website/jsonfeed`. As far as I know I'm the only person who ended up using it; but in case there are any private repos that I don't know about depending on it, I have made the jsonfeed package available at its old location as well as its source code [here](https://tulpa.dev/Xe/jsonfeed). You may have to update your `go.mod` file to import `christine.website/jsonfeed` instead of `christine.website`. If something ends up going wrong as a result of this, please [file a GitHub issue here](https://github.com/Xe/site/issues/new) and I can attempt to assist further.
## `go_vanity` crate

I have written a small Go vanity import crate and exposed it in my Git repo. If you want to use it, add it to your `Cargo.toml` like this:

```toml
[dependencies]
go_vanity = { git = "https://github.com/Xe/site", branch = "master" }
```

You can then use it from any warp application by calling `go_vanity::github` or `go_vanity::gitea` like this:

```rust
let go_vanity_jsonfeed = warp::path("jsonfeed")
    .and(warp::any().map(move || "christine.website/jsonfeed"))
    .and(warp::any().map(move || "https://tulpa.dev/Xe/jsonfeed"))
    .and_then(go_vanity::gitea);
```

I plan to add full documentation to this crate soon as well as release it properly on crates.io.
## `patreon` crate

I have also written a small [Patreon](https://www.patreon.com/) API client and made it available in my Git repo. If you want to use it, add it to your `Cargo.toml` like this:

```toml
[dependencies]
patreon = { git = "https://github.com/Xe/site", branch = "master" }
```

This client is _incredibly limited_ and only supports the minimum parts of the Patreon API that are required for my website to function. Patreon has also apparently started to phase out support for its API anyway, so I don't know how long this will be useful.

But it is there should you need it!
## Dhall Kubernetes Manifest

I also took the time to port the kubernetes manifest to [Dhall](https://dhall-lang.org/). This allows me to have a type-safe kubernetes manifest that will correctly have all of the secrets injected for me from the environment of the deploy script.

---

These are the biggest giants that my website now sits on. The code for this rewrite is still a bit messy. I'm working on making it better, but my goal is to have this website's code shine as an example of how to best write this kind of website in Rust. Check out the code [here](https://github.com/Xe/site).
X-Clacks-Overhead middleware, Go (deleted, @@ -1,25 +0,0 @@):

```go
package main

import (
    "math/rand"
    "net/http"
    "time"
)

type ClackSet []string

func (cs ClackSet) Name() string {
    return "GNU " + cs[rand.Intn(len(cs))]
}

func (cs ClackSet) Middleware(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        w.Header().Add("X-Clacks-Overhead", cs.Name())

        next.ServeHTTP(w, r)
    })
}

func init() {
    rand.Seed(time.Now().Unix())
}
```
`cmd/site/html.go` (deleted, 245 lines, @@ -1,245 +0,0 @@):

```go
package main

import (
    "context"
    "fmt"
    "html/template"
    "net/http"
    "path/filepath"
    "strings"
    "time"

    "christine.website/cmd/site/internal"
    "christine.website/cmd/site/internal/blog"
    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promauto"
    "within.website/ln"
    "within.website/ln/opname"
)

var (
    templateRenderTime = promauto.NewHistogramVec(prometheus.HistogramOpts{
        Name: "template_render_time",
        Help: "Template render time in nanoseconds",
    }, []string{"name"})
)

func logTemplateTime(ctx context.Context, name string, f ln.F, from time.Time) {
    dur := time.Since(from)
    templateRenderTime.With(prometheus.Labels{"name": name}).Observe(float64(dur))
    ln.Log(ctx, f, ln.F{"dur": dur, "name": name})
}

func (s *Site) renderTemplatePage(templateFname string, data interface{}) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        ctx := opname.With(r.Context(), "renderTemplatePage")
        fetag := "W/" + internal.Hash(templateFname, etag) + "-1"

        f := ln.F{"etag": fetag, "if_none_match": r.Header.Get("If-None-Match")}

        if r.Header.Get("If-None-Match") == fetag {
            http.Error(w, "Cached data OK", http.StatusNotModified)
            ln.Log(ctx, f, ln.Info("Cache hit"))
            return
        }

        defer logTemplateTime(ctx, templateFname, f, time.Now())

        var t *template.Template
        var err error

        t, err = template.ParseFiles("templates/base.html", "templates/"+templateFname)
        if err != nil {
            w.WriteHeader(http.StatusInternalServerError)
            ln.Error(ctx, err, ln.F{"action": "renderTemplatePage", "page": templateFname})
            fmt.Fprintf(w, "error: %v", err)
        }

        w.Header().Set("ETag", fetag)
        w.Header().Set("Cache-Control", "max-age=432000")

        err = t.Execute(w, data)
        if err != nil {
            panic(err)
        }
    })
}

var postView = promauto.NewCounterVec(prometheus.CounterOpts{
    Name: "posts_viewed",
    Help: "The number of views per post or talk",
}, []string{"base"})

func (s *Site) listSeries(w http.ResponseWriter, r *http.Request) {
    s.renderTemplatePage("series.html", s.Series).ServeHTTP(w, r)
}

func (s *Site) showSeries(w http.ResponseWriter, r *http.Request) {
    if r.RequestURI == "/blog/series/" {
        http.Redirect(w, r, "/blog/series", http.StatusSeeOther)
        return
    }

    series := filepath.Base(r.URL.Path)
    var posts []blog.Post

    for _, p := range s.Posts {
        if p.Series == series {
            posts = append(posts, p)
        }
    }

    s.renderTemplatePage("serieslist.html", struct {
        Name  string
        Posts []blog.Post
    }{
        Name:  series,
        Posts: posts,
    }).ServeHTTP(w, r)
}

func (s *Site) showGallery(w http.ResponseWriter, r *http.Request) {
    if r.RequestURI == "/gallery/" {
        http.Redirect(w, r, "/gallery", http.StatusSeeOther)
        return
    }

    cmp := r.URL.Path[1:]
    var p blog.Post
    var found bool
    for _, pst := range s.Gallery {
        if pst.Link == cmp {
            p = pst
            found = true
        }
    }

    if !found {
        w.WriteHeader(http.StatusNotFound)
        s.renderTemplatePage("error.html", "no such post found: "+r.RequestURI).ServeHTTP(w, r)
        return
    }

    var tags string
    if len(p.Tags) != 0 {
        for _, t := range p.Tags {
            tags = tags + " #" + strings.ReplaceAll(t, "-", "")
        }
    }

    h := s.renderTemplatePage("gallerypost.html", struct {
        Title    string
        Link     string
        BodyHTML template.HTML
        Date     string
        Tags     string
        Image    string
    }{
        Title:    p.Title,
        Link:     p.Link,
        BodyHTML: p.BodyHTML,
        Date:     internal.IOS13Detri(p.Date),
        Tags:     tags,
        Image:    p.ImageURL,
    })

    if h == nil {
        panic("how did we get here?")
    }

    h.ServeHTTP(w, r)
    postView.With(prometheus.Labels{"base": filepath.Base(p.Link)}).Inc()
}

func (s *Site) showTalk(w http.ResponseWriter, r *http.Request) {
    if r.RequestURI == "/talks/" {
        http.Redirect(w, r, "/talks", http.StatusSeeOther)
        return
    }

    cmp := r.URL.Path[1:]
    var p blog.Post
    var found bool
    for _, pst := range s.Talks {
        if pst.Link == cmp {
            p = pst
            found = true
        }
    }

    if !found {
        w.WriteHeader(http.StatusNotFound)
        s.renderTemplatePage("error.html", "no such post found: "+r.RequestURI).ServeHTTP(w, r)
        return
    }

    h := s.renderTemplatePage("talkpost.html", struct {
        Title      string
        Link       string
        BodyHTML   template.HTML
        Date       string
        SlidesLink string
    }{
        Title:      p.Title,
        Link:       p.Link,
        BodyHTML:   p.BodyHTML,
        Date:       internal.IOS13Detri(p.Date),
        SlidesLink: p.SlidesLink,
    })

    if h == nil {
        panic("how did we get here?")
    }

    h.ServeHTTP(w, r)
    postView.With(prometheus.Labels{"base": filepath.Base(p.Link)}).Inc()
}

func (s *Site) showPost(w http.ResponseWriter, r *http.Request) {
    if r.RequestURI == "/blog/" {
        http.Redirect(w, r, "/blog", http.StatusSeeOther)
        return
    }

    cmp := r.URL.Path[1:]
    var p blog.Post
    var found bool
    for _, pst := range s.Posts {
        if pst.Link == cmp {
            p = pst
            found = true
        }
    }

    if !found {
        w.WriteHeader(http.StatusNotFound)
        s.renderTemplatePage("error.html", "no such post found: "+r.RequestURI).ServeHTTP(w, r)
        return
    }

    var tags string

    if len(p.Tags) != 0 {
        for _, t := range p.Tags {
            tags = tags + " #" + strings.ReplaceAll(t, "-", "")
        }
    }

    s.renderTemplatePage("blogpost.html", struct {
        Title             string
        Link              string
        BodyHTML          template.HTML
        Date              string
        Series, SeriesTag string
        Tags              string
    }{
        Title:     p.Title,
        Link:      p.Link,
        BodyHTML:  p.BodyHTML,
        Date:      internal.IOS13Detri(p.Date),
        Series:    p.Series,
        SeriesTag: strings.ReplaceAll(p.Series, "-", ""),
        Tags:      tags,
    }).ServeHTTP(w, r)
    postView.With(prometheus.Labels{"base": filepath.Base(p.Link)}).Inc()
}
```
Blog post loader, Go package `blog` (deleted, @@ -1,137 +0,0 @@):

```go
package blog

import (
    "html/template"
    "io/ioutil"
    "os"
    "path/filepath"
    "sort"
    "strings"
    "time"

    "christine.website/cmd/site/internal/front"
    "github.com/russross/blackfriday"
)

// Post is a single blogpost.
type Post struct {
    Title      string        `json:"title"`
    Link       string        `json:"link"`
    Summary    string        `json:"summary,omitifempty"`
    Body       string        `json:"-"`
    BodyHTML   template.HTML `json:"body"`
    Series     string        `json:"series"`
    Tags       []string      `json:"tags"`
    SlidesLink string        `json:"slides_link"`
    ImageURL   string        `json:"image_url"`
    ThumbURL   string        `json:"thumb_url"`
    Date       time.Time
    DateString string `json:"date"`
}

// Posts implements sort.Interface for a slice of Post objects.
type Posts []Post

func (p Posts) Series() []string {
    names := map[string]struct{}{}

    for _, ps := range p {
        if ps.Series != "" {
            names[ps.Series] = struct{}{}
        }
    }

    var result []string

    for name := range names {
        result = append(result, name)
    }

    return result
}

func (p Posts) Len() int { return len(p) }
func (p Posts) Less(i, j int) bool {
    iDate := p[i].Date
    jDate := p[j].Date

    return iDate.Unix() < jDate.Unix()
}
func (p Posts) Swap(i, j int) { p[i], p[j] = p[j], p[i] }

// LoadPosts loads posts for a given directory.
func LoadPosts(path string, prepend string) (Posts, error) {
    type postFM struct {
        Title      string
        Date       string
        Series     string
        Tags       []string
        SlidesLink string `yaml:"slides_link"`
        Image      string
        Thumb      string
        Show       string
    }
    var result Posts

    err := filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return err
        }

        if info.IsDir() {
            return nil
        }

        fin, err := os.Open(path)
        if err != nil {
            return err
        }
        defer fin.Close()

        content, err := ioutil.ReadAll(fin)
        if err != nil {
            return err
        }

        var fm postFM
        remaining, err := front.Unmarshal(content, &fm)
        if err != nil {
            return err
        }

        output := blackfriday.Run(remaining)

        const timeFormat = `2006-01-02`
        date, err := time.Parse(timeFormat, fm.Date)
        if err != nil {
            return err
        }

        fname := filepath.Base(path)
        fname = strings.TrimSuffix(fname, filepath.Ext(fname))

        p := Post{
            Title:      fm.Title,
            Date:       date,
            DateString: fm.Date,
            Link:       filepath.Join(prepend, fname),
            Body:       string(remaining),
            BodyHTML:   template.HTML(output),
            SlidesLink: fm.SlidesLink,
            Series:     fm.Series,
            Tags:       fm.Tags,
            ImageURL:   fm.Image,
            ThumbURL:   fm.Thumb,
        }
        result = append(result, p)

        return nil
    })
    if err != nil {
        return nil, err
    }

    sort.Sort(sort.Reverse(result))

    return result, nil
}
```
Tests for the `blog` package (deleted, @@ -1,66 +0,0 @@):

```go
package blog

import (
    "testing"
)

func TestLoadPosts(t *testing.T) {
    posts, err := LoadPosts("../../../../blog", "blog")
    if err != nil {
        t.Fatal(err)
    }

    for _, post := range posts {
        t.Run(post.Link, post.test)
    }
}

func TestLoadTalks(t *testing.T) {
    talks, err := LoadPosts("../../../../talks", "talks")
    if err != nil {
        t.Fatal(err)
    }

    for _, talk := range talks {
        t.Run(talk.Link, talk.test)
        if talk.SlidesLink == "" {
            t.Errorf("talk %s (%s) doesn't have a slides link", talk.Title, talk.DateString)
        }
    }
}

func TestLoadGallery(t *testing.T) {
    gallery, err := LoadPosts("../../../../gallery", "gallery")
    if err != nil {
        t.Fatal(err)
    }

    for _, art := range gallery {
        t.Run(art.Link, art.test)
        if art.ImageURL == "" {
            t.Errorf("art %s (%s) doesn't have an image link", art.Title, art.DateString)
        }
        if art.ThumbURL == "" {
            t.Errorf("art %s (%s) doesn't have a thumbnail link", art.Title, art.DateString)
        }
    }
}

func (p Post) test(t *testing.T) {
    if p.Title == "" {
        t.Error("no post title")
    }

    if p.DateString == "" {
        t.Error("no date")
    }

    if p.Link == "" {
        t.Error("no link")
    }

    if p.Body == "" {
        t.Error("no body")
    }
}
```
Date formatting helper, Go package `internal` (deleted, @@ -1,10 +0,0 @@):

```go
package internal

import "time"

const iOS13DetriFormat = `2006 M1 2`

// IOS13Detri formats a datestamp like iOS 13 does with the Lojban locale.
func IOS13Detri(t time.Time) string {
    return t.Format(iOS13DetriFormat)
}
```
Test for the date formatting helper (deleted, @@ -1,28 +0,0 @@):

```go
package internal

import (
    "fmt"
    "testing"
    "time"
)

func TestIOS13Detri(t *testing.T) {
    cases := []struct {
        in  time.Time
        out string
    }{
        {
            in:  time.Date(2019, time.March, 30, 0, 0, 0, 0, time.FixedZone("UTC", 0)),
            out: "2019 M3 30",
        },
    }

    for _, cs := range cases {
        t.Run(fmt.Sprintf("%s -> %s", cs.in.Format(time.RFC3339), cs.out), func(t *testing.T) {
            result := IOS13Detri(cs.in)
            if result != cs.out {
                t.Fatalf("wanted: %s, got: %s", cs.out, result)
            }
        })
    }
}
```
License for the vendored `front` package (deleted, @@ -1,19 +0,0 @@):

```
Copyright (c) 2017 TJ Holowaychuk <tj@vision-media.ca>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
```
YAML frontmatter parser, Go package `front` (deleted, @@ -1,24 +0,0 @@):

```go
// Package front provides YAML frontmatter unmarshalling.
package front

import (
    "bytes"

    "gopkg.in/yaml.v2"
)

// Delimiter.
var delim = []byte("---")

// Unmarshal parses YAML frontmatter and returns the content. When no
// frontmatter delimiters are present the original content is returned.
func Unmarshal(b []byte, v interface{}) (content []byte, err error) {
    if !bytes.HasPrefix(b, delim) {
        return b, nil
    }

    parts := bytes.SplitN(b, delim, 3)
    content = parts[2]
    err = yaml.Unmarshal(parts[1], v)
    return
}
```
Example test for the `front` package (deleted, @@ -1,42 +0,0 @@):

```go
package front_test

import (
    "fmt"
    "log"

    "christine.website/cmd/site/internal/front"
)

var markdown = []byte(`---
title: Ferrets
authors:
- Tobi
- Loki
- Jane
---
Some content here, so
interesting, you just
want to keep reading.`)

type article struct {
    Title   string
    Authors []string
}

func Example() {
    var a article

    content, err := front.Unmarshal(markdown, &a)
    if err != nil {
        log.Fatalf("error unmarshalling: %s", err)
    }

    fmt.Printf("%#v\n", a)
    fmt.Printf("%s\n", string(content))
    // Output:
    // front_test.article{Title:"Ferrets", Authors:[]string{"Tobi", "Loki", "Jane"}}
    //
    // Some content here, so
    // interesting, you just
    // want to keep reading.
}
```
MD5 hash helper, Go package `internal` (deleted, @@ -1,14 +0,0 @@):

```go
package internal

import (
    "crypto/md5"
    "fmt"
)

// Hash is a simple wrapper around the MD5 algorithm implementation in the
// Go standard library. It takes in data and a salt and returns the hashed
// representation.
func Hash(data string, salt string) string {
    output := md5.Sum([]byte(data + salt))
    return fmt.Sprintf("%x", output)
}
```
Prometheus metrics middleware, Go package `middleware` (deleted, @@ -1,43 +0,0 @@):

```go
package middleware

import (
    "net/http"

    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promhttp"
)

var (
    requestCounter = prometheus.NewCounterVec(
        prometheus.CounterOpts{
            Name: "handler_requests_total",
            Help: "Total number of request/responses by HTTP status code.",
        }, []string{"handler", "code"})

    requestDuration = prometheus.NewHistogramVec(prometheus.HistogramOpts{
        Name: "handler_request_duration",
        Help: "Handler request duration.",
    }, []string{"handler", "method"})

    requestInFlight = prometheus.NewGaugeVec(prometheus.GaugeOpts{
        Name: "handler_requests_in_flight",
        Help: "Current number of requests being served.",
    }, []string{"handler"})
)

func init() {
    _ = prometheus.Register(requestCounter)
    _ = prometheus.Register(requestDuration)
    _ = prometheus.Register(requestInFlight)
}

// Metrics captures request duration, request count and in-flight request count
// metrics for HTTP handlers. The family field is used to discriminate handlers.
func Metrics(family string, next http.Handler) http.Handler {
    return promhttp.InstrumentHandlerDuration(
        requestDuration.MustCurryWith(prometheus.Labels{"handler": family}),
        promhttp.InstrumentHandlerCounter(requestCounter.MustCurryWith(prometheus.Labels{"handler": family}),
            promhttp.InstrumentHandlerInFlight(requestInFlight.With(prometheus.Labels{"handler": family}), next),
        ),
    )
}
```
Request ID middleware, Go package `middleware` (deleted, @@ -1,31 +0,0 @@):

```go
package middleware

import (
    "net/http"

    "github.com/celrenheit/sandflake"
    "within.website/ln"
)

// RequestID appends a unique (sandflake) request ID to each request's
// X-Request-Id header field, much like Heroku's router does.
func RequestID(next http.Handler) http.Handler {
    var g sandflake.Generator
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        id := g.Next().String()

        if rid := r.Header.Get("X-Request-Id"); rid != "" {
            id = rid + "," + id
        }

        ctx := ln.WithF(r.Context(), ln.F{
            "request_id": id,
        })
        r = r.WithContext(ctx)

        w.Header().Set("X-Request-Id", id)
        r.Header.Set("X-Request-Id", id)

        next.ServeHTTP(w, r)
    })
}
```
`cmd/site/main.go` (deleted, 296 lines, @@ -1,296 +0,0 @@):

```go
package main

import (
    "context"
    "html/template"
    "io/ioutil"
    "net/http"
    "os"
    "sort"
    "strings"
    "time"

    "christine.website/cmd/site/internal/blog"
    "christine.website/cmd/site/internal/middleware"
    "christine.website/jsonfeed"
    "github.com/gorilla/feeds"
    _ "github.com/joho/godotenv/autoload"
    "github.com/povilasv/prommod"
    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promhttp"
    blackfriday "github.com/russross/blackfriday"
    "github.com/sebest/xff"
    "github.com/snabb/sitemap"
    "within.website/ln"
    "within.website/ln/ex"
    "within.website/ln/opname"
)

var port = os.Getenv("PORT")

func main() {
    if port == "" {
        port = "29384"
    }

    ctx := ln.WithF(opname.With(context.Background(), "main"), ln.F{
        "port":    port,
        "git_rev": gitRev,
    })

    _ = prometheus.Register(prommod.NewCollector("christine"))

    s, err := Build()
    if err != nil {
        ln.FatalErr(ctx, err, ln.Action("Build"))
    }

    mux := http.NewServeMux()
    mux.HandleFunc("/.within/health", func(w http.ResponseWriter, r *http.Request) {
        http.Error(w, "OK", http.StatusOK)
    })
    mux.Handle("/", s)

    ln.Log(ctx, ln.Action("http_listening"))
    ln.FatalErr(ctx, http.ListenAndServe(":"+port, mux))
}

// Site is the parent object for https://christine.website's backend.
type Site struct {
    Posts       blog.Posts
    Talks       blog.Posts
    Gallery     blog.Posts
    Resume      template.HTML
    Series      []string
    SignalBoost []Person

    clacks   ClackSet
    patrons  []string
    rssFeed  *feeds.Feed
    jsonFeed *jsonfeed.Feed

    mux   *http.ServeMux
    xffmw *xff.XFF
}

var gitRev = os.Getenv("GIT_REV")

func envOr(key, or string) string {
    if result, ok := os.LookupEnv(key); ok {
        return result
    }

    return or
}

func (s *Site) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    ctx := opname.With(r.Context(), "site.ServeHTTP")
    ctx = ln.WithF(ctx, ln.F{
        "user_agent": r.Header.Get("User-Agent"),
    })
    r = r.WithContext(ctx)
    if gitRev != "" {
        w.Header().Add("X-Git-Rev", gitRev)
    }

    w.Header().Add("X-Hacker", "If you are reading this, check out /signalboost to find people for your team")

    s.clacks.Middleware(
        middleware.RequestID(
            s.xffmw.Handler(
                ex.HTTPLog(s.mux),
            ),
        ),
    ).ServeHTTP(w, r)
}

var arbDate = time.Date(2020, time.May, 21, 0, 0, 0, 0, time.UTC)

// Build creates a new Site instance or fails.
func Build() (*Site, error) {
    pc, err := NewPatreonClient()
    if err != nil {
        return nil, err
    }

    pledges, err := GetPledges(pc)
    if err != nil {
        return nil, err
    }

    people, err := loadPeople("./signalboost.dhall")
    if err != nil {
        return nil, err
    }

    smi := sitemap.New()
    smi.Add(&sitemap.URL{
        Loc:        "https://christine.website/resume",
        LastMod:    &arbDate,
        ChangeFreq: sitemap.Monthly,
    })

    smi.Add(&sitemap.URL{
        Loc:        "https://christine.website/contact",
        LastMod:    &arbDate,
        ChangeFreq: sitemap.Monthly,
    })

    smi.Add(&sitemap.URL{
        Loc:        "https://christine.website/",
        LastMod:    &arbDate,
        ChangeFreq: sitemap.Monthly,
    })

    smi.Add(&sitemap.URL{
        Loc:        "https://christine.website/patrons",
        LastMod:    &arbDate,
        ChangeFreq: sitemap.Weekly,
    })

    smi.Add(&sitemap.URL{
        Loc:        "https://christine.website/blog",
        LastMod:    &arbDate,
        ChangeFreq: sitemap.Weekly,
    })

    xffmw, err := xff.Default()
    if err != nil {
        return nil, err
    }

    s := &Site{
        rssFeed: &feeds.Feed{
            Title:       "Christine Dodrill's Blog",
            Link:        &feeds.Link{Href: "https://christine.website/blog"},
            Description: "My blog posts and rants about various technology things.",
            Author:      &feeds.Author{Name: "Christine Dodrill", Email: "me@christine.website"},
            Created:     bootTime,
            Copyright:   "This work is copyright Christine Dodrill. My viewpoints are my own and not the view of any employer past, current or future.",
        },
        jsonFeed: &jsonfeed.Feed{
            Version:     jsonfeed.CurrentVersion,
            Title:       "Christine Dodrill's Blog",
            HomePageURL: "https://christine.website",
            FeedURL:     "https://christine.website/blog.json",
            Description: "My blog posts and rants about various technology things.",
            UserComment: "This is a JSON feed of my blogposts. For more information read: https://jsonfeed.org/version/1",
            Icon:        icon,
            Favicon:     icon,
            Author: jsonfeed.Author{
                Name:   "Christine Dodrill",
                Avatar: icon,
            },
        },
        mux:   http.NewServeMux(),
        xffmw: xffmw,

        clacks:      ClackSet(strings.Split(envOr("CLACK_SET", "Ashlynn"), ",")),
        patrons:     pledges,
        SignalBoost: people,
    }

    posts, err := blog.LoadPosts("./blog/", "blog")
    if err != nil {
        return nil, err
    }
    s.Posts = posts
    s.Series = posts.Series()
    sort.Strings(s.Series)

    talks, err := blog.LoadPosts("./talks", "talks")
    if err != nil {
        return nil, err
    }
    s.Talks = talks

    gallery, err := blog.LoadPosts("./gallery", "gallery")
    if err != nil {
        return nil, err
    }
    s.Gallery = gallery

    var everything blog.Posts
    everything = append(everything, posts...)
    everything = append(everything, talks...)
    everything = append(everything, gallery...)

    sort.Sort(sort.Reverse(everything))

    resumeData, err := ioutil.ReadFile("./static/resume/resume.md")
    if err != nil {
        return nil, err
    }

    s.Resume = template.HTML(blackfriday.Run(resumeData))

    for _, item := range everything {
        s.rssFeed.Items = append(s.rssFeed.Items, &feeds.Item{
            Title:       item.Title,
            Link:        &feeds.Link{Href: "https://christine.website/" + item.Link},
            Description: item.Summary,
            Created:     item.Date,
            Content:     string(item.BodyHTML),
        })

        s.jsonFeed.Items = append(s.jsonFeed.Items, jsonfeed.Item{
            ID:            "https://christine.website/" + item.Link,
            URL:           "https://christine.website/" + item.Link,
            Title:         item.Title,
            DatePublished: item.Date,
            ContentHTML:   string(item.BodyHTML),
            Tags:          item.Tags,
        })

        smi.Add(&sitemap.URL{
            Loc:        "https://christine.website/" + item.Link,
            LastMod:    &item.Date,
            ChangeFreq: sitemap.Monthly,
        })
    }

    // Add HTTP routes here
    s.mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
        if r.URL.Path != "/" {
            w.WriteHeader(http.StatusNotFound)
            s.renderTemplatePage("error.html", "can't find "+r.URL.Path).ServeHTTP(w, r)
            return
        }
```