Compare commits

..

143 Commits

Author SHA1 Message Date
Cadey Ratio 33b7dfa21c stanley: minor fixes suggested by the editor
Signed-off-by: Xe Iaso <me@christine.website>
2022-07-20 22:52:37 -04:00
dependabot[bot] d7b817b22d
build(deps): bump axum from 0.5.12 to 0.5.13 (#517)
Bumps [axum](https://github.com/tokio-rs/axum) from 0.5.12 to 0.5.13.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-v0.5.12...axum-v0.5.13)

---
updated-dependencies:
- dependency-name: axum
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-15 07:58:04 -04:00
Cadey Ratio 08ccd40888 yeet
Signed-off-by: Xe Iaso <me@christine.website>
2022-07-14 19:23:32 -04:00
dependabot[bot] 3fcbe4ebbf
build(deps): bump comrak from 0.13.2 to 0.14.0 (#516)
Bumps [comrak](https://github.com/kivikakk/comrak) from 0.13.2 to 0.14.0.
- [Release notes](https://github.com/kivikakk/comrak/releases)
- [Changelog](https://github.com/kivikakk/comrak/blob/main/changelog.txt)
- [Commits](https://github.com/kivikakk/comrak/compare/0.13.2...0.14.0)

---
updated-dependencies:
- dependency-name: comrak
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-13 17:46:38 -04:00
dependabot[bot] 8759f9d590
build(deps): bump axum from 0.5.11 to 0.5.12 (#513)
Bumps [axum](https://github.com/tokio-rs/axum) from 0.5.11 to 0.5.12.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-v0.5.11...axum-v0.5.12)

---
updated-dependencies:
- dependency-name: axum
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-12 16:35:15 -04:00
dependabot[bot] b36afa9dfd
build(deps): bump color-eyre from 0.6.1 to 0.6.2 (#515)
Bumps [color-eyre](https://github.com/yaahc/color-eyre) from 0.6.1 to 0.6.2.
- [Release notes](https://github.com/yaahc/color-eyre/releases)
- [Changelog](https://github.com/yaahc/color-eyre/blob/master/CHANGELOG.md)
- [Commits](https://github.com/yaahc/color-eyre/compare/v0.6.1...v0.6.2)

---
updated-dependencies:
- dependency-name: color-eyre
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-12 16:35:07 -04:00
Cadey Ratio f91a5a7878
The Stanley Parable (#514)
Signed-off-by: Xe <me@christine.website>
2022-07-11 17:53:48 -04:00
Cadey Ratio b32f5a25af consolidate API routes
Signed-off-by: Xe Iaso <me@christine.website>
2022-07-10 09:58:57 -04:00
Cadey Ratio 18ae8a01f8 Spearphishing
Signed-off-by: Xe Iaso <me@christine.website>
2022-07-09 14:48:04 -04:00
Loric Brevet 04151593cc
blog: update devShell to devShells for compatibility with recent spec (#505) 2022-07-06 17:08:16 -04:00
dependabot[bot] a6ddd7e3a6
build(deps): bump regex from 1.5.6 to 1.6.0 (#511)
Bumps [regex](https://github.com/rust-lang/regex) from 1.5.6 to 1.6.0.
- [Release notes](https://github.com/rust-lang/regex/releases)
- [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rust-lang/regex/compare/1.5.6...1.6.0)

---
updated-dependencies:
- dependency-name: regex
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-06 17:07:56 -04:00
dependabot[bot] f8dae6fa5e
build(deps): bump tracing-subscriber from 0.3.11 to 0.3.14 (#509)
Bumps [tracing-subscriber](https://github.com/tokio-rs/tracing) from 0.3.11 to 0.3.14.
- [Release notes](https://github.com/tokio-rs/tracing/releases)
- [Commits](https://github.com/tokio-rs/tracing/compare/tracing-subscriber-0.3.11...tracing-subscriber-0.3.14)

---
updated-dependencies:
- dependency-name: tracing-subscriber
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-04 12:52:43 -04:00
dependabot[bot] ed1e8eecee
build(deps): bump axum from 0.5.10 to 0.5.11 (#508)
Bumps [axum](https://github.com/tokio-rs/axum) from 0.5.10 to 0.5.11.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-v0.5.10...axum-v0.5.11)

---
updated-dependencies:
- dependency-name: axum
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-04 12:52:37 -04:00
dependabot[bot] 3020a92086
build(deps): bump axum-extra from 0.3.5 to 0.3.6 (#507)
Bumps [axum-extra](https://github.com/tokio-rs/axum) from 0.3.5 to 0.3.6.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-extra-v0.3.5...axum-extra-v0.3.6)

---
updated-dependencies:
- dependency-name: axum-extra
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-04 12:52:30 -04:00
dependabot[bot] d93bdc58a6
build(deps): bump comrak from 0.13.1 to 0.13.2 (#510)
Bumps [comrak](https://github.com/kivikakk/comrak) from 0.13.1 to 0.13.2.
- [Release notes](https://github.com/kivikakk/comrak/releases)
- [Changelog](https://github.com/kivikakk/comrak/blob/main/changelog.txt)
- [Commits](https://github.com/kivikakk/comrak/compare/0.13.1...0.13.2)

---
updated-dependencies:
- dependency-name: comrak
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-04 12:52:18 -04:00
Cadey Ratio 7f6de2cb09 add _xesite_frontmatter extension
Signed-off-by: Xe Iaso <me@christine.website>
2022-07-04 16:35:14 +00:00
Cadey Ratio 8b6056fc09 add API calls for my blogposts/talks
Signed-off-by: Xe Iaso <me@christine.website>
2022-07-04 15:12:23 +00:00
Cadey Ratio 5d7daf179e handlers: fix LAST_MODIFIED date format
Closes #463

Signed-off-by: Xe <me@christine.website>
2022-07-04 14:44:09 +00:00
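For context on this fix: the HTTP `Last-Modified` header has to use the RFC 7231 IMF-fixdate format (for example `Sun, 06 Nov 1994 08:49:37 GMT`), always expressed in GMT. A hypothetical chrono-based sketch of producing such a value (not this repo's actual handler code) could look like:

```rust
use chrono::{DateTime, Utc};

// Format a timestamp the way RFC 7231 expects for Last-Modified headers.
// Illustrative only; the real handler in this repo may differ.
fn last_modified_value(when: DateTime<Utc>) -> String {
    when.format("%a, %d %b %Y %H:%M:%S GMT").to_string()
}
```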
Cadey Ratio 6be8b24dd2 rename jsonfeed to xe_jsonfeed to prepare for my own extensions
Signed-off-by: Xe Iaso <me@christine.website>
2022-07-04 14:44:00 +00:00
Cadey Ratio 03fa2e33a1 update doing a writing date
Signed-off-by: Xe Iaso <me@christine.website>
2022-06-29 19:54:07 -04:00
dependabot[bot] bf95a3f7e0
build(deps): bump axum-macros from 0.2.2 to 0.2.3 (#502)
Bumps [axum-macros](https://github.com/tokio-rs/axum) from 0.2.2 to 0.2.3.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-macros-v0.2.2...axum-macros-v0.2.3)

---
updated-dependencies:
- dependency-name: axum-macros
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-29 10:41:56 -04:00
dependabot[bot] 70b2cc5408
build(deps): bump axum-extra from 0.3.4 to 0.3.5 (#503)
Bumps [axum-extra](https://github.com/tokio-rs/axum) from 0.3.4 to 0.3.5.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-extra-v0.3.4...axum-extra-v0.3.5)

---
updated-dependencies:
- dependency-name: axum-extra
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-29 10:41:51 -04:00
dependabot[bot] b5b804e083
build(deps): bump axum from 0.5.9 to 0.5.10 (#504)
Bumps [axum](https://github.com/tokio-rs/axum) from 0.5.9 to 0.5.10.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-v0.5.9...axum-v0.5.10)

---
updated-dependencies:
- dependency-name: axum
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-29 10:38:04 -04:00
dependabot[bot] 7828552cca
build(deps): bump serde_dhall from 0.11.1 to 0.11.2 (#500)
Bumps [serde_dhall](https://github.com/Nadrieril/dhall-rust) from 0.11.1 to 0.11.2.
- [Release notes](https://github.com/Nadrieril/dhall-rust/releases)
- [Changelog](https://github.com/Nadrieril/dhall-rust/blob/master/CHANGELOG.md)
- [Commits](https://github.com/Nadrieril/dhall-rust/compare/serde_dhall-v0.11.1...serde_dhall-v0.11.2)

---
updated-dependencies:
- dependency-name: serde_dhall
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-27 18:25:33 -04:00
dependabot[bot] 000ed82301
build(deps): bump comrak from 0.13.0 to 0.13.1 (#501)
Bumps [comrak](https://github.com/kivikakk/comrak) from 0.13.0 to 0.13.1.
- [Release notes](https://github.com/kivikakk/comrak/releases)
- [Changelog](https://github.com/kivikakk/comrak/blob/main/changelog.txt)
- [Commits](https://github.com/kivikakk/comrak/compare/0.13.0...0.13.1)

---
updated-dependencies:
- dependency-name: comrak
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-27 18:25:22 -04:00
Cadey Ratio 817d426314 vods
Signed-off-by: Xe <me@christine.website>
2022-06-25 22:40:24 +00:00
Cadey Ratio 13f28eae67 blog: writing about writing
Signed-off-by: Xe <me@christine.website>
2022-06-25 18:40:58 +00:00
Cadey Ratio a1974a5948 start playing with resume data types
Signed-off-by: Xe <me@christine.website>
2022-06-21 23:17:53 +00:00
Cadey Ratio 15a130cc3d job history: even more
Signed-off-by: Xe <me@christine.website>
2022-06-21 23:17:53 +00:00
dependabot[bot] 9f977b3882
build(deps): bump axum from 0.5.8 to 0.5.9 (#498)
Bumps [axum](https://github.com/tokio-rs/axum) from 0.5.8 to 0.5.9.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-v0.5.8...axum-v0.5.9)

---
updated-dependencies:
- dependency-name: axum
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-21 09:57:18 -04:00
dependabot[bot] 50dcfbb19d
build(deps): bump axum from 0.5.7 to 0.5.8 (#497)
Bumps [axum](https://github.com/tokio-rs/axum) from 0.5.7 to 0.5.8.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-v0.5.7...axum-v0.5.8)

---
updated-dependencies:
- dependency-name: axum
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-20 15:10:12 -04:00
dependabot[bot] aeab6536e0
build(deps): bump tower from 0.4.12 to 0.4.13 (#496)
Bumps [tower](https://github.com/tower-rs/tower) from 0.4.12 to 0.4.13.
- [Release notes](https://github.com/tower-rs/tower/releases)
- [Commits](https://github.com/tower-rs/tower/compare/tower-0.4.12...tower-0.4.13)

---
updated-dependencies:
- dependency-name: tower
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-20 15:10:03 -04:00
bjorn3 78485b7d49
Fix typo on the salary transparency page (#495) 2022-06-16 17:39:08 -04:00
dependabot[bot] cea1e230da
build(deps): bump reqwest from 0.11.10 to 0.11.11 (#491)
Bumps [reqwest](https://github.com/seanmonstar/reqwest) from 0.11.10 to 0.11.11.
- [Release notes](https://github.com/seanmonstar/reqwest/releases)
- [Changelog](https://github.com/seanmonstar/reqwest/blob/master/CHANGELOG.md)
- [Commits](https://github.com/seanmonstar/reqwest/compare/v0.11.10...v0.11.11)

---
updated-dependencies:
- dependency-name: reqwest
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-15 11:15:32 -04:00
Vincent Bernat 3a592f99db
Fix typo in salary transparency article (#494) 2022-06-15 06:27:43 -04:00
Molly Miller d0c2df003b
Fix some typos in salary transparency pages. (#493)
Replace erroneous "hirt" with intended "hurt".

Signed-off-by: Molly Miller <sysvinit@users.noreply.github.com>

Co-authored-by: Molly Miller <sysvinit@users.noreply.github.com>
2022-06-14 18:41:09 -04:00
Cadey Ratio ad6fba4c79
Add salary transparency page (#492)
* Move dhall data and types into `/dhall` folder
* Reformat salary transparency data into Dhall
* Wire up the old salary transparency page with a custom element
* Wire up a new salary transparency page
* Expose raw data as JSON
* Make dhall types more portable
* Remove gallery from the navbar
* Make signal boost page point to the new data location
* Add salary transparency page to the footer of the site
* Add site update post for this

Signed-off-by: Xe <me@xeiaso.net>
2022-06-14 15:04:17 -04:00
Gabriel Simmer 7541df7781
Add myself to signalboost.dhall (#490) 2022-06-13 09:05:11 -04:00
Cadey Ratio 5c09a72207 set date to today, yolo
Signed-off-by: Xe Iaso <me@christine.website>
2022-06-09 13:00:57 -04:00
Cadey Ratio 792be1eb55 oops lol
Signed-off-by: Xe Iaso <me@christine.website>
2022-06-08 17:41:18 -04:00
Cadey Ratio aa10dce9ee push to day after
Signed-off-by: Xe Iaso <me@christine.website>
2022-06-08 17:24:10 -04:00
Cadey Ratio aca8b80892 talks: add static analysis talk
Signed-off-by: Xe Iaso <me@christine.website>
2022-06-08 17:20:48 -04:00
Cadey Ratio dc3f6471e7 Add hero image support with <xeblog-hero>
Also lightens the JavaScript load and shifts ad impressions to only when
people from Reddit and Hacker News visit. I may have this include
Twitter in the future.

Signed-off-by: Xe <me@christine.website>
2022-06-08 15:02:52 -04:00
Cadey Ratio 396150f72b add rustfmt to runtime closure of devshell
Signed-off-by: Xe Iaso <me@christine.website>
2022-06-08 15:01:21 -04:00
dependabot[bot] 2645966f85
build(deps): bump comrak from 0.12.1 to 0.13.0 (#487)
Bumps [comrak](https://github.com/kivikakk/comrak) from 0.12.1 to 0.13.0.
- [Release notes](https://github.com/kivikakk/comrak/releases)
- [Changelog](https://github.com/kivikakk/comrak/blob/main/changelog.txt)
- [Commits](https://github.com/kivikakk/comrak/compare/0.12.1...0.13.0)

---
updated-dependencies:
- dependency-name: comrak
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-08 13:11:28 -04:00
dependabot[bot] 7d667ef2ae
build(deps): bump http from 0.2.7 to 0.2.8 (#486)
Bumps [http](https://github.com/hyperium/http) from 0.2.7 to 0.2.8.
- [Release notes](https://github.com/hyperium/http/releases)
- [Changelog](https://github.com/hyperium/http/blob/master/CHANGELOG.md)
- [Commits](https://github.com/hyperium/http/compare/v0.2.7...v0.2.8)

---
updated-dependencies:
- dependency-name: http
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-08 13:11:20 -04:00
Jade Lovelace 7bf1f82c90
Fix the flake URL format (#483)
Good old Nix changing things. I've also noted the branch format, since they hard-coded it to master by default.
2022-06-08 13:11:13 -04:00
dependabot[bot] 1bb8e9302c
build(deps): bump tokio-stream from 0.1.8 to 0.1.9 (#484)
Bumps [tokio-stream](https://github.com/tokio-rs/tokio) from 0.1.8 to 0.1.9.
- [Release notes](https://github.com/tokio-rs/tokio/releases)
- [Commits](https://github.com/tokio-rs/tokio/compare/tokio-stream-0.1.8...tokio-stream-0.1.9)

---
updated-dependencies:
- dependency-name: tokio-stream
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-08 13:11:02 -04:00
dependabot[bot] a57512d083
build(deps): bump tower-http from 0.3.3 to 0.3.4 (#488)
Bumps [tower-http](https://github.com/tower-rs/tower-http) from 0.3.3 to 0.3.4.
- [Release notes](https://github.com/tower-rs/tower-http/releases)
- [Commits](https://github.com/tower-rs/tower-http/compare/tower-http-0.3.3...tower-http-0.3.4)

---
updated-dependencies:
- dependency-name: tower-http
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-08 13:10:52 -04:00
Charlotte Som c4cf9c156a
Switch to xeiaso.net links in resume.md (#485)
Fixes Xe/site#482
2022-06-06 07:37:06 -07:00
Cadey Ratio 60196d90ed the oasis
Signed-off-by: Xe Iaso <me@christine.website>
2022-06-03 21:58:45 -04:00
Cadey Ratio 7c7904c17c site update: new domain
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-28 17:04:06 +00:00
Cadey Ratio 7cf1429afa new email
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-28 11:15:59 -04:00
Cadey Ratio 554dbbb53a fix sitemap
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-28 09:50:27 -04:00
Cadey Ratio 2d00c19205 remove old attempt
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-28 09:28:55 -04:00
Cadey Ratio bdc64f78f2 xeiaso.net
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-28 09:17:01 -04:00
Cadey Ratio ff64215d07 ssh key storage
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-28 00:31:23 -04:00
Cadey Ratio a12c957abe kubernetes a heck
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-26 16:00:01 -04:00
Cadey Ratio 71c33c083a templates/mara: use xeiaso.net CDN
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-26 14:40:29 -04:00
Cadey Ratio ac12ebf063 static: xeiaso.net
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-26 14:16:25 -04:00
Cadey Ratio ffab774e6a flake: xeiaso.net
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-26 13:46:17 -04:00
Cadey Ratio f8e2212549 talks: update to xeiaso.net
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-26 13:45:24 -04:00
Cadey Ratio 356f12cbdc blog: update to xeiaso.net
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-26 13:44:47 -04:00
Cadey Ratio 06b00eb7c3 templates: change canonical domain to xeiaso.net
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-26 13:41:26 -04:00
henri b1277d209d
Signal Booster (#480)
added henri shustak
2022-05-25 22:21:10 -04:00
Shadlock0133 901a306002
Fix typo (#481) 2022-05-25 22:05:58 -04:00
Cadey Ratio e545abeb1a templates/{blog,talk}post: clarify time zone
Apparently the fact that my blog runs on UTC is surprising to people.

Signed-off-by: Xe Iaso <me@christine.website>
2022-05-24 17:46:10 -04:00
dependabot[bot] 08d96305af
build(deps): bump http-body from 0.4.4 to 0.4.5 (#479)
Bumps [http-body](https://github.com/hyperium/http-body) from 0.4.4 to 0.4.5.
- [Release notes](https://github.com/hyperium/http-body/releases)
- [Changelog](https://github.com/hyperium/http-body/blob/master/CHANGELOG.md)
- [Commits](https://github.com/hyperium/http-body/compare/v0.4.4...v0.4.5)

---
updated-dependencies:
- dependency-name: http-body
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-23 15:34:44 -04:00
Cadey Ratio 019af1d532 blog/go2: add patrone blurb
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-23 15:05:21 -04:00
Cadey Ratio 7c920ca5e9 blog: against toxicity in programming languages
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-23 15:02:12 -04:00
Cadey Ratio 8c88f66c67 update nixpkgs
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-23 13:01:48 -04:00
Cadey Ratio d4145bd406 blog: update salary history
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-20 12:34:29 -04:00
dependabot[bot] e080361dd4
build(deps): bump axum from 0.5.5 to 0.5.6 (#472)
Bumps [axum](https://github.com/tokio-rs/axum) from 0.5.5 to 0.5.6.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-v0.5.5...axum-v0.5.6)

---
updated-dependencies:
- dependency-name: axum
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-20 09:42:04 -04:00
dependabot[bot] ba49eb8f12
build(deps): bump serde_dhall from 0.11.0 to 0.11.1 (#478)
Bumps [serde_dhall](https://github.com/Nadrieril/dhall-rust) from 0.11.0 to 0.11.1.
- [Release notes](https://github.com/Nadrieril/dhall-rust/releases)
- [Changelog](https://github.com/Nadrieril/dhall-rust/blob/master/CHANGELOG.md)
- [Commits](https://github.com/Nadrieril/dhall-rust/compare/serde_dhall-v0.11.0...serde_dhall-v0.11.1)

---
updated-dependencies:
- dependency-name: serde_dhall
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-20 09:09:57 -04:00
dependabot[bot] 00d101b393
build(deps): bump prometheus from 0.13.0 to 0.13.1 (#477)
Bumps [prometheus](https://github.com/tikv/rust-prometheus) from 0.13.0 to 0.13.1.
- [Release notes](https://github.com/tikv/rust-prometheus/releases)
- [Changelog](https://github.com/tikv/rust-prometheus/blob/master/CHANGELOG.md)
- [Commits](https://github.com/tikv/rust-prometheus/compare/v0.13.0...v0.13.1)

---
updated-dependencies:
- dependency-name: prometheus
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-20 09:09:49 -04:00
dependabot[bot] b53cd41193
build(deps): bump axum-macros from 0.2.1 to 0.2.2 (#476)
Bumps [axum-macros](https://github.com/tokio-rs/axum) from 0.2.1 to 0.2.2.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-macros-v0.2.1...axum-macros-v0.2.2)

---
updated-dependencies:
- dependency-name: axum-macros
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-20 09:09:36 -04:00
dependabot[bot] 09aba58ebd
build(deps): bump axum-extra from 0.3.1 to 0.3.3 (#475)
Bumps [axum-extra](https://github.com/tokio-rs/axum) from 0.3.1 to 0.3.3.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-extra-v0.3.1...axum-extra-v0.3.3)

---
updated-dependencies:
- dependency-name: axum-extra
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-20 09:09:25 -04:00
Cadey Ratio 230df6f54e blog: add 'i fixed the patron page' post
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-18 20:39:12 -04:00
Cadey Ratio 565949cec2 fix patrone
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-18 20:30:16 -04:00
Cadey Ratio e2b9f384bf look for patreon creds in ~ instead of .
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-18 20:17:48 -04:00
Nat Amundsen 0e22f4c224 Remove nat amundsen (#474)
* Add Nat Amundsen

* Update signalboost.dhall
2022-05-18 16:44:55 -04:00
Cadey Ratio 62014b2dba oops lol
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-18 16:44:55 -04:00
Cadey Ratio 62dc15c339 infrastructure
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-18 16:44:55 -04:00
Cadey Ratio 2432b5a4fc shill fly.io
Signed-off-by: Xe <me@christine.website>
2022-05-18 16:44:55 -04:00
Cadey Ratio 7a3d64fec1 Merge pull request 'blog: we already have go 2' (#5) from we-have-go-2 into main
Reviewed-on: cadey/xesite#5
2022-05-18 20:44:41 +00:00
Cadey Ratio b3bdb9388a oh god i'm going to become poor
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-14 13:32:04 +00:00
Cadey Ratio c5cbcc47ec more fixes
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-13 01:39:32 +00:00
Cadey Ratio c5ff939007 blog/we-have-go-2: fix indent
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-13 00:51:42 +00:00
Cadey Ratio c55ef48ba3 blog/we-have-go-2: write generics section
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-13 00:51:42 +00:00
Cadey Ratio bdc85b1536 blog/we-have-go-2: more into generics 2022-05-13 00:51:42 +00:00
Cadey Ratio b1264c8b69 blog/we-have-go-2: more changes from the park 2022-05-13 00:51:42 +00:00
Cadey Ratio 1d0066eeda blog: we already have go 2
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-13 00:51:42 +00:00
Cadey Ratio 43a542e192 stickybox
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-12 21:53:48 +00:00
Cadey Ratio 7e92f3c7a9 adaptive class
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-12 21:43:40 +00:00
Cadey Ratio 737b4700a0 put ads at the end
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-12 21:25:56 +00:00
Cadey Ratio 22ab51b29c oops lol
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-12 21:24:53 +00:00
Cadey Ratio b312b9931d add ethicalads
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-12 21:22:50 +00:00
dependabot[bot] e057c81325
build(deps): bump axum-extra from 0.3.0 to 0.3.1 (#471)
Bumps [axum-extra](https://github.com/tokio-rs/axum) from 0.3.0 to 0.3.1.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-extra-v0.3.0...axum-extra-v0.3.1)

---
updated-dependencies:
- dependency-name: axum-extra
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-11 22:06:26 -04:00
dependabot[bot] fd6d806365
build(deps): bump axum from 0.5.4 to 0.5.5 (#470)
Bumps [axum](https://github.com/tokio-rs/axum) from 0.5.4 to 0.5.5.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-v0.5.4...axum-v0.5.5)

---
updated-dependencies:
- dependency-name: axum
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-11 22:06:22 -04:00
dependabot[bot] e351243126
build(deps): bump axum-macros from 0.2.0 to 0.2.1 (#469)
Bumps [axum-macros](https://github.com/tokio-rs/axum) from 0.2.0 to 0.2.1.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-macros-v0.2.0...axum-macros-v0.2.1)

---
updated-dependencies:
- dependency-name: axum-macros
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-11 22:06:16 -04:00
dependabot[bot] eee6502aa1
build(deps): bump tower-http from 0.3.2 to 0.3.3 (#468)
Bumps [tower-http](https://github.com/tower-rs/tower-http) from 0.3.2 to 0.3.3.
- [Release notes](https://github.com/tower-rs/tower-http/releases)
- [Commits](https://github.com/tower-rs/tower-http/compare/tower-http-0.3.2...tower-http-0.3.3)

---
updated-dependencies:
- dependency-name: tower-http
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-09 07:33:20 -04:00
Charlie Groves c07ac3b6c0
Fix examples to work with Nix >= 2.7 (#467) 2022-05-08 22:18:17 -04:00
Cadey Ratio 06d4bf7d69 src/app/markdown: no-js xeblog-conv support
Thanks to the meddling of @fasterthanlime, I now use lol_html[0] to
parse the <xeblog-conv> elements on the server side instead of on the
client side as an HTML custom element. I will be using this strategy in
the future to expand my blog's functionality and make the christine dot
website cinematic universe stronger.

Signed-off-by: Xe Iaso <me@christine.website>
2022-05-07 14:32:04 +00:00
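A minimal sketch of the approach this commit describes, using lol_html's element handlers to rewrite `<xeblog-conv>` on the server instead of with a client-side custom element. Only the tag name and its `name`/`mood` attributes come from the posts themselves; the emitted markup and avatar path below are invented for illustration:

```rust
use lol_html::{element, html_content::ContentType, rewrite_str, RewriteStrSettings};

// Rewrite <xeblog-conv name="..." mood="..."> elements into plain HTML
// so the browser needs no JavaScript to render the conversation snippets.
fn render_convs(html: &str) -> Result<String, lol_html::errors::RewritingError> {
    rewrite_str(
        html,
        RewriteStrSettings {
            element_content_handlers: vec![element!("xeblog-conv", |el| {
                let name = el.get_attribute("name").unwrap_or_default();
                let mood = el.get_attribute("mood").unwrap_or_default();
                el.set_tag_name("div")?;
                el.set_attribute("class", "conversation")?;
                // Hypothetical markup/avatar path, not the site's real template.
                el.prepend(
                    &format!(r#"<img src="/static/img/avatars/{name}_{mood}.png" alt="{name}"> <b>{name}</b>: "#),
                    ContentType::Html,
                );
                Ok(())
            })],
            ..RewriteStrSettings::default()
        },
    )
}
```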
Cadey Ratio 261d0b65df
Update heroku-devex-2022-05-12.markdown 2022-05-05 17:58:00 -04:00
Nat Amundsen 628b572455
Add Nat Amundsen (#466) 2022-05-05 11:56:41 -04:00
Cadey Ratio 6be7bd7613 blog: I Miss Heroku's DevEx
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-05 14:58:41 +00:00
Cadey Ratio cd1ce7785a add support for pre-publication posts
Signed-off-by: Xe Iaso <me@christine.website>
2022-05-05 14:58:23 +00:00
dependabot[bot] a257536b58
build(deps): bump tower-http from 0.3.1 to 0.3.2 (#465)
Bumps [tower-http](https://github.com/tower-rs/tower-http) from 0.3.1 to 0.3.2.
- [Release notes](https://github.com/tower-rs/tower-http/releases)
- [Commits](https://github.com/tower-rs/tower-http/compare/tower-http-0.3.1...tower-http-0.3.2)

---
updated-dependencies:
- dependency-name: tower-http
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-05-02 07:30:01 -04:00
Cadey Ratio add0fa3e37
Nix flakes 4 wsl (#464)
* nix flakes 4: WSL

Signed-off-by: Xe Iaso <me@christine.website>

* edits

Signed-off-by: Xe Iaso <me@christine.website>

* more edits

Signed-off-by: Xe Iaso <me@christine.website>
2022-05-01 09:12:27 -04:00
Cadey Ratio e03e1712b1 amount
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-30 14:26:49 -04:00
Cadey Ratio dacc7159d7 shitposting as a service
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-30 17:43:30 +00:00
Cadey Ratio 021f70fd90 bump nixpkgs
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-30 14:23:34 +00:00
dependabot[bot] f7aa184c20
build(deps): bump tower-http from 0.3.0 to 0.3.1 (#462)
Bumps [tower-http](https://github.com/tower-rs/tower-http) from 0.3.0 to 0.3.1.
- [Release notes](https://github.com/tower-rs/tower-http/releases)
- [Commits](https://github.com/tower-rs/tower-http/compare/tower-http-0.3.0...tower-http-0.3.1)

---
updated-dependencies:
- dependency-name: tower-http
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-04-29 21:55:44 -04:00
dependabot[bot] 1dfd47708f
build(deps): bump http from 0.2.6 to 0.2.7 (#461)
Bumps [http](https://github.com/hyperium/http) from 0.2.6 to 0.2.7.
- [Release notes](https://github.com/hyperium/http/releases)
- [Changelog](https://github.com/hyperium/http/blob/master/CHANGELOG.md)
- [Commits](https://github.com/hyperium/http/compare/v0.2.6...v0.2.7)

---
updated-dependencies:
- dependency-name: http
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-04-29 21:55:38 -04:00
dependabot[bot] 4d05bd7347
build(deps): bump axum from 0.5.0 to 0.5.4 (#459)
Bumps [axum](https://github.com/tokio-rs/axum) from 0.5.0 to 0.5.4.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-v0.5.0...axum-v0.5.4)

---
updated-dependencies:
- dependency-name: axum
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-04-27 20:05:04 -04:00
dependabot[bot] 22c29ce498
build(deps): bump eyre from 0.6.7 to 0.6.8 (#452)
Bumps [eyre](https://github.com/yaahc/eyre) from 0.6.7 to 0.6.8.
- [Release notes](https://github.com/yaahc/eyre/releases)
- [Changelog](https://github.com/yaahc/eyre/blob/master/CHANGELOG.md)
- [Commits](https://github.com/yaahc/eyre/compare/v0.6.7...v0.6.8)

---
updated-dependencies:
- dependency-name: eyre
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-04-27 09:30:31 -04:00
dependabot[bot] 1019e42fa7
build(deps): bump tower-http from 0.2.5 to 0.3.0 (#458)
Bumps [tower-http](https://github.com/tower-rs/tower-http) from 0.2.5 to 0.3.0.
- [Release notes](https://github.com/tower-rs/tower-http/releases)
- [Commits](https://github.com/tower-rs/tower-http/compare/tower-http-0.2.5...tower-http-0.3.0)

---
updated-dependencies:
- dependency-name: tower-http
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-04-27 09:30:05 -04:00
dependabot[bot] 4170e3b78c
build(deps): bump axum-extra from 0.2.0 to 0.3.0 (#460)
Bumps [axum-extra](https://github.com/tokio-rs/axum) from 0.2.0 to 0.3.0.
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-extra-v0.2.0...axum-extra-v0.3.0)

---
updated-dependencies:
- dependency-name: axum-extra
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-04-27 09:06:44 -04:00
Cadey Ratio ad2f5c739f twitter, mastodon, rasengan
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-26 01:02:27 +00:00
Cadey Ratio 05135edcbe i am good at the computer boxes
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-25 06:03:36 -04:00
Cadey Ratio 9566b790bc gonads better
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-24 20:36:41 -04:00
Cadey Ratio 67de839da8 gonads
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-25 00:17:52 +00:00
Cadey Ratio f590fc71d1 fix devshell
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-22 23:39:08 +00:00
Cadey Ratio 2e539512b7 convert to flakes
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-22 23:36:25 +00:00
Cadey Ratio f51752ed3c stop using politics as a cudgel to discourage experimentation
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-21 12:53:15 +00:00
dependabot[bot] e825b1b904
build(deps): bump tracing-subscriber from 0.3.9 to 0.3.11 (#454)
Bumps [tracing-subscriber](https://github.com/tokio-rs/tracing) from 0.3.9 to 0.3.11.
- [Release notes](https://github.com/tokio-rs/tracing/releases)
- [Commits](https://github.com/tokio-rs/tracing/compare/tracing-subscriber-0.3.9...tracing-subscriber-0.3.11)

---
updated-dependencies:
- dependency-name: tracing-subscriber
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-04-13 07:51:14 -04:00
Cadey Ratio 20aeb35890 css: fix conversation width in the HTML element
> Add min-width:0 to .conversation-chat in shim.css. This is not only a
> fix, but the correct fix.
>
> But WTF?
>
> Well quite simply, the default value of min-width is auto, which
> normally acts like 0, but for a flex item, auto becomes something more
> along the lines of min-content (except special complicated rules that I
> don't fully understand apply). The net result is that the minimum width
> prevents the code boxes in the conversation from using their overflow:
> auto to enable scrolling. Setting min-width: 0 is effectively just
> turning this special rule off, and causing it use more normal box sizing
> rules.
>
> I hate how weird and impossible to understand the css box model is.

Signed-off-by: Xe Iaso <me@christine.website>
2022-04-07 17:53:12 +00:00
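For reference, a minimal sketch of the fix the quote above describes, assuming the `.conversation-chat` class in `shim.css` mentioned there:

```css
/* A flex item's min-width defaults to auto (roughly min-content), which keeps
   the code boxes from shrinking and defeats their overflow: auto scrolling.
   Forcing min-width back to 0 restores normal shrinking behavior. */
.conversation-chat {
  min-width: 0;
}
```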
Cadey Ratio 449ddabce1
Nix flakes 3 (#453)
* blog: add third nix flakes post

Signed-off-by: Xe <me@christine.website>

* make nix flakes post 3 better, thanks open

Signed-off-by: Xe Iaso <me@christine.website>
2022-04-06 21:43:45 -04:00
Cadey Ratio 6b771b5503 fix better????
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-05 21:02:14 -04:00
Cadey Ratio ea8e1e045a blow up without patrone
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-05 20:59:35 -04:00
Cadey Ratio 3a4827c887 log refresh token errors
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-05 20:53:47 -04:00
Cadey Ratio fd6ac469a6 fix???
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-05 20:40:18 -04:00
Cadey Ratio fa2ada9747 don't read patreon creds from envvars
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-05 20:35:07 -04:00
Cadey Ratio 3a5c7adc42 tolerate no patrone creds
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-06 00:01:52 +00:00
Cadey Ratio e5ee825c0a signal boost: make links optional
Closes #326
Closes #209

Signed-off-by: Xe <me@christine.website>
2022-04-02 16:47:54 +00:00
Cadey Ratio e665412345 blog/backslash-kubernetes: change title
Closes #394

Signed-off-by: Xe Iaso <me@christine.website>
2022-04-02 16:36:41 +00:00
kjain 828a5f277e
add kjain (#437) 2022-04-02 12:25:48 -04:00
Cadey Ratio 7c90296bf0 Update to Axum 0.5
Closes #446
Closes #447
Closes #448

Signed-off-by: Xe <me@christine.website>
2022-04-02 16:24:39 +00:00
Cadey Ratio 1c8c3396a7 lib/patreon: refresh token support
This should hopefully make the patrons page work consistently and no
longer require me to manually update the patreon token once per month.
Why didn't I do this ages ago??????

Hacked up live on twitch: https://twitch.tv/princessxen

Closes #442

Signed-off-by: Xe <me@christine.website>
2022-04-02 16:15:10 +00:00
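For the curious, a rough sketch of what a refresh-token exchange looks like with reqwest. The endpoint and form fields follow Patreon's public OAuth2 documentation; the struct and function names are made up for illustration and are not taken from this repo's `lib/patreon` code:

```rust
use serde::Deserialize;

// Shape of the token response per standard OAuth2; field names may differ
// slightly from what lib/patreon actually uses.
#[derive(Deserialize)]
struct TokenResponse {
    access_token: String,
    refresh_token: String,
    expires_in: u64,
}

// Exchange a refresh token for a fresh access token.
async fn refresh_patreon_token(
    client_id: &str,
    client_secret: &str,
    refresh_token: &str,
) -> Result<TokenResponse, reqwest::Error> {
    reqwest::Client::new()
        .post("https://www.patreon.com/api/oauth2/token")
        .form(&[
            ("grant_type", "refresh_token"),
            ("refresh_token", refresh_token),
            ("client_id", client_id),
            ("client_secret", client_secret),
        ])
        .send()
        .await?
        .error_for_status()?
        .json::<TokenResponse>()
        .await
}
```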
Cadey Ratio 0c0c5875e6 oops lol
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-01 20:34:25 -04:00
Cadey Ratio 7fdae76543 blog: suggestions for recruiters
Signed-off-by: Xe Iaso <me@christine.website>
2022-04-01 19:44:34 -04:00
Cadey Ratio 1bedcb6a25 compiling matter in my living room
Signed-off-by: Christine Dodrill <me@christine.website>
2022-03-28 18:47:50 -04:00
Martin Schwaighofer 66574582f2
nix flakes can access private repos with git+ssh (#444)
Sadly using ssh+git does not work. I tested this locally.
See also: https://discourse.nixos.org/t/url-format-for-flake-over-git-ssh/7538/2
2022-03-26 14:30:54 -04:00
159 changed files with 7580 additions and 1018 deletions

2
.envrc
View File

@@ -1 +1 @@
-eval "$(lorri direnv)"
+use flake

2
.gitignore vendored
View File

@@ -6,3 +6,5 @@ cw.tar
 /result
 .#*
 /target
+.patreon.json
+.direnv

1065
Cargo.lock generated

File diff suppressed because it is too large

Cargo.toml
View File

@@ -9,13 +9,14 @@ repository = "https://github.com/Xe/site"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [dependencies]
-axum = "0.4"
-axum-macros = "0.1"
-axum-extra = "0.1"
+axum = { version = "0.5", features = ["headers"] }
+axum-macros = "0.2"
+axum-extra = "0.3"
 color-eyre = "0.6"
 chrono = "0.4"
-comrak = "0.12.1"
+comrak = "0.14.0"
 derive_more = "0.99"
+dirs = "4"
 envy = "0.4"
 estimated_read_time = "1"
 futures = "0.3"
@@ -26,11 +27,14 @@ hyper = "0.14"
 kankyo = "0.3"
 lazy_static = "1.4"
 log = "0.4"
+lol_html = "0.3"
+maud = { version = "0.23.0", features = ["axum"] }
 mime = "0.3.0"
 prometheus = { version = "0.13", default-features = false, features = ["process"] }
 rand = "0"
+regex = "1"
 reqwest = { version = "0.11", features = ["json"] }
-serde_dhall = "0.11.0"
+serde_dhall = "0.11.2"
 serde = { version = "1", features = ["derive"] }
 serde_yaml = "0.8"
 sitemap = "0.4"
@@ -44,9 +48,11 @@ xml-rs = "0.8"
 url = "2"
 uuid = { version = "0.8", features = ["serde", "v4"] }
+xesite_types = { path = "./lib/xesite_types" }
 # workspace dependencies
 cfcache = { path = "./lib/cfcache" }
-jsonfeed = { path = "./lib/jsonfeed" }
+xe_jsonfeed = { path = "./lib/jsonfeed" }
 mi = { path = "./lib/mi" }
 patreon = { path = "./lib/patreon" }
@@ -55,7 +61,7 @@ version = "0.4"
 features = [ "full" ]
 [dependencies.tower-http]
-version = "0.2"
+version = "0.3"
 features = [ "full" ]
 # os-specific dependencies

View File

@@ -0,0 +1,93 @@
---
title: Compiling Code to Matter in My Living Room
date: 2022-03-28
tags:
- openscad
- 3dprinting
---
In a moment of weakness, my husband and I got a 3d printer. It's mostly been sitting around and not doing much since we got it, but recently I found a great use for it: I wanted a controller stand for my Valve Index controllers and VR full body trackers.
After doing some digging on Thingiverse, I found [this stand](https://www.thingiverse.com/thing:4587097) that looked like it had promise. So I downloaded the model, sliced it and then sent it over to Kyubey:
<blockquote class="twitter-tweet"><p lang="tl" dir="ltr">Kyuubey is happy <a href="https://t.co/atTLN8MSgc">pic.twitter.com/atTLN8MSgc</a></p>&mdash; Xe Iaso (@theprincessxena) <a href="https://twitter.com/theprincessxena/status/1507485129907871747?ref_src=twsrc%5Etfw">March 25, 2022</a></blockquote> <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
[Kyubey's name is a reference to <a href="https://madoka.fandom.com/wiki/Kyubey">Kyubey</a> from Puella Magi Madoka Magica.](conversation://Mara/hacker)
Once it was done I ended up with a stand that I could feed [these cables I got from Amazon](https://www.amazon.ca/gp/product/B09LSF8XL9/) through. The tracker holes worked great, but the controller holes were just barely too small.
This was kinda frustrating and I almost gave up on the project, but then I remembered that [OpenSCAD](https://openscad.org) existed. OpenSCAD is a weird programming environment / 3D modeling hybrid program that I've seen used on Thingiverse. It works by letting you position platonic solids into a 3d environment, and from there you can create anything you want.
One of the primitives that OpenSCAD offers is a cylinder. So I wondered if I could use one of those to widen the hole in the index stand and then reprint the part with the wider hole.
[Wait, you're using a CAD program to fix your 3D print by modifying the model instead of using, I don't know, a drill and 5 minutes to make it fit that way?](conversation://Numa/dismay)
[There's no doing like overdoing!](conversation://Cadey/enby)
After some finagling, I managed to get the cylinders in the right place with this OpenSCAD code:
```scad
//difference() {
color("magenta") translate([0, 0, 0]) import("./assets/ValveTrackerDeckEditedByInugoro.stl");
// bores for controller holders
color([0, 1, 0]) translate([63, 44, 0]) cylinder(h = 55, r = 4.75);
color([0, 1, 0]) translate([-63, 44, 0]) cylinder(h = 55, r = 4.75);
//}
```
<blockquote class="twitter-tweet"><p lang="en" dir="ltr">Some finagling required <a href="https://t.co/7T0R6x1XoP">pic.twitter.com/7T0R6x1XoP</a></p>&mdash; Xe Iaso (@theprincessxena) <a href="https://twitter.com/theprincessxena/status/1508566854926745614?ref_src=twsrc%5Etfw">March 28, 2022</a></blockquote> <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
And when I uncommented the `difference()` block, it ended up looking good enough:
<blockquote class="twitter-tweet" data-conversation="none" data-dnt="true"><p lang="und" dir="ltr"><a href="https://t.co/fiShvlN8QH">pic.twitter.com/fiShvlN8QH</a></p>&mdash; Xe Iaso (@theprincessxena) <a href="https://twitter.com/theprincessxena/status/1508567556759728141?ref_src=twsrc%5Etfw">March 28, 2022</a></blockquote> <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
So then I took a good solid look at the rest of the 3D printed part to see if I could improve on anything else before I sent it to another round of the printer. The last stand took _14 hours_ to print and used a lot of material. I want to avoid waste.
Something I noticed is that the front of the print where all the cables come out was a bit too thin. All 5 of the cables wouldn't fit in there (my braided cables must have been thicker than the ones that the original modeler used). So again I grabbed a few platonic solids and managed to make it work out:
```scad
// widen the paths
color("green") translate([0, -16, 1.3]) rotate([0, 0, 90]) cube([10, 57, 7.8], center = true);
color("green") translate([0, 0, 1.7]) rotate([0, 0, 0]) cube([25, 30, 7], center = true);
```
<blockquote class="twitter-tweet" data-conversation="none" data-dnt="true"><p lang="und" dir="ltr"><a href="https://t.co/pKAVtiPfDS">pic.twitter.com/pKAVtiPfDS</a></p>&mdash; Xe Iaso (@theprincessxena) <a href="https://twitter.com/theprincessxena/status/1508568858650685440?ref_src=twsrc%5Etfw">March 28, 2022</a></blockquote> <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
Then I wanted to add some wedges into the underside of the part to help me get the print off the bed. Most people have a problem with bed adhesion being too little. I have too much bed adhesion. So I added some angled rectangles:
```scad
// wedges to help get the print off the bed
color([1, 1, 0]) translate([-120, 0, 0]) rotate([15, 0, 90]) cube([10, 11, 2], center = true); // right
color([1, 1, 0]) translate([120, 0, 0]) rotate([-15, 0, 90]) cube([10, 11, 2], center = true); // left
color([1, 1, 0]) translate([0, -85, 0]) rotate([0, 15, 90]) cube([10, 11, 2], center = true); // back
color([1, 1, 0]) translate([60, 56, 1]) rotate([0, -15, 90]) cube([10, 11, 2], center = true); // front left
color([1, 1, 0]) translate([-60, 56, 1]) rotate([0, -15, 90]) cube([10, 11, 2], center = true); // front right
color([1, 1, 0]) translate([32.5, 41, 1]) rotate([0, -15, 130]) cube([10, 11, 2], center = true); // front left inner
color([1, 1, 0]) translate([-32.5, 41, 1]) rotate([0, -15, 60]) cube([10, 11, 2], center = true); // front right inner
```
<blockquote class="twitter-tweet" data-conversation="none" data-dnt="true"><p lang="und" dir="ltr"><a href="https://t.co/XUQ9ZeYk1H">pic.twitter.com/XUQ9ZeYk1H</a></p>&mdash; Xe Iaso (@theprincessxena) <a href="https://twitter.com/theprincessxena/status/1508569796253827077?ref_src=twsrc%5Etfw">March 28, 2022</a></blockquote> <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
And then once I spun it around for a bit and thought it was good, I sliced it in PrusaSlicer and sent it off to Kyubey. It was going to take 14 hours, so I went off to do other things, ate dinner and then went to bed while the printer continued.
<blockquote class="twitter-tweet" data-conversation="none" data-dnt="true"><p lang="fr" dir="ltr">Diligent bean <a href="https://t.co/yPgnJA0ZdW">pic.twitter.com/yPgnJA0ZdW</a></p>&mdash; Xe Iaso (@theprincessxena) <a href="https://twitter.com/theprincessxena/status/1508397506031460352?ref_src=twsrc%5Etfw">March 28, 2022</a></blockquote> <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
Then when I woke up, Kyubey was done:
<blockquote class="twitter-tweet" data-conversation="none" data-dnt="true"><p lang="und" dir="ltr"><a href="https://t.co/2E1IS810EH">pic.twitter.com/2E1IS810EH</a></p>&mdash; Xe Iaso (@theprincessxena) <a href="https://twitter.com/theprincessxena/status/1508407046995156992?ref_src=twsrc%5Etfw">March 28, 2022</a></blockquote> <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
I was excited and chiseled the print off the bed (the wedges helped a little, but it ended up making the print look kinda weird so I don't know if I will do that again), but the hole for the middle tracker didn't fit perfectly. Everything else did though.
[If you want to get prints off your printer easier, see this video for the method we're starting to use: <br /><br /><iframe width="560" height="315" src="https://www.youtube.com/embed/VCCbzCvtRzU" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>](conversation://Mara/hacker)
I looked on my desk and found that a random pen that I had sitting around for months was about the right size, so I pushed it into and out of the hole a few times and then the cables fit perfectly. I assume some plastic was in a weird state or something.
Then I set everything up and I had my Index controller stand:
<blockquote class="twitter-tweet" data-conversation="none" data-dnt="true"><p lang="en" dir="ltr">Victory! <a href="https://t.co/A3aCtQMQt5">pic.twitter.com/A3aCtQMQt5</a></p>&mdash; Xe Iaso (@theprincessxena) <a href="https://twitter.com/theprincessxena/status/1508426229464064001?ref_src=twsrc%5Etfw">March 28, 2022</a></blockquote> <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
[I really need to get a table or something for this.](conversation://Cadey/facepalm)
I've uploaded my modified version to [Thingiverse](https://www.thingiverse.com/thing:5332988). If you want to see the OpenSCAD code, you can check it out on GitHub [here](https://github.com/Xe/3dstuff/blob/main/index_stand_hack.scad). I'm really liking OpenSCAD so far. It's very weird, but it lets you build up whatever you want by chaining together basic shapes. I imagine I will be using it a lot in the future, especially once my husband's new sim racing gear comes in.
Having a 3D printer around is like having a very weird superpower on standby. You can compile matter in your living room, but you need a very pedantic description of what that should look like. You also can have any material you like as long as it's plastic. However when it's useful, it's a lifesaver. You can make something to fit a gap or mend something broken or even add functionality to something that lacked it. The cloud's the limit!

View File

@@ -7,7 +7,7 @@ tags:
 ---
 [*Last time in the christine dot website cinematic
-universe:*](https://christine.website/blog/unix-domain-sockets-2021-04-01)
+universe:*](https://xeiaso.net/blog/unix-domain-sockets-2021-04-01)
 *Unix sockets started to be used to grace the cluster. Things were at peace.
 Then, a realization came through:*

View File

@@ -0,0 +1,65 @@
---
title: "My Stance on Toxicity About Programming Languages"
date: 2022-05-23
tags:
- toxicity
- culture
---
I have been toxic and hateful in the past about programming language choice. I
now realize this is a mistake. I am sorry if my being toxic about programming
languages has harmed you.
By toxic, I mean doing things or saying things that imply people are lesser for
having different experiences and preferences about programming languages. I have
seen people imply that using languages like PHP or Node.js means that they are
idiots or similar. This is toxic behavior and I do not want to be a part of it
in the future.
I am trying not to be toxic about programming languages in the future. Each
programming language is made to solve the tasks its designers had in mind, and
being toxic about it helps nobody. By being toxic about programming languages
like this, I only serve to spread toxicity and then see it be repeated as the
people that look up to me as a role model will then strive to repeat my
behavior. This cannot continue. I do not want my passion projects to become
synonymous with toxicity and vitriol. I do not want to be known as the person
that hates $PROGRAMMING_LANGUAGE. I want to break the cycle.
With this, I want to confirm that I will not write any more attack articles
about programming languages. I am doing my best to ensure that this will also
spread to my social media actions, conference talks and as many other things as
I can.
I challenge all of you readers to take this challenge too. Don't spread toxicity
about programming languages. All of the PHP hate out there is a classic example
of this. PHP is a viable programming language that is used by a large percentage
of the internet. By insinuating that everyone using PHP is inferior (or worse)
you only serve to push people away and worst case cause them to be toxic about
the things you like. Toxicity breeds toxicity and the best way to stop it is to
be the one to break the cycle and have others follow in your footsteps.
I have been incredibly toxic about PHP in the past. PHP is one of if not the
most widely used programming languages for writing applications that run on a
web server. Its design makes it dead simple to understand how incoming HTTP
requests relate to files on the disk. There is no compile step. The steps to
make a change are to open the file on the server, make the change you want to
see and press F5. This is a developer experience that is unparalleled in most
HTTP frameworks that I've seen in other programming environments. PHP users
deserve better than to be hated on. PHP is an incredibly valid choice and I'm
sure that with the right linters and human review in the mix it can be as secure
as "properly written" services in Go, Java and Rust.
<xeblog-conv name="Cadey" mood="enby">Take the "don't be toxic about programming
languages" challenge! Just stop with the hate, toxicity and vitriol. Our jobs
are complicated enough already. Being toxic to each other about how we decide to
solve problems is a horrible life decision at the least and actively harmful to
people's careers at most. Just stop.</xeblog-conv>
---
<xeblog-conv name="Mara" mood="hacker">This post is not intended as a sub-blog.
If you feel that this post is calling you out, please don't take this
personally. There is a lot of toxicity out there and it will take a long time to
totally disarm it, even with people dedicated to doing it. This is an adaptation
of [this twitter
thread](https://twitter.com/theprincessxena/status/1527765025561186304).</xeblog-conv>

View File

@@ -0,0 +1,175 @@
---
title: "Anbernic Win600 First Impressions"
date: 2022-07-14
series: reviews
---
Right now PC gaming is largely a monopoly centered around Microsoft Windows.
Many PC games only support Windows and a large fraction of them use technical
means to prevent gamers on other platforms from playing those games. In 2021,
Valve introduced the [Steam Deck](https://www.steamdeck.com/en/) as an
alternative to that monopoly. There's always been a small, underground market
for handheld gaming PCs that let you play PC games on the go, but it's a very
niche market dominated by a few big players that charge a lot of money relative
to the gaming experience they deliver. The Steam Deck radically changed this
equation and it's still on backorder to this day. This has made other
manufacturers take notice, and one of them is Anbernic.
[Anbernic](https://anbernic.com/) is a company that specializes in making retro
emulation handheld gaming consoles. Recently they released their
[Win600](https://anbernic.com/products/new-anbernic-win600) handheld. It has an
AMD Athlon Silver 3020e or an Athlon Silver 3050e, and today I am going to give
you my first impressions of it. I have the 3050e version.
<xeblog-conv name="Mara" mood="happy">A review of the Steam Deck is coming up
soon!</xeblog-conv>
One of the real standout features of this device is that Anbernic has been
working with Valve to allow people to run SteamOS on it! This makes us all one
step closer to having a viable competitor to Windows for gaming. SteamOS is
fantastic and has revolutionized gaming on Linux. It's good to see it coming to
more devices.
## Out of the box
I ordered my Win600 about 3 hours after sales opened. It arrived in a week and
came in one of those china-spec packages made out of insulation. If you've ever
ordered things from AliExpress you know what I'm talking about. It's just a
solid mass of insulation.
[![The console sitting on my desk with its charging brick and included cable](https://cdn.xeiaso.net/file/christine-static/img/FXozAEjUsAQEg9d-smol.jpeg)](https://cdn.xeiaso.net/file/christine-static/img/FXozAEjUsAQEg9d.jpeg)
The unboxing experience was pretty great. The console came with:
* The console
* A box containing the charger and cable
* A slip of paper telling you how to set up Windows without a Wi-Fi connection
* A screen protector and cleaning cloth (my screen protector was broken in the
box, so much for all that insulation lol)
* A user manual that points out obvious things about your device
One of the weirder things about this device is the mouse/gamepad slider on the
side. It changes the USB devices on the system and makes the gamepad either act
like an xinput joypad or a mouse and keyboard. The mouse and keyboard controls
are strange. Here are the controls I have discovered so far:
* R1 is right click
* L1 is left click
* A is enter
* The right stick very slowly skitters the mouse around the screen
* The left stick is a super aggressive scroll wheel
Figuring out these controls on the fly without any help from the manual meant
that I took long enough in the setup screen that [Cortana started to pipe
up](https://youtu.be/yn6bSm9HXFg) and guide me through the setup process. This
was not fun. I had to connect an external keyboard to finish setup.
<xeblog-conv name="Cadey" mood="coffee">This is probably not Anbernic's fault.
Windows is NOT made for smaller devices like this and oh god it
shows.</xeblog-conv>
## Windows 10 "fun"
There is also a keyboard button on the side. When you are using Windows, this
button summons a soft keyboard. Not the nice, modern soft keyboard though; the
terrible legacy soft keyboard that Microsoft has had forever and never really
updated. Using it is grating, like rubbing sandpaper all over your hands. This
made entering my Wi-Fi password an adventure. It took my husband and me 15
minutes to get the device to connect to Wi-Fi. 15 minutes to connect to Wi-Fi.
Once it was connected to Wi-Fi, I tried to update the system to the latest
version of Windows. The Settings update page crashed. Windows Update's service also
crashed. Windows Update also randomly got stuck trying to start the installation
process for updates. Once updates worked and finished installing, I rebooted.
I tried to clean up the taskbar by disabling all of the random icons that
product managers at Microsoft want you to see. The Cortana button was stuck on
and I was unable to disable it. Trying to hide the Windows Meet Now icon crashed
explorer.exe. I don't know what part of this is Windows going out of its way to
mess with me (I'm cursed) and what part of it is Windows really not being
optimized for this hardware in any sense of the word.
Windows is really painful on this device. It's obvious that Windows was not made
with this device in mind. There are buttons to hack around this (but not as far
as the task manager button I've seen on other handhelds), but overall trying to
use Windows with a game console is like trying to saw a log with a pencil
sharpener. It's just the wrong tool for the job. Sure you _can_ do it, but _can_
and _should_ are different words in English.
Another weird thing about Windows on this device is that the screen only reports
a single display mode: 1280x720. It has no support for lower resolutions to run
older games that only work at those lower resolutions. In most cases this will
not be an issue, but if you want to lower the resolution of a game to squeeze
out more performance, then you may have problems.
## Steam
In a moment of weakness, I decided to start up Steam. Steam defaulted to Big
Picture mode and its first-time-user-experience made me set up Wi-Fi again.
There was no way to bypass it. I got out my Moonlander again and typed in my
Wi-Fi password again, and then I downloaded Sonic Adventure 2 as a test for how
games feel on it. Sonic Adventure 2 is a very lightweight game (you can play it
for like 6.5 hours on a full charge of the Steam Deck) and I've played it to
_death_ over the years. I know how the game _should_ feel.
[![The starting screen of City Escape in Sonic Adventure 2, with the console propped up with a Steam Controller](https://cdn.xeiaso.net/file/christine-static/img/FXpIjhwUIAUEujg-smol.jpeg)](https://cdn.xeiaso.net/file/christine-static/img/FXpIjhwUIAUEujg.jpeg)
City Escape ran at a perfect 60 FPS at the device's native resolution. The main
thing I noticed though was the position of the analog sticks. Based on the
design of the device, I'm pretty sure they were going for something with a
PlayStation DualShock 4 layout with the action buttons on the top and the sticks
on the bottom. The sticks are too far down on the device. Playing Sonic
Adventure 2 was kind of painful.
## SteamOS
So I installed SteamOS on the device. Besides a weird issue with 5 GHz Wi-Fi not
working and updates requiring me to reboot the device IMMEDIATELY after
connecting to Wi-Fi, it works great. I can install games and they run. The DPI
for SteamOS is quite wrong though. All the UI elements are painfully small. For
comparison, I put my Steam Deck on the same screen as I had on the Win600. The
Steam Deck is on top and the Win600 is on the bottom.
[![The game overview for Sonic Adventure 2 on both the Steam Deck and Anbernic Win600](https://f001.backblazeb2.com/file/christine-static/img/FXqSz_tVsAAU0wf-smol.jpeg)](https://f001.backblazeb2.com/file/christine-static/img/FXqSz_tVsAAU0wf.jpeg)
Yeah. It leaves things to be desired.
When I had SteamOS set up, I did find something that makes the Win600 slightly
better than the Steam Deck. When you are adding games to Steam with Emulation
Station you need to close the Steam client to edit the leveldb files that Steam
uses to track what games you can launch. On the Steam Deck, the Steam client
also enables the built-in controllers to act as a keyboard and mouse. This means
that you need to poke around and pray with the touchscreen to get EmuDeck games
up and running. The mouse/controller switch on the Win600 makes this slightly
more convenient because the controllers can always poorly act as a mouse and
keyboard.
When you are in KDE on the Win600, you don't get a soft keyboard at all. This is
mildly inconvenient, but can be fixed with the Moonlander yet again. Here's a
screenshot of what my KDE desktop on the Win600 looks like:
[![My SteamOS desktop on the Win600, showing Crossette from Xenoblade Chronicles 2 center frame](https://f001.backblazeb2.com/file/christine-static/img/Screenshot_20220714_144655-smol.jpg)](https://f001.backblazeb2.com/file/christine-static/img/Screenshot_20220714_144655.png)
Overall, SteamOS is a lot more ergonomic in my opinion and will let you play
games to your heart's content.
The D-pad feels really good. I love how it responds. When I did a little bit of
Sonic Mania I never felt like I was inaccurate. There were some weird audio
hitches on Sonic Mania though where the music would cut out randomly. Not sure
what's going on with that. I could play through entire Pokemon games with that
D-pad.
## Conclusions for now
Overall I'm getting the feeling that this device is _okay_. It's not great, it's
not terrible, but it's okay. I need to get some more experience with it, but so
far it seems that this device really does have a weight class and oh god if you
play a game outside its weight class your UX goes to shit instantly. The battery
life leaves _a lot_ to be desired so far. However, it does work. It's hard not
to compare this to the Steam Deck, and it comes up as so much less in that
comparison.
I don't know how I feel about this device. I'm not sure it's worth the money. I
need to get more experience with it. I'll have a better sense of all this when I
write my full review. Stay tuned for that!

View File

@@ -21,7 +21,7 @@ up being the _worst_ experience that I have using an aarch64 MacBook.
[This website](https://github.com/Xe/site) is a fairly complicated webapp
written in Rust. As such it makes for a fairly decent compile stress test. I'm
going to do a compile test against my [Ryzen
-3600](https://christine.website/blog/nixos-desktop-flow-2020-04-25) with this M1
+3600](https://xeiaso.net/blog/nixos-desktop-flow-2020-04-25) with this M1
MacBook Air.
My tower is running this version of Rust:

View File

@@ -1,5 +1,5 @@
---
-title: "&lt;/kubernetes&gt;"
+title: "Goodbye Kubernetes"
date: 2021-01-03
---
@@ -181,7 +181,7 @@ server, my kubernetes cluster and my dokku server:
- hlang -> https://h.christine.website
- mi -> https://mi.within.website
- printerfacts -> https://printerfacts.cetacean.club
-- xesite -> https://christine.website
+- xesite -> https://xeiaso.net
- graphviz -> https://graphviz.christine.website
- idp -> https://idp.christine.website
- oragono -> ircs://irc.within.website:6697/

View File

@@ -0,0 +1,125 @@
---
title: "Site Update: The Big Domain Move To xeiaso.net"
date: 2022-05-28
tags:
- dns
---
Hello all!
If you take a look in the URL bar of your browser (or on the article URL section
of your feed reader), you should see that there is a new domain name! Welcome to
[xeiaso.net](https://xeiaso.net)!
Hopefully nothing broke in the process of moving things over. I tried to make
sure that everything would forward correctly, and today I'm going to explain how
I did that.
I have really good SEO on my NixOS articles, and for my blog in general. I did
not want to risk tanking that SEO when I moved domain names, so I have been
putting this off for the better part of a year. As for why now? I got tired of
the internets complaining that the URL was "christine dot website" when I wanted
to be called "Xe". Now you have no excuse.
So the first step was to be sure that everything got forwarded over to the new
domain. After buying the domain name and setting everything up in Cloudflare
(including moving my paid plan over), I pointed the new domain at my server and
then set up a new NixOS configuration block to have that domain name point to my
site binary:
```nix
services.nginx.virtualHosts."xeiaso.net" = {
locations."/" = {
proxyPass = "http://unix:${toString cfg.sockPath}";
proxyWebsockets = true;
};
forceSSL = cfg.useACME;
useACMEHost = "xeiaso.net";
extraConfig = ''
access_log /var/log/nginx/xesite.access.log;
'';
};
```
After that was working, I got a list of all the things that probably shouldn't
be redirected. Most HTTP clients should do the right thing when they get a
permanent redirect to a new URL. However, we live in
a fallen world where we cannot expect clients to do the right thing. Especially
RSS feed readers.
So I made a list of all the things that I was afraid to make permanent redirects
for and here it is:
* `/jsonfeed` - a JSONFeed package for Go
([docs](https://pkg.go.dev/christine.website/jsonfeed)), I didn't want to
break builds by issuing a permanent redirect that would not match the
[go.mod](https://tulpa.dev/Xe/jsonfeed/src/branch/master/go.mod) file.
* `/.within/health` - the healthcheck route used by monitoring. I didn't want to
find out if NodePing blew up on a 301.
* `/.within/website.within.xesite/new_post` - the URL used by the [Android
app](https://play.google.com/store/apps/details?id=website.christine.xesite)
widget to let you know when a new post is published. I didn't want to find out
if Android's HTTP library handles redirects properly or not.
* `/blog.rss` - RSS feed readers are badly implemented. I didn't want to find
out if it would break people's feeds entirely. I actually care about people
that read this blog over RSS and I'm sad that poorly written feed readers
punish this server so much.
* `/blog.atom` - See above.
* `/blog.json` - See above.
Now that I have the list of URLs to not forward, I can finally write the small
bit of Nginx config that will set up permanent forwards (HTTP status code 301)
for every link pointing to the old domain. It will look something like this:
```nginx
location / {
return 301 https://xeiaso.net$request_uri;
}
```
<xeblog-conv name="Mara" mood="hacker">Note that it's using `$request_uri` and
not just `$uri`. If you use `$uri` you run the risk of [CRLF
injection](https://reversebrain.github.io/2021/03/29/The-story-of-Nginx-and-uri-variable/),
which will allow any random attacker to inject HTTP headers into incoming
requests. This is not a good thing to have happen, to say the
least.</xeblog-conv>
So I wrote a little bit of NixOS config that automatically bridges the gap:
```nix
services.nginx.virtualHosts."christine.website" = let proxyOld = {
proxyPass = "http://unix:${toString cfg.sockPath}";
proxyWebsockets = true;
}; in {
locations."/jsonfeed" = proxyOld;
locations."/.within/health" = proxyOld;
locations."/.within/website.within.xesite/new_post" = proxyOld;
locations."/blog.rss" = proxyOld;
locations."/blog.atom" = proxyOld;
locations."/blog.json" = proxyOld;
locations."/".extraConfig = ''
return 301 https://xeiaso.net$request_uri;
'';
forceSSL = cfg.useACME;
useACMEHost = "christine.website";
extraConfig = ''
access_log /var/log/nginx/xesite_old.access.log;
'';
};
```
This will point all the scary paths to the site itself and have
`https://christine.website/whatever` get forwarded to
`https://xeiaso.net/whatever`. This makes sure that every single link that
anyone has ever posted will get properly forwarded. It keeps those old links
from rotting and helps ensure that I keep my hard-earned SEO.
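If you want to sanity check a setup like this, the easiest thing is to hit an
old URL and look at the status code and `Location` header yourself. Here's a
small sketch of that in Go (this is just an illustration of mine, it's not part
of the site's codebase):

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Don't follow the redirect; we want to look at the 301 itself.
	client := &http.Client{
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}

	resp, err := client.Get("https://christine.website/blog/the-origin-of-h-2015-12-14")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	fmt.Println(resp.StatusCode, resp.Header.Get("Location"))
}
```

If everything is wired up right, this should print `301` followed by the same
path on `xeiaso.net`.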
I also renamed my email address to `me@xeiaso.net`. Please update your address
books and spam filters accordingly. Also update my name to `Xe Iaso` if you
haven't already.
I've got some projects on the back burner that will make this blog even better!
Stay tuned and stay frosty.
What was formerly known as the "christine dot website cinematic universe" is now
known as the "xeiaso dot net cinematic universe".

View File

@@ -78,7 +78,7 @@ for this:
```
Xe Iaso (zi ai-uh-so)
-https://christine.website
+https://xeiaso.net
.i la budza pu cusku lu
<<.i ko snura .i ko kanro

View File

@@ -36,7 +36,7 @@ Your website should include at least the following things:
- Links to or words about projects of yours that you are proud of
- Some contact information (an email address is a good idea too)
-If you feel comfortable doing so, I'd also suggest putting your [resume](https://christine.website/resume)
+If you feel comfortable doing so, I'd also suggest putting your [resume](https://xeiaso.net/resume)
on this site too. Even if it's just got your foodservice jobs or education
history (including your high school diploma if need be).
@@ -47,7 +47,7 @@ not.
## Make a Tech Blog On That Site
This has been the single biggest thing to help me grow professionally. I regularly
-put [articles](https://christine.website/blog) on my blog, sometimes not even about
+put [articles](https://xeiaso.net/blog) on my blog, sometimes not even about
technology topics. Even if you are writing about your take on something people have
already written about, it's still good practice. Your early posts are going to be
rough. It's normal to not be an expert when starting out in a new skill.

View File

@@ -54,7 +54,7 @@ by it. That attempt to come out failed and I was put into Christian
writing down my thoughts in a journal to this day.
So that day I hit "send" on [the
-email](https://christine.website/blog/coming-out-2015-12-01) was mortally
+email](https://xeiaso.net/blog/coming-out-2015-12-01) was mortally
terrifying. All that fear from so long ago came raging up to the surface and I
was left in a crying and vulnerable state. However it ended up being a good kind
of cry, the healing kind.

View File

@@ -21,7 +21,7 @@ named [dyson][dyson] in order to help me manage Terraform as well as create
Kubernetes manifests from [a template][template]. This works for the majority of
my apps, but it is difficult to extend at this point for a few reasons:
-[cultk8s]: https://christine.website/blog/the-cult-of-kubernetes-2019-09-07
+[cultk8s]: https://xeiaso.net/blog/the-cult-of-kubernetes-2019-09-07
[dyson]: https://github.com/Xe/within-terraform/tree/master/dyson
[template]: https://github.com/Xe/within-terraform/blob/master/dyson/src/dysonPkg/deployment_with_ingress.yaml

View File

@@ -0,0 +1,205 @@
---
title: Writing Coherently At Scale
date: 2022-06-29
tags:
- writing
vod:
youtube: https://youtu.be/pDOoqqu06-8
twitch: https://www.twitch.tv/videos/1513874389
---
As someone who does a lot of writing, I have been asked how to write about
things. I have been asked about it enough that I am documenting this here so you
can all understand my process. This is not a prescriptive system that you must
follow in order to make Quality Content™️; it is simply what I do.
<xeblog-conv name="Cadey" mood="coffee">I honestly have no idea if this is a
"correct" way of doing things, but it seems to work well enough. Especially so
if you are reading this.</xeblog-conv>
<xeblog-hero file="great-wave-cyberpunk" prompt="the great wave off of kanagawa, cyberpunk, hanzi inscription"></xeblog-hero>
## The Planning Phase
To start out the process of writing about something, I usually like to start
with the end goal in mind. If I am writing about an event or technology thing,
I'll start out with a goal that looks something like this:
> Explain a split DNS setup and its advantages and weaknesses so that people can
> make more informed decisions about their technical setups.
It doesn't have to be very complicated or intricate. Most of the complexity
comes up naturally during the process of writing the intermediate steps. Think
about the end goal or what you want people to gain from reading the article.
I've also found it helps to think about the target audience and assumed skills
of the reader. I'll usually list out the kind of person that would benefit from
this the most and how it will help them. Here's an example:
> The reader is assumed to have some context about what DNS is and wants to help
> make their production environment more secure, but isn't totally clear on how
> it helps and what tradeoffs are made.
State what the reader is to you and how the post benefits them. Underthink it.
It's tempting to overthink this, but really don't. You can overthink the
explanations later.
### The Outline
Once I have an end goal and the target audience in mind, then I make an outline
of what I want the post to contain. This outline will have top level items for
generic parts of the article or major concepts/steps and then I will go in and
add more detail inside each top level item. Here is an example set of top level
items for that split DNS post:
```markdown
- Introduction
- Define split DNS
- How split DNS is different
- Where you can use split DNS
- Advantages of split DNS
- Tradeoffs of split DNS
- Conclusion
```
Each step should build on the last and help you reach towards the end goal.
After I write the top level outline, I start drilling down into more detail. As
I drill down into more detail about a thing, the bullet points get nested
deeper, but when topics change then I go down a line. Here's an example:
```markdown
- Introduction
- What is DNS?
- Domain Name Service
- Maps names to IP addresses
- Sometimes it does other things, but we're not worrying about that today
- Distributed system
- Intended to have the same data everywhere in the world
- It can take time for records to be usable from everywhere
```
Then I will go in and start filling in the bullet tree with links and references
to each major concept or other opinions that people have had about the topic.
For example:
```markdown
- Introduction
- What is DNS?
- Domain Name Service
- https://datatracker.ietf.org/doc/html/rfc1035
- Maps names to IP addresses
- Sometimes it does other things, but we're not worrying about that today
- Distributed system
- Intended to have the same data everywhere in the world
- It can take time for records to be usable from everywhere
- https://jvns.ca/blog/2021/12/06/dns-doesn-t-propagate/
```
These help me write about the topic and give me links to add to the post so that
people can understand more if they want to. You should spend most of your time
writing the outline. The rest is really just restating the outline in sentences.
## Writing The Post
After each top level item is fleshed out enough, I usually pick somewhere to
start and add some space after a top level item. Then I just start writing. Each
top level item usually maps to a few paragraphs / a section of the post. I
usually like to have each section have its own little goal / context to it so
that readers start out from not understanding something and end up understanding
it better. Here's an example:
> If you have used a computer in the last few decades or so, you have probably
> used the Domain Name Service (DNS). DNS maps human-readable names (like
> `google.com`) to machine-readable IP addresses (like `182.48.247.12`). Because
> of this, DNS is one of the bedrock protocols of the modern internet and it
> usually is the cause of most failures in big companies.
>
> DNS is a globally distributed system without any authentication or way to
> ensure that only authorized parties can query IP addresses for specific domain
> names. As a consequence, anyone can get the IP address
> of a given server if they have the DNS name for it. This also means that
> updating a DNS record can take a nontrivial amount of time to be visible from
> everywhere in the world.
>
> Instead of using public DNS records for internal services, you can set up a
> split DNS configuration so that you run an internal DNS server that has your
> internal service IP addresses obscured away from the public internet. This
> means that attackers can't get their hands on the IP addresses of your
> services so that they are harder to attack. In this article, I'm going to
> spell out how this works, the advantages of this setup, the tradeoffs made in
> the process and how you can implement something like this for yourself.
In the process of writing, I will find gaps in the outline and just fix them by
writing more words than the outline suggested. This is okay, and somewhat
normal. Go with the flow.
I expand each major thing into its component paragraphs and will break things up
into sections with markdown headers if there is a huge change in topics. Adding
section breaks can also help people stay engaged with the post. Giant walls of
text are hard to read and can make people lose focus easily.
Another trick I use to avoid my posts being giant walls of text is what I call
"conversation snippets". These look like this:
<xeblog-conv name="Mara" mood="hacker">These are words and I am saying
them!</xeblog-conv>
I use them for both creating [Socratic
dialogue](https://en.wikipedia.org/wiki/Socratic_dialogue) and to add prose
flair to my writing. I am more of a prose writer [by
nature](https://xeiaso.net/blog/the-oasis), and I find that this mix allows me
to keep both technical and artistic writing up to snuff.
<xeblog-conv name="Cadey" mood="enby">Amusingly, I get asked if the characters
in my blog are separate people all giving their input into things. They are
characters, nothing more. If you ever got an impression otherwise, then I have
done my job as a writer _incredibly well_.</xeblog-conv>
Just flesh things out and progressively delete parts of the outline as you go.
It gets easier.
### Writing The Conclusion
I have to admit, I really suck at writing conclusions. They are annoying for me
to write because I usually don't know what to put there. Sometimes I won't even
write a conclusion at all and just end the article there. This doesn't always
work though.
A lot of the time when I am describing how to do things I will end the article
with a "call to action". This is a few sentences that encourages the reader to
try the thing that I've been writing about out for themselves. If I was turning
that split DNS article from earlier into a full article, the conclusion
could look something like this:
> ---
>
> If you want an easy way to try out a split DNS configuration, install
> [Tailscale](https://tailscale.com/) on a couple virtual machines and enable
> [MagicDNS](https://tailscale.com/kb/1081/magicdns/). This will set up a split
> DNS configuration with a domain that won't resolve globally, such as
> `hostname.example.com.beta.tailscale.net`, or just `hostname` for short.
>
> I use this in my own infrastructure constantly. It has gotten to the point
> where I regularly forget that Tailscale is involved at all, and become
> surprised when I can't just access machines by name.
>
> A split DNS setup isn't a security feature (if anything, it's more of an
> obscurity feature), but you can use it to help administrate your systems by
> making your life easier. You can update records on your own schedule and you
> don't have to worry about outside attackers getting the IP addresses of your
> services.
I don't like giving the conclusion a heading, so I'll usually use a [horizontal
rule (`---` or `<hr
/>`)](https://www.coffeecup.com/help/articles/what-is-a-horizontal-rule/) to
break it off.
---
This is how I write about things. Do you have a topic in mind that you have
wanted to write about for a while? Try this system out! If you get something
that you like and want feedback on how to make it shine, email me at
`iwroteanarticle at xeserv dot us` with either a link to it or the draft
somehow. I'll be sure to read it and reply back with both what I liked and some
advice on how to make it even better.

View File

@@ -82,7 +82,7 @@ terrible idea. Microservices architectures are not planned. They are an
evolutionary result, not a fully anticipated feature.
Finally, don’t “design for the future”. The future [hasn’t happened
-yet](https://christine.website/blog/all-there-is-is-now-2019-05-25). Nobody
+yet](https://xeiaso.net/blog/all-there-is-is-now-2019-05-25). Nobody
knows how it’s going to turn out. The future is going to happen, and you can
either adapt to it as it happens in the Now or fail to. Don’t make things overly
modular, that leads to insane things like dynamically linking parts of an

View File

@@ -279,7 +279,7 @@ step.
The deploy step does two small things. First, it installs
[dhall-yaml](https://github.com/dhall-lang/dhall-haskell/tree/master/dhall-yaml)
for generating the Kubernetes manifest (see
-[here](https://christine.website/blog/dhall-kubernetes-2020-01-25)) and then
+[here](https://xeiaso.net/blog/dhall-kubernetes-2020-01-25)) and then
runs
[`scripts/release.sh`](https://tulpa.dev/cadey/printerfacts/src/branch/master/scripts/release.sh):

View File

@@ -67,7 +67,7 @@ Hopefully Valve can improve the state of VR on Linux with the "deckard".
2021 has had some banger releases. Halo Infinite finally dropped. Final Fantasy
7 Remake came to PC. [Metroid
-Dread](https://christine.website/blog/metroid-dread-review-2021-10-10) finally
+Dread](https://xeiaso.net/blog/metroid-dread-review-2021-10-10) finally
came out after being rumored for more than half of my lifetime. Forza Horizon 5
raced out into the hearts of millions. Overall, it was a pretty good year to be
a gamer.

View File

@@ -0,0 +1,95 @@
---
title: "Fly.io: the Reclaimer of Heroku's Magic"
date: 2022-05-15
tags:
- flyio
- heroku
vod:
twitch: https://www.twitch.tv/videos/1484123245
youtube: https://youtu.be/BAgzkKpLVt4
---
Heroku was catalytic to my career. It's been hard to watch the fall from grace.
Don't get me wrong, Heroku still _works_, but it's obviously been in maintenance
mode for years. When I worked there, there was a goal that just kind of grew in
scope over and over without reaching an end state: the Dogwood stack.
In Heroku each "stack" is the substrate the dynos run on. It encompasses the AWS
runtime, the HTTP router, the logging pipeline and a bunch of the other
infrastructure like the slug builder and the deployment infrastructure. The
three stacks Heroku has used are named after trees: Aspen, Bamboo and Cedar.
Every Heroku app today runs on the Cedar stack, and compared to Bamboo it was a
generational leap in capability. Cedar was what introduced buildpacks and
support for any language under the sun. Prior stacks railroaded you into Ruby on
Rails (Heroku used to be a web IDE for making Rails apps). However there were
always plans to improve with another generational leap. This ended up being
called the "Dogwood stack", but Dogwood never totally materialized because it
was too ambitious for Heroku to handle post-acquisition. Parts of Dogwood's
roadmap ended up being used in the implementation of Private Spaces, but as a
whole I don't expect Dogwood to materialize in Heroku in the way we all had
hoped.
However, I can confidently say that [fly.io](https://fly.io) seems like a viable
inheritor of the mantle of responsibility that Heroku has left behind. fly.io is
a Platform-as-a-Service that hosts your applications on top
of physical dedicated servers run all over the world instead of being a reseller
of AWS. This allows them to get your app running in multiple regions for a lot
less than it would cost to run it on Heroku. They also use anycasting to allow
your app to use the same IP address globally. The internet itself will load
balance users to the nearest instance using BGP as the load balancing
substrate.
<xeblog-conv name="Cadey" mood="enby">People have been asking me what I would
suggest using instead of Heroku. I have been unable to give a good option until
now. If you are dissatisfied with the neglect of Heroku in the wake of the
Salesforce acquisition, take a look at fly.io. Its free tier is super generous.
I worked at Heroku and I am beyond satisfied with it. I'm considering using it
for hosting some personal services that don't need something like
NixOS.</xeblog-conv>
Applications can be built either using [cloud native
buildpacks](https://fly.io/docs/reference/builders/), Dockerfiles or arbitrary
docker images that you generated with something like Nix's
`pkgs.dockerTools.buildLayeredImage`. This gives you freedom to do whatever you
want like the Cedar stack, but at a fraction of the cost. Its default instance
size is likely good enough to run the blog you are reading right now and would
be able to do that for $2 a month plus bandwidth costs (I'd probably estimate
that to be about $3-5, depending on how many times I get on the front page of
Hacker News).
You can have persistent storage in the form of volumes, poke the internal DNS
server fly.io uses for service discovery, run apps that use arbitrary TCP/UDP
ports (even a DNS server!), connect to your internal network over WireGuard, ssh
into your containers, and import Heroku apps into fly.io without having to
rebuild them. This is what the Dogwood stack should have been. This represents a
generational leap in the capabilities of what a Platform as a Service can do.
The stream VOD in the footer of this post contains my first impressions using
fly.io to try and deploy an app written with [Deno](https://deno.land) to the
cloud. I ended up creating a terrible CRUD app on stream using SQLite that
worked perfectly beyond expectations. I was able to _restart the app_ and my
SQLite database didn't get blown away. I could easily imagine myself combining
something like [litestream](https://litestream.io) into my docker images to
automate offsite backups of SQLite databases like this. It was magical.
<xeblog-conv name="Mara" mood="happy">If you've never really used Heroku, for
context each dyno has a mutable filesystem. However that filesystem gets blown
away every time a dyno reboots. Having something that is mutable and persistent
is mind-blowing.</xeblog-conv>
Everything else you expect out of Heroku works like you'd expect in fly.io. The
only things I can see missing are automated Redis hosting by the platform
(however this seems intentional as fly.io is generic enough [to just run redis
directly for you](https://fly.io/docs/reference/redis/)) and the marketplace.
The marketplace being absent is super reasonable, seeing as Heroku's marketplace
only really started existing as a result of them being the main game in town
with all the mindshare. fly.io is a voice among a chorus, so it's understandable
that it wouldn't have the same treatment.
Overall, I would rate fly.io as a worthy inheritor of Heroku's mantle as the
platform as a service that is just _magic_. It Just Works™️. There was no
fighting it at a platform level; it just worked. Give it a try.
<xeblog-conv name="Cadey" mood="enby">Don't worry
[@tqbf](https://twitter.com/tqbf), fly.io put in a good showing. I still wanna
meet you at some conference.</xeblog-conv>

View File

@@ -8,7 +8,7 @@ series: conlangs
`h` is a conlang project that I have been working off and on for years. It is infinitely simply teachable, trivial to master and can be used to represent the entire scope of all meaning in any facet of the word. All with a single character.
-This is a continuation from [this post](https://christine.website/blog/the-origin-of-h-2015-12-14). If this post makes sense to you, please let me know and/or schedule a psychologist appointment just to be safe.
+This is a continuation from [this post](https://xeiaso.net/blog/the-origin-of-h-2015-12-14). If this post makes sense to you, please let me know and/or schedule a psychologist appointment just to be safe.
## Phonology

View File

@@ -363,14 +363,14 @@ my blog's [JSONFeed](/blog.json):
#!/usr/bin/env bash
# xeblog-post.sh
-curl -s https://christine.website/blog.json | jq -r '.items[0] | "\(.title) \(.url)"'
+curl -s https://xeiaso.net/blog.json | jq -r '.items[0] | "\(.title) \(.url)"'
```
At the time of writing this post, here is the output I get from this command:
```
$ ./xeblog-post.sh
-Anbernic RG280M Review https://christine.website/blog/rg280m-review
+Anbernic RG280M Review https://xeiaso.net/blog/rg280m-review
```
What else could you do with pipes and redirection? The cloud's the limit!

View File

@@ -16,7 +16,7 @@ it. This is a sort of spiritual successor to my old
ecosystem since then, as well as my understanding of the language.
[go]: https://golang.org
-[gswg]: https://christine.website/blog/getting-started-with-go-2015-01-28
+[gswg]: https://xeiaso.net/blog/getting-started-with-go-2015-01-28
Like always, feedback is very welcome. Any feedback I get will be used to help
make this book even better.

View File

@@ -0,0 +1,565 @@
---
title: Crimes with Go Generics
date: 2022-04-24
tags:
- cursed
- golang
- generics
vod:
twitch: https://www.twitch.tv/videos/1465727432
youtube: https://youtu.be/UiJtaKYQnzg
---
Go 1.18 added [generics](https://go.dev/doc/tutorial/generics) to the
language. This allows you to have your types take types as parameters
so that you can create composite types (types out of types). This lets
you get a lot of expressivity and clarity about how you use Go.
However, if you are looking for good ideas on how to use Go generics,
this is not the post for you. This is full of bad ideas. This post is
full of ways that you should not use Go generics in production. Do not
copy the examples in this post into production. By reading this post
you agree to not copy the examples in this post into production.
I have put my code for this article [on my git
server](https://tulpa.dev/internal/gonads). This repo has been
intentionally designed to be difficult to use in production by me
taking the following steps:
1. I have created it under a Gitea organization named `internal`. This
will make it impossible for you to import the package unless you
are using it from a repo on my Gitea server. Signups are disabled
on that Gitea server. See
[here](https://go.dev/doc/go1.4#internalpackages) for more
information about the internal package rule.
1. The package documentation contains a magic comment that will make
Staticcheck and other linters complain that you are using this
package even though it is deprecated.
<xeblog-conv name="Mara" mood="hmm">What is that package
name?</xeblog-conv>
<xeblog-conv name="Cadey" mood="enby">It's a reference to
Haskell's monads, but adapted to Go as a pun.</xeblog-conv>
<xeblog-conv name="Numa" mood="delet">A gonad is just a gonoid in the
category of endgofunctors. What's there to be confused
about?</xeblog-conv>
<xeblog-conv name="Cadey" mood="facepalm">\*sigh\*</xeblog-conv>
## `Queue[T]`
To start things out, let's show off a problem in computer science that
is normally difficult. Let's make an MPMS (multiple producer, multiple
subscriber) queue.
First we are going to need a struct to wrap everything around. It will
look like this:
```go
type Queue[T any] struct {
data chan T
}
```
This creates a type named `Queue` that takes a type argument `T`. This
`T` can be absolutely anything, but the only requirement is that the
data is a Go type.
You can create a little constructor for `Queue` instances with a
function like this:
```go
func NewQueue[T any](size int) Queue[T] {
return Queue[T]{
data: make(chan T, size),
}
}
```
Now let's make some methods on the `Queue` struct that will let us
push to the queue and pop from the queue. They could look like this:
```go
func (q Queue[T]) Push(val T) {
q.data <- val
}
func (q Queue[T]) Pop() T {
return <-q.data
}
```
These methods will let you put data at the end of the queue and then
pull it out from the beginning. You can use them like this:
```go
q := NewQueue[string](5)
q.Push("hi there")
str := q.Pop()
if str != "hi there" {
panic("string is wrong")
}
```
This is good, but the main problem comes from trying to pop from an
empty queue. It'll stay there forever doing nothing. We can use the
`select` statement to allow us to write a nonblocking version of the
`Pop` function:
```go
func (q Queue[T]) TryPop() (T, bool) {
select {
case val := <-q.data:
return val, true
default:
return nil, false
}
}
```
However when we try to compile this, we get an error:
```
cannot use nil as T value in return statement
```
In that code, `T` can be _anything_, including values that may not be
able to be `nil`. We can work around this by taking advantage of the
`var` statement, which makes a new variable and initializes it to the
zero value of that type:
```go
func Zero[T any]() T {
var zero T
return zero
}
```
When we run the `Zero` function like
[this](https://go.dev/play/p/Z5tRs1-aKBU):
```go
log.Printf("%q", Zero[string]())
log.Printf("%v", Zero[int]())
```
We get output that looks like this:
```
2009/11/10 23:00:00 ""
2009/11/10 23:00:00 0
```
So we can adapt the `default` branch of `TryPop` to this:
```go
func (q Queue[T]) TryPop() (T, bool) {
select {
case val := <-q.data:
return val, true
default:
var zero T
return zero, false
}
}
```
And finally write a test for good measure:
```go
func TestQueue(t *testing.T) {
q := NewQueue[int](5)
for i := range make([]struct{}, 5) {
q.Push(i)
}
for range make([]struct{}, 5) {
t.Log(q.Pop())
}
}
```
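`TryPop` never gets used again in this post, so here's a quick usage
sketch of my own (it's not in the gonads repo): drain whatever happens
to be in the queue without ever blocking.

```go
func TestTryPopDrain(t *testing.T) {
	q := NewQueue[int](2)
	q.Push(1)
	q.Push(2)

	// Keep popping until the queue says it is empty instead of
	// blocking forever on an empty channel.
	for {
		val, ok := q.TryPop()
		if !ok {
			break
		}
		t.Log(val)
	}
}
```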
## `Option[T]`
In Go, people use pointer values for a number of reasons:
1. A pointer value may be `nil`, so this can signal that the value may
not exist.
1. A pointer value only stores the offset in memory, so passing around
the value causes Go to only copy the pointer instead of copying the
value being passed around.
1. A pointer value being passed to a function lets you mutate values
in the value being passed. Otherwise Go will copy the value and you
can mutate it all you want, but the changes you made will not
persist past that function call. You can sort of consider this to
be "immutable", but it's not as strict as something like passing
`&mut T` to functions in Rust.
This `Option[T]` type will help us model the first kind of constraint:
a value that may not exist. We can define it like this:
```go
type Option[T any] struct {
val *T
}
```
Then you can define a couple methods to use this container:
```go
var ErrOptionIsNone = errors.New("gonads: Option[T] has no value")
func (o Option[T]) Take() (T, error) {
if o.IsNone() {
var zero T
return zero, ErrOptionIsNone
}
return *o.val, nil
}
func (o *Option[T]) Set(val T) {
o.val = &val
}
func (o *Option[T]) Clear() {
o.val = nil
}
```
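The test below and the `NewThunk` constructor later both call a
`NewOption` function that doesn't actually appear in this post. A
minimal sketch of it, assuming all it needs to do is hand back an
empty `Option`, could look like this:

```go
// NewOption makes an Option[T] that starts out with no value in it.
// This sketch assumes returning a pointer, since NewThunk stores it
// in a *Option[T] field.
func NewOption[T any]() *Option[T] {
	return &Option[T]{}
}
```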
Another function that will be useful is an `IsSome` method that
tells you if the `Option` contains a value. We can use this to also
implement an `IsNone` function that will let you tell if that `Option`
_does not_ contain a value. They will look like this:
```go
func (o Option[T]) IsSome() bool {
return o.val != nil
}
func (o Option[T]) IsNone() bool {
return !o.IsSome()
}
```
We can say that if an Option does not have something in it, it has
nothing in it. This will let us use `IsSome` to implement `IsNone`.
Finally we can add all this up to a `Yank` function, which is similar
to
[`Option::unwrap()`](https://doc.rust-lang.org/rust-by-example/error/option_unwrap.html)
in Rust:
```go
func (o Option[T]) Yank() T {
if o.IsNone() {
panic("gonads: Yank on None Option")
}
return *o.val
}
```
This will all be verified in a Go test:
```go
func TestOption(t *testing.T) {
o := NewOption[string]()
val, err := o.Take()
if err == nil {
t.Fatalf("[unexpected] wanted no value out of Option[T], got: %v", val)
}
o.Set("hello friendos")
_, err = o.Take()
if err != nil {
t.Fatalf("[unexpected] wanted no value out of Option[T], got: %v", err)
}
o.Clear()
if o.IsSome() {
t.Fatal("Option should have none, but has some")
}
}
```
<xeblog-conv name="Mara" mood="hmm">I think that
<code>Option[T]</code> will be the most useful outside of this post.
It will need some work and generalization, but this may be something
that the Go team will have to make instead of some random
person.</xeblog-conv>
## `Thunk[T]`
In computer science we usually deal with values and computations.
Usually we deal with one or the other. Sometimes computations can be
treated as values, but this is very rare. It's even more rare to take
a partially completed computation and use it as a value.
A thunk is a partially evaluated computation that is stored as a
value. For an idea of what I'm talking about, let's consider this
JavaScript function:
```javascript
const add = (x, y) => x + y;
console.log(add(2, 2)); // 4
```
This creates a function called `add` that takes two arguments and
returns one value. This is great in many cases, but it makes it
difficult for us to bind only one argument to the function and leave
the other as a variable input. What if computing the left hand side of
`add` is expensive and only needed once?
Instead we can write `add` like this:
```javascript
const add = (x) => (y) => x + y;
console.log(add(2)(2)); // 4
```
This also allows us to make partially evaluated forms of `add` like
`addTwo`:
```javascript
const addTwo = add(2);
console.log(addTwo(3)); // 5
```
This can also be used with functions that do not take arguments, so
you can pass around a value that isn't computed yet and then only
actually compute it when needed:
```javascript
const hypotenuse = (x, y) => Math.sqrt(x * x + y * y);
const thunk = () => hypotenuse(3, 4);
```
You can then pass this thunk to functions _without having to evaluate
it_ until it is needed:
```javascript
dominateWorld(thunk); // thunk is passed as an unevaluated function
```
We can implement this in Go by using a type like the following:
```go
type Thunk[T any] struct {
doer func() T
}
```
And then force the thunk to evaluate with a function such as `Force`:
```go
func (t Thunk[T]) Force() T {
return t.doer()
}
```
This works; however, we can also go one step further than we did with
the JavaScript example. We can take advantage of the `Thunk[T]`
container to cache the result of the `doer` function so that calling
it multiple times will only actually run it once and return the same
result.
<xeblog-conv name="Mara" mood="hacker">Keep in mind that this will
only work for _pure functions_, or functions that don't modify the
outside world. This isn't just global variables either, but any
function that modifies any state anywhere, including network and
filesystem IO.</xeblog-conv>
This would make `Thunk[T]` be implemented like this:
```go
type Thunk[T any] struct {
doer func() T // action being thunked
o *Option[T] // cache for complete thunk data
}
func (t *Thunk[T]) Force() T {
if t.o.IsSome() {
return t.o.Yank()
}
t.o.Set(t.doer())
return t.o.Yank()
}
func NewThunk[T any](doer func() T) *Thunk[T] {
return &Thunk[T]{
doer: doer,
o: NewOption[T](),
}
}
```
Now, for an overcomplicated example you can use this to implement the
Fibonacci function. We can start out by writing a naive Fibonacci
function like this:
```go
func Fib(n int) int {
if n <= 1 {
return n
}
return Fib(n-1) + Fib(n-2)
}
```
We can turn this into a Go test in order to see how long it takes for
it to work:
```go
func TestRecurFib(t *testing.T) {
t.Log(Fib(40))
}
```
Then when we run `go test`:
```console
$ go test -run RecurFib
=== RUN TestRecurFib
thunk_test.go:15: 102334155
--- PASS: TestRecurFib (0.36s)
```
However, we can make this a lot more complicated with the power of the
`Thunk[T]` type:
```go
func TestThunkFib(t *testing.T) {
cache := make([]*Thunk[int], 41)
var fib func(int) int
fib = func(n int) int {
if cache[n].o.IsSome() {
return *cache[n].o.val
}
return fib(n-1) + fib(n-2)
}
for i := range cache {
i := i
cache[i] = NewThunk(func() int { return fib(i) })
}
cache[0].o.Set(0)
cache[1].o.Set(1)
t.Log(cache[40].Force())
}
```
And then run the test:
```
=== RUN TestThunkFib
thunk_test.go:36: 102334155
--- PASS: TestThunkFib (0.60s)
```
<xeblog-conv name="Mara" mood="hmm">Why is this so much slower? This
should be caching the intermediate values. Maybe something like this
would be faster? This should complete near instantly,
right?</xeblog-conv>
```go
func TestMemoizedFib(t *testing.T) {
mem := map[int]int{
0: 0,
1: 1,
}
var fib func(int) int
fib = func(n int) int {
if result, ok := mem[n]; ok {
return result
}
result := fib(n-1) + fib(n-2)
mem[n] = result
return result
}
t.Log(fib(40))
}
```
```console
$ go test -run Memoized
=== RUN TestMemoizedFib
thunk_test.go:35: 102334155
--- PASS: TestMemoizedFib (0.00s)
```
<xeblog-conv name="Cadey" mood="enby">I'm not sure
either.</xeblog-conv>
If you change the `fib` function to this, it works, but it also steps
around the `Thunk[T]` type:
```go
fib = func(n int) int {
if cache[n].o.IsSome() {
return *cache[n].o.val
}
result := fib(n-1) + fib(n-2)
cache[n].o.Set(result)
return result
}
```
This completes instantly:
```
=== RUN TestThunkFib
thunk_test.go:59: 102334155
--- PASS: TestThunkFib (0.00s)
```
To be clear, this isn't the fault of Go generics. I'm almost certain
that my terrible code is causing this to be much slower.
<xeblog-conv name="Numa" mood="delet">This is the power of gonads:
making easy code complicated, harder to reason about and slower than
the naive approach! Why see this as terrible code when it creates an
amazing opportunity for cloud providers to suggest that people use
gonads' `Thunk[T]` so that they use more CPU and then have to pay cloud
providers more money for CPU! Think about the children!</xeblog-conv>
---
<xeblog-conv name="Cadey" mood="coffee">EDIT(2022 M04 25 05:56): amscanne on
Hacker News pointed out that my code was in fact wrong. My `fib` function should
have been a lot simpler.</xeblog-conv>
```go
fib = func(n int) int {
return cache[n-1].Force() + cache[n-2].Force()
}
```
<xeblog-conv name="Cadey" mood="facepalm">Applying this also makes the code run
instantly as I'd expect. I knew _something_ was _very wrong_, but I never
expected something this stupid. Thanks amscanne!</xeblog-conv>
<xeblog-conv name="Numa" mood="happy">Hey, it makes for good surrealism. If that
isn't a success, what is?</xeblog-conv>
---
I'm glad that Go has added generics to the language. It's certainly
going to make a lot of things a lot easier and more expressive. I'm
worried that the process of learning how to use generics in Go is
going to create a lot of churn and toil as people get up to speed on
when and where they should be used. These should be used in specific
cases, not as a bread and butter tool.
I hope this was an interesting look into how you can use generics in
Go, but again please don't use these examples in production.

View File

@@ -20,16 +20,16 @@ for browsers, but I've been using it for server-side tasks.
I have written more about/with WebAssembly in the past in these posts:
-- https://christine.website/talks/webassembly-on-the-server-system-calls-2019-05-31
-- https://christine.website/blog/olin-1-why-09-1-2018
-- https://christine.website/blog/olin-2-the-future-09-5-2018
-- https://christine.website/blog/land-1-syscalls-file-io-2018-06-18
-- https://christine.website/blog/templeos-2-god-the-rng-2019-05-30
+- https://xeiaso.net/talks/webassembly-on-the-server-system-calls-2019-05-31
+- https://xeiaso.net/blog/olin-1-why-09-1-2018
+- https://xeiaso.net/blog/olin-2-the-future-09-5-2018
+- https://xeiaso.net/blog/land-1-syscalls-file-io-2018-06-18
+- https://xeiaso.net/blog/templeos-2-god-the-rng-2019-05-30
This is a continuation of the following two posts:
-- https://christine.website/blog/the-origin-of-h-2015-12-14
-- https://christine.website/blog/formal-grammar-of-h-2019-05-19
+- https://xeiaso.net/blog/the-origin-of-h-2015-12-14
+- https://xeiaso.net/blog/formal-grammar-of-h-2019-05-19
All of the relevant code for h is [here](https://github.com/Xe/x/tree/master/cmd/h).

View File

@@ -0,0 +1,102 @@
---
title: "I Miss Heroku's DevEx"
date: 2022-05-12
---
If you've never really experienced it before, it's gonna sound really weird.
Basically the main way that Heroku worked is that they would set up a git remote
for each "app" it hosted. Each "app" had its source code in a git repo and a
"Procfile" that told Heroku what to do with it. So when it came time to deploy
that app, you'd just `git push heroku main` and then Heroku would just go off
and build that app and run it _somewhere_ in the cloud. You got back an HTTPS
URL and then bam, you had a website.
The developer experience didn't stop there. Most of how Heroku apps are
configured is via environment variables, and there were addons that let you
tell Heroku things like "hi yes I would like one (1) postgres please" and the
platform would spin up a database somewhere and drop a config variable into the
app's config. It was magic. Things just worked and it left you free to go do
what made you money.
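To give you an idea of what that looks like from inside the app, here's a tiny
sketch in Go. It assumes the usual Heroku conventions of a `PORT` variable set
by the platform and a `DATABASE_URL` variable dropped in by the Postgres addon;
everything else here is my own illustration, not Heroku's code.

```go
package main

import (
	"fmt"
	"log"
	"net/http"
	"os"
)

func main() {
	// Heroku tells the app which port to listen on via $PORT.
	port := os.Getenv("PORT")
	if port == "" {
		port = "8080"
	}

	// The Postgres addon drops its connection string into $DATABASE_URL.
	dbURL := os.Getenv("DATABASE_URL")

	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "database configured: %v\n", dbURL != "")
	})

	log.Fatal(http.ListenAndServe(":"+port, nil))
}
```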
Heroku's free tier got me the in I needed to make my career really start. If I
didn't have something like Heroku in my life I doubt that my career would be the
same or even I would be the same person I am today. It's really hard to describe
what having access to a platform that lets you turn ideas into production
quality code does to your output ability. I even ended up reinventing Heroku a
few times in my career (working for Deis and later reinventing most of the core
of Heroku as a project between jobs), but nothing really hit that same level of
wonder/magic that Heroku did.
I ended up working there and when I did I understood why Heroku had fallen so
much. Heroku is owned by Salesforce and Salesforce doesn't really understand
what they had acquired with Heroku. Heroku had resisted integration into the
larger Salesforce organization and as a result was really really starved for
headcount. I had to have a come-to-jesus meeting with the CTO of Heroku where I
spelled out my medical needs and how the insurance offered by the contracting agency
they were using was insufficient (showing comparisons between bills for blood
draws where paying with the insurance ended up costing me more than not using
it). I got hired, and that was just in time for Salesforce to really start
pulling Heroku into the fold.
The really great part about working at Heroku was that setting up a new service
was so easy that the majority of the productionalization checklist was just
enabling hidden feature flags to lock down the app. I'm surprised that didn't
get streamlined.
The Heroku I joined no longer exists. I joined Heroku but I left Salesforce. I
can't blame any of my coworkers from Heroku for fleeing the sinking ship. The
ship has been sinking for years but the culture of Heroku really stuck around
long enough that it was hard to realize the ship was sinking.
You can really see it in how long it's taken Heroku to react to [that one
horrible security event](https://status.heroku.com/incidents/2413) they've been
dealing with. Based on what I remember about the internal architecture (it was a
microservices tire fire unlike anything you have ever seen; it's part of the inspiration
that led me to write [this post](/blog/make-microservices-cluster-2022-01-27))
and the notes that have been put on the public facing status page, I'm guessing
that most of Heroku is "legacy" code (i.e. nobody on the team that made a given
service works there anymore) at this point. When I was there, most of the services
on my team were "legacy" code that was production-facing, load-bearing, and
critical to the company succeeding; but it was built to be reliable
enough that we could mostly ignore it until it was actually falling over. But
then, because of the way things were cobbled together, it could take a very
long time to actually fix issues because the symptoms were all over the place.
Don't get me wrong, I loved working there but it was mostly for the people. That
and the ability to say that I helped make Heroku better for the next generation.
If you've ever used the metrics tab on Heroku, chances are that you've
encountered my code indirectly. If you've ever done Heroku threshold autoscaling
or response time alerting, you've dealt with code I helped write. The body of
Heroku remains but the soul has long since fled.
At the few points of my career that I have tried to reinvent Heroku (be it on my
own or working for a company doing that), there has mostly been this weird
realization that in order to have a thing like Heroku exist it really needs to
be hosted by someone else in the cloud. One of the places I worked for was
selling self-hosted Heroku on top of CoreOS and fleetd (remember fleetd? that
was magical) and while it did have a lot of the same developer experience, it
never really had the same magic feeling. I had the same problem with my own
implementation. Sure you can get the app hosting part of Heroku fairly easily
(and with Docker being as mature as it was at that point yeah it was fairly
easy). But when it comes to the real experience of addons and the whole
ecosystem there, you really need either to get very lucky or become an industry
standard. Realistically though, you aren't going to be either lucky or an
industry standard, and then you need to also reinvent the next 80% of Heroku from
scratch on hardware that you don't control. It's no wonder those attempts ultimately
failed (even though one of them was bought out by Microsoft after doing a weird
Kubernetes pivot).
There was something really magical about the whole thing that I really miss to
this day. Heroku was at least a decade ahead of its time as far as developer
experience goes. Things Just Worked in ways that would probably put a lot of us
out of jobs if they really took off. I miss the process for putting something on
the internet being just a `git push` and trusting that the machine will take
care of the rest. I wonder if we'll ever really have something like that on top of Nix
or NixOS.
---
If you're reading this before the 12th, welcome to an experiment! I've been
wondering about how to make some of my posts Patreon exclusive for a week. This
post was published for my patrons on the 5th of May. Please don't share this
link around on social media until the 12th, but privately sharing it is okay.

View File

@ -14,7 +14,7 @@ goes into hitting enter on christine.website and this website being loaded.
## Beginnings ## Beginnings
The user types in `https://christine.website` into the address bar and hits The user types in `https://xeiaso.net` into the address bar and hits
enter on the keyboard. This sends a signal over USB to the computer and the enter on the keyboard. This sends a signal over USB to the computer and the
kernel polls the USB controller for a new message. It's recognized as from the kernel polls the USB controller for a new message. It's recognized as from the
keyboard. The input is then sent to the browser through an input driver talking keyboard. The input is then sent to the browser through an input driver talking

View File

@ -46,7 +46,7 @@ the Rust compiler.
[nixos]: https://nixos.org/nixos/ [nixos]: https://nixos.org/nixos/
[nix]: https://nixos.org/nix/ [nix]: https://nixos.org/nix/
[howistartnix]: https://christine.website/blog/how-i-start-nix-2020-03-08 [howistartnix]: https://xeiaso.net/blog/how-i-start-nix-2020-03-08
## A new project ## A new project

View File

@ -10,7 +10,7 @@ From time to time, I am outright wrong on my blog. This is one of those times.
In my [last post about Nix][nixpost], I didn't see the light yet. I think I do In my [last post about Nix][nixpost], I didn't see the light yet. I think I do
now, and I'm going to attempt to clarify below. now, and I'm going to attempt to clarify below.
[nixpost]: https://christine.website/blog/thoughts-on-nix-2020-01-28 [nixpost]: https://xeiaso.net/blog/thoughts-on-nix-2020-01-28
Let's talk about a more simple scenario: writing a service in Go. This service Let's talk about a more simple scenario: writing a service in Go. This service
will depend on at least the following: will depend on at least the following:

View File

@ -453,7 +453,7 @@ module. Here's how I do it:
You can add this to your `imports` in your server's `configuration.nix` using You can add this to your `imports` in your server's `configuration.nix` using
[the layout I described in this [the layout I described in this
post](https://christine.website/blog/morph-setup-2021-04-25). This would go in post](https://xeiaso.net/blog/morph-setup-2021-04-25). This would go in
the host-specific configuration folder. the host-specific configuration folder.
Once you've deployed this to a server, try to open the page in your browser: Once you've deployed this to a server, try to open the page in your browser:

View File

@ -308,19 +308,19 @@ And then you can register it in your `network.nix` like this:
This should help you get your servers wrangled into a somewhat consistent state. This should help you get your servers wrangled into a somewhat consistent state.
From here the following articles may be useful to give you ideas: From here the following articles may be useful to give you ideas:
- [Borg Backup Config](https://christine.website/blog/borg-backup-2021-01-09) - [Borg Backup Config](https://xeiaso.net/blog/borg-backup-2021-01-09)
- [Nixops Services On Your Home - [Nixops Services On Your Home
Network](https://christine.website/blog/nixops-services-2020-11-09) (just be Network](https://xeiaso.net/blog/nixops-services-2020-11-09) (just be
sure to ignore the part where it mentions `deployment.keys`, you can replace sure to ignore the part where it mentions `deployment.keys`, you can replace
it with the semantically identical it with the semantically identical
[`deployment.secrets`](https://github.com/DBCDK/morph/blob/master/examples/secrets.nix) [`deployment.secrets`](https://github.com/DBCDK/morph/blob/master/examples/secrets.nix)
as described in the morph documentation) as described in the morph documentation)
- [Prometheus and - [Prometheus and
Aegis](https://christine.website/blog/aegis-prometheus-2021-04-05) Aegis](https://xeiaso.net/blog/aegis-prometheus-2021-04-05)
- [My Automagic NixOS Wireguard - [My Automagic NixOS Wireguard
Setup](https://christine.website/blog/my-wireguard-setup-2021-02-06) Setup](https://xeiaso.net/blog/my-wireguard-setup-2021-02-06)
- [Encrypted Secrets with - [Encrypted Secrets with
NixOS](https://christine.website/blog/nixos-encrypted-secrets-2021-01-20) NixOS](https://xeiaso.net/blog/nixos-encrypted-secrets-2021-01-20)
Also feel free to dig around [the `common` folder of my `nixos-configs` Also feel free to dig around [the `common` folder of my `nixos-configs`
repo](https://github.com/Xe/nixos-configs/tree/master/common). There's a bunch repo](https://github.com/Xe/nixos-configs/tree/master/common). There's a bunch

View File

@ -3,6 +3,12 @@ title: My Career So Far in Dates/Titles/Salaries
date: 2019-03-14 date: 2019-03-14
--- ---
<div class="warning"><xeblog-conv name="Cadey" mood="coffee">This post is
outdated, see <a href="/salary-transparency">here</a> for more context on why
this data is made public. The table on this page will be automatically updated
to contain the data on my salary transparency page, but you should prefer that
page over this one when possible.</xeblog-conv></div>
Let this be inspiration to whoever is afraid of trying, failing and being fired. Let this be inspiration to whoever is afraid of trying, failing and being fired.
Every single one of these jobs has taught me lessons I've used daily in my Every single one of these jobs has taught me lessons I've used daily in my
career. career.
@ -26,20 +32,7 @@ might not want.
The following table is a history of my software career by title, date and salary The following table is a history of my software career by title, date and salary
(company names are omitted). (company names are omitted).
| Title | Start Date | End Date | Days Worked | Days Between Jobs | Salary | How I Left | <xeblog-salary-history></xeblog-salary-history>
|:----- |:---------- |:-------- |:----------- |:----------------- |:------ |:---------- |
| Junior Systems Administrator | November 11, 2013 | January 06, 2014 | 56 days | n/a | $50,000/year | Terminated |
| Software Engineering Intern | July 14, 2014 | August 27, 2014 | 44 days | 189 days | $35,000/year | Terminated |
| Consultant | September 17, 2014 | October 15, 2014 | 28 days | 21 days | $90/hour | Contract Lapsed |
| Consultant | October 27, 2014 | February 9, 2015 | 105 days | 12 days | $90/hour | Contract Lapsed |
| Site Reliability Engineer | March 30, 2015 | March 7, 2016 | 343 days | 49 days | $125,000/year | Demoted |
| Systems Administrator | March 8, 2016 | April 1, 2016 | 24 days | 1 day | $105,000/year | Bad terms |
| Member of Technical Staff | April 4, 2016 | August 3, 2016 | 121 days | 3 days | $135,000/year | Bad terms |
| Software Engineer | August 24, 2016 | November 22, 2016 | 90 days | 21 days | $105,000/year | Terminated |
| Consultant | February 13, 2017 | November 13, 2017 | 273 days | 83 days | don't remember | Hired |
| Senior Software Engineer | November 13, 2017 | March 8, 2019 | 480 days | 0 days | $150,000/year | Voluntary quit |
| Senior Site Reliability Expert | May 6, 2019 | October 27, 2020 | 540 days | 48 days | CAD$115,000/year (about USD$ 80k and change) | Voluntary quit |
| Software Designer | December 14, 2020 | *current* | n/a | n/a | CAD$135,000/year (about USD$ 105k and change) | n/a |
Even though I've been fired three times, I don't regret my career as it's been Even though I've been fired three times, I don't regret my career as it's been
thus far. I've been able to work on experimental technology integrating into thus far. I've been able to work on experimental technology integrating into

View File

@ -17,7 +17,7 @@ One thing that I do a lot is run virtual machines. Some of these stick around, a
lot of them are very ephemeral. I also like being able to get into these VMs lot of them are very ephemeral. I also like being able to get into these VMs
quickly if I want to mess around with a given distribution or OS. Normally I'd quickly if I want to mess around with a given distribution or OS. Normally I'd
run these on [my gaming run these on [my gaming
tower](https://christine.website/blog/nixos-desktop-flow-2020-04-25), however tower](https://xeiaso.net/blog/nixos-desktop-flow-2020-04-25), however
this makes my tower very load-bearing. I also want to play games sometimes on my this makes my tower very load-bearing. I also want to play games sometimes on my
tower, and even though there have been many strides in getting games to run well tower, and even though there have been many strides in getting games to run well
on Linux it's still not as good as I'd like it to be. on Linux it's still not as good as I'd like it to be.

View File

@ -58,7 +58,7 @@ la budza pu cusku lu
> May you be at peace. May you be happy. > May you be at peace. May you be happy.
- Buddha - Buddha
I will be reachable on the internet. See https://christine.website/contact to I will be reachable on the internet. See https://xeiaso.net/contact to
see contact information that will help you reach out to me. If you can, please see contact information that will help you reach out to me. If you can, please
direct replies to me@christine.website, that way I can read them after this direct replies to me@christine.website, that way I can read them after this
account gets disabled. account gets disabled.
@ -70,7 +70,7 @@ From my world to yours,
-- --
Christine Dodrill Christine Dodrill
https://christine.website https://xeiaso.net
``` ```
la budza pu cusku lu la budza pu cusku lu

View File

@ -3,7 +3,7 @@ title: New Site
date: 2016-12-18 date: 2016-12-18
--- ---
This post is now being brought to you by the new and improved [https://christine.website](https://christine.website). This post is now being brought to you by the new and improved [https://xeiaso.net](https://xeiaso.net).
This content is [markdown](/api/blog/post?name=new-site-2016-12-18) rendered by This content is [markdown](/api/blog/post?name=new-site-2016-12-18) rendered by
[Purescript](http://www.purescript.org/). The old [site](https://github.com/Xe/christine.website) [Purescript](http://www.purescript.org/). The old [site](https://github.com/Xe/christine.website)
is now being retired in favor of [this one](https://github.com/Xe/site). The old is now being retired in favor of [this one](https://github.com/Xe/site). The old

View File

@ -75,6 +75,9 @@ nix = {
Then rebuild your system and you can continue along with the article. Then rebuild your system and you can continue along with the article.
<xeblog-conv name="Mara" mood="hacker">EDIT: You can use WSL for this. See
[here](/blog/nix-flakes-4-wsl-2022-05-01) for more information.</xeblog-conv>
If you are not on NixOS, you will need to either edit `~/.config/nix/nix.conf` If you are not on NixOS, you will need to either edit `~/.config/nix/nix.conf`
or `/etc/nix/nix.conf` and add the following line to it: or `/etc/nix/nix.conf` and add the following line to it:
@ -204,12 +207,12 @@ Let's take a closer look at the higher level things in the flake:
your `flake.nix`. Ditto with "flake input" referring to the `inputs` attribute your `flake.nix`. Ditto with "flake input" referring to the `inputs` attribute
of your `flake.nix`.](conversation://Cadey/enby) of your `flake.nix`.](conversation://Cadey/enby)
When you ran `nix build` earlier, it defaulted to building the package in When you ran `nix build` earlier, it defaulted to building the `default` entry
`defaultPackage`. You can also build the `go-hello` package by running this in `packages`. You can also build the `default` package by running this
command: command:
```console ```console
$ nix build .#go-hello $ nix build .#default
``` ```
And if you want to build the copy I made for this post: And if you want to build the copy I made for this post:
@ -231,11 +234,13 @@ simplify that above `nix build` and `./result/bin/go-hello` cycle into a single
`go-hello` to be the default app: `go-hello` to be the default app:
```nix ```nix
# below defaultPackage # below packages
defaultApp = forAllSystems (system: { apps = forAllSystems (system: {
type = "app"; default = {
program = "${self.packages.${system}.go-hello}/bin/go-hello"; type = "app";
program = "${self.packages.${system}.default}/bin/go-hello";
};
}); });
``` ```
@ -270,16 +275,18 @@ project is using the same tools.
project folder I do not have any development tools project folder I do not have any development tools
available.](conversation://Cadey/enby) available.](conversation://Cadey/enby)
Flakes has the ability to specify this using the `devShell` flake output. You Flakes has the ability to specify this using the `devShells` flake output. You
can add it to your `flake.nix` using this: can add it to your `flake.nix` using this:
```nix ```nix
# after defaultApp # after apps
devShell = forAllSystems (system: devShells = forAllSystems (system:
let pkgs = nixpkgsFor.${system}; let pkgs = nixpkgsFor.${system};
in pkgs.mkShell { in {
buildInputs = with pkgs; [ go gopls goimports go-tools ]; default = pkgs.mkShell {
buildInputs = with pkgs; [ go gopls gotools go-tools ];
};
}); });
``` ```
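With a `devShells.<system>.default` output like that in place, running `nix develop`
with no extra arguments drops you into the default shell with those tools on your
`PATH`. A quick sanity check looks something like this:

```console
$ nix develop
$ which go gopls
```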
@ -415,7 +422,7 @@ world. To use a private repo, your flake input URL should look something like
this: this:
``` ```
ssh+git://git@github.com:user/repo git+ssh://git@github.com/user/repo?ref=main
``` ```
[I'm pretty sure you could use private git repos outside of flakes, however it [I'm pretty sure you could use private git repos outside of flakes, however it

View File

@ -72,7 +72,7 @@ Everything else we'll cover today will build on top of this.
Let's look back at the Go [example Let's look back at the Go [example
package](https://github.com/Xe/gohello/blob/caf54cdff7d8dd9bd9df4b3b783a72fe75c9a11e/flake.nix#L31-L54) package](https://github.com/Xe/gohello/blob/caf54cdff7d8dd9bd9df4b3b783a72fe75c9a11e/flake.nix#L31-L54)
I walked us through in [the last I walked us through in [the last
post](https://christine.website/blog/nix-flakes-1-2022-02-21): post](https://xeiaso.net/blog/nix-flakes-1-2022-02-21):
```nix ```nix
# ... # ...

View File

@ -1,6 +1,6 @@
--- ---
title: "Nix Flakes: Exposing and using NixOS Modules" title: "Nix Flakes: Exposing and using NixOS Modules"
date: 2022-03-31 date: 2022-04-07
series: nix-flakes series: nix-flakes
tags: tags:
- nixos - nixos
@ -13,20 +13,24 @@ Nix flakes allow you to expose NixOS modules. NixOS modules are templates for
system configuration and they are the basis of how you configure NixOS. Today system configuration and they are the basis of how you configure NixOS. Today
we're going to take our Nix flake [from the last we're going to take our Nix flake [from the last
article](/blog/nix-flakes-2-2022-02-27) and write a NixOS module for it so that article](/blog/nix-flakes-2-2022-02-27) and write a NixOS module for it so that
we can deploy it to a server. we can deploy it to a container running locally. In the next post we will deploy
this to a server.
[If you haven't read <a href="/blog/series/nix-flakes">the other articles in [If you haven't read <a href="/blog/series/nix-flakes">the other articles in
this series</a>, you probably should. This article builds upon the previous this series</a>, you probably should. This article builds upon the previous
ones.](conversation://Mara/hacker) ones.](conversation://Mara/hacker)
NixOS modules are the main building block of how NixOS servers are configured. NixOS modules are building blocks that let you configure NixOS servers. Modules
They are like lego blocks that help you build up a server from off the shelf expose customizable options that expand out into system configuration.
parts. A module describes a desired system state and they build off of eachother Individually, each module is fairly standalone and self-contained, but they
in order to end up with a more elaborate result. build up together into your server configuration like a bunch of legos build
into a house. Each module describes a subset of your desired system
configuration and any options relevant to that configuration.
[You can think about them like Ansible playbooks, but NixOS modules describe the [You can think about them like Ansible playbooks, but NixOS modules describe the
desired end state instead of the steps you need to get to that end desired end state instead of the steps you need to get to that end
state.](conversation://Mara/hacker) state. It's the end result of evaluating all of your options against all of the
modules that you use in your configuration.](conversation://Mara/hacker)
NixOS modules are functions that take in the current state of the system and NixOS modules are functions that take in the current state of the system and
then return things to add to the state of the system. Here is a basic NixOS then return things to add to the state of the system. Here is a basic NixOS
@ -67,9 +71,47 @@ nix-repl> { foo = 1; } // { bar = 2; }
{ bar = 2; foo = 1; } { bar = 2; foo = 1; }
``` ```
<xeblog-conv name="Mara" mood="hacker">
Important pro tip: the merge operator is NOT recursive. If you try to do
something like:
```
nix-repl> foo = { bar = { baz = "foo"; }; }
nix-repl> (foo // { bar = { spam = "eggs"; }; }).bar
```
You will get:
```
{ spam = "eggs"; }
```
And not:
```
{ baz = "foo"; spam = "eggs"; }
```
This is because the `//` operator prefers values from the right-hand side over the
left-hand side when both define the same key. To recursively merge two attribute
sets (using all elements from both sides), use
[lib.recursiveUpdate](https://nixos.org/manual/nixpkgs/stable/#function-library-lib.attrsets.recursiveUpdate):
```
nix-repl> (pkgs.lib.recursiveUpdate foo { bar = { spam = "eggs"; }; }).bar
{ baz = "foo"; spam = "eggs"; }
```
</xeblog-conv>
We will use this to add the container configuration to the flake at the end of We will use this to add the container configuration to the flake at the end of
the flake.nix file. At the end of your flake.nix (just before the final closing the flake.nix file. We need to do this because the upper part of the flake with
`}`), there should be a line that looks like this: the `forAllSystems` call will generate a bunch of system-specific attributes for
each system we support. NixOS configurations don't support this level of
granularity.
At the end of your flake.nix (just before the final closing `}`), there should
be a line that looks like this:
```nix ```nix
}); });
@ -208,9 +250,20 @@ config = mkIf cfg.enable {
}; };
``` ```
[NOTE: You will want to be sure to do the following things to your copy of <xeblog-conv name="Mara" mood="hacker">
gohello: <ul><li>Move the definition of `defaultPackage` into the `packages` attribute set with the name `default` </li><li>Update `defaultApp` and the other entries to point to `self.packages.${system}.default` instead of `self.defaultPackage.${system}`</li></ul> We have updated previous articles and the template NOTE: If you have been following along since before this article was published,
accordingly.](conversation://Mara/hacker) you will want to be sure to do the following things to your copy of gohello:
* Move the definition of `defaultPackage` into the `packages` attribute set with
the name `default`
* Update `defaultApp` and the other entries to point to
`self.packages.${system}.default` instead of `self.defaultPackage.${system}`
We have updated previous articles and the template accordingly. Annoyingly it
seems that this change is new enough that it isn't totally documented on the
NixOS wiki. We are working on fixing this.
</xeblog-conv>
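As a minimal sketch of that move (using the `go-hello` package from earlier in this
series as the example; your attribute names may differ), the old layout:

```nix
# old layout: a per-system attribute at the top level of the flake outputs
defaultPackage = forAllSystems (system: nixpkgsFor.${system}.go-hello);
```

turns into:

```nix
# new layout: the same derivation, now named `default` inside `packages`
packages = forAllSystems (system: {
  default = nixpkgsFor.${system}.go-hello;
});
```

and anything that referenced `self.defaultPackage.${system}` now points at
`self.packages.${system}.default`.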
This will do the following things: This will do the following things:
@ -218,7 +271,9 @@ This will do the following things:
booted" and the network is active) booted" and the network is active)
- Automatically restarts the service when it crashes - Automatically restarts the service when it crashes
- Starts our `web-server` binary when running the service - Starts our `web-server` binary when running the service
- Creates a random user for the service - Creates a random, unique user account for the service (see
[here](http://0pointer.net/blog/dynamic-users-with-systemd.html) for more
information on how/why this works)
- Creates temporary, home and cache directories for the service, makes sure that - Creates temporary, home and cache directories for the service, makes sure that
random user has permission to use them (with the specified directory modes random user has permission to use them (with the specified directory modes
too) too)
@ -291,6 +346,11 @@ nixosModule = { config, lib, pkgs, ... }:
}; };
``` ```
[The service name is overly defensive. It's intended to avoid conflicting with
any other unit on the system named `gohello.service`. Feel free to remove this
part, it is really just defensive devops by design to avoid name
conflicts.](conversation://Mara/hacker)
Then you can add it to the container by importing our new module in its Then you can add it to the container by importing our new module in its
configuration and activating the gohello service: configuration and activating the gohello service:
@ -328,11 +388,21 @@ $ curl http://10.233.1.2 -H "Host: gohello.local.cetacean.club"
hello world :) hello world :)
``` ```
[As an exercise for the reader, try adding a <a <xeblog-conv name="Mara" mood="hacker">
href="https://nixos.org/manual/nixos/stable/index.html#sec-writing-modules">nixos Exercises for the reader:
option</a> that correlates to the `--bind` flag that `gohello` uses as the TCP
Try adding a [nixos
option](https://nixos.org/manual/nixos/stable/index.html#sec-writing-modules)
that correlates to the `--bind` flag that `gohello` uses as the TCP
address to serve HTTP from. You will want to have the type be address to serve HTTP from. You will want to have the type be
`types.port`.](conversation://Mara/hacker) `types.port`. If you are stuck, see
[here](https://github.com/Xe/nixos-configs/tree/master/common/services) for inspiration.
Also try adding `AmbientCapabilities = "CAP_NET_BIND_SERVICE"` and
`CapabilityBoundingSet = "CAP_NET_BIND_SERVICE"` to your `serviceConfig` and
bind `gohello` to port 80 without nginx involved at all.
</xeblog-conv>
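If you want a hint for the shape of the first exercise, a sketch of the option
declaration (the default port is made up here, and wiring the value into the
service's `ExecStart` is left to you) could sit next to the existing `enable`
option and look like this:

```nix
# hypothetical sketch: a port option for the gohello module
options.xeserv.services.gohello.port = lib.mkOption {
  type = lib.types.port;
  default = 3031;
  description = "TCP port that gohello serves HTTP from";
};
```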
You can delete this container with `sudo nixos-container destroy gohello` when You can delete this container with `sudo nixos-container destroy gohello` when
you are done with it. you are done with it.
@ -362,3 +432,12 @@ Next time I will cover how to install NixOS to a server and deploy system
configurations using [deploy-rs](https://github.com/serokell/deploy-rs). This configurations using [deploy-rs](https://github.com/serokell/deploy-rs). This
will allow you to have your workstation build configuration for your servers and will allow you to have your workstation build configuration for your servers and
push out all the changes from there. push out all the changes from there.
---
Many thanks to Open Skies for being my fearless editor who helps make these
things shine.
In part of this post I use my new Xeact-powered HTML component for some of the
conversation fragments, but the sizing was off on my iPhone when I tested it. If
you know what I am doing wrong, please [get in touch](/contact).

View File

@ -0,0 +1,341 @@
---
title: "Nix Flakes on WSL"
date: 2022-05-01
series: nix-flakes
tags:
- nixos
- wsl
vod:
youtube: https://youtu.be/VzQ_NwFJObc
twitch: https://www.twitch.tv/videos/1464781566
---
About five years ago, Microsoft released the Windows Subsystem for Linux
([WSL](https://docs.microsoft.com/en-us/windows/wsl/)). This allows you to run
Linux programs on a Windows machine. When they released WSL version 2 in 2019,
this added support for things like Docker and systemd. As a result, this is
enough to run NixOS on Windows.
<xeblog-conv name="Mara" mood="hacker">This will give you an environment to run
Nix and Nix Flakes commands with. You can use this to follow along with this
series without having to install NixOS on a VM or cloud server. This is going to
retread a bunch of ground from the first article. If you have been following
along through this entire series, once you get to the point where you convert
the install to flakes there isn't much more new material here.
</xeblog-conv>
## Installation
Head to the NixOS-WSL [releases
page](https://github.com/nix-community/NixOS-WSL/releases/) and download the
`nixos-wsl-installer-fixed.tar.gz` file to your Downloads folder.
Then open Powershell and make a folder called `WSL`:
```powershell
New-Item -Path .\WSL -ItemType Directory
```
<xeblog-conv name="Mara" mood="hacker">It's worth noting that Powershell does
have a bunch of aliases that map common coreutils commands to the appropriate
Powershell CMDlets. However, these aliases are <b>NOT</b> flag-compatible and use
the Powershell semantics instead of the semantics of the commands they alias.
This will bite you when you use commands like <code>wget</code> out of instinct
to download things. In order to avoid your muscle memory betraying you, the
Powershell CMDlets are shown here in their full overly verbose glory.
</xeblog-conv>
Then enter the directory with `Set-Location`:
```powershell
Set-Location -Path .\WSL
```
<xeblog-conv name="Mara" mood="hacker">This directory is where the NixOS root
filesystem will live. If you want to put this somewhere else, feel free to.
Somewhere in `%APPDATA%` will work, just as long as it's on an NTFS volume
somewhere.
</xeblog-conv>
Make a folder for the NixOS filesystem:
```powershell
New-Item -Path .\NixOS -ItemType Directory
```
Then install the NixOS root image with the `wsl` command:
```powershell
wsl --import NixOS .\NixOS\ ..\Downloads\nixos-wsl-installer-fixed.tar.gz --version 2
```
And start NixOS once to have it install itself:
```powershell
wsl -d NixOS
```
Once that finishes, press control-D (or use the `exit` command) to exit out of
NixOS and restart the WSL virtual machine:
```powershell
exit
wsl --shutdown
wsl -d NixOS
```
And then you have yourself a working NixOS environment! It's very barebones, but
we can use it to test the `nix run` command against our gohello program:
```console
$ nix run github:Xe/gohello
Hello reader!
```
## Local Services
We can also use this NixOS environment to run a local nginx server. Open
`/etc/nixos/configuration.nix`:
```nix
{ lib, pkgs, config, modulesPath, ... }:
with lib;
let
nixos-wsl = import ./nixos-wsl;
in
{
imports = [
"${modulesPath}/profiles/minimal.nix"
nixos-wsl.nixosModules.wsl
];
wsl = {
enable = true;
automountPath = "/mnt";
defaultUser = "nixos";
startMenuLaunchers = true;
# Enable integration with Docker Desktop (needs to be installed)
# docker.enable = true;
};
# Enable nix flakes
nix.package = pkgs.nixFlakes;
nix.extraOptions = ''
experimental-features = nix-command flakes
'';
}
```
Right after the `wsl` block, add this nginx configuration to the file:
```nix
services.nginx.enable = true;
services.nginx.virtualHosts."test.local.cetacean.club" = {
root = "/srv/http/test.local.cetacean.club";
};
```
This will create an nginx configuration that points the domain
`test.local.cetacean.club` to the contents of the folder `/srv/http/test.local.cetacean.club`.
<xeblog-conv name="Mara" mood="hacker">The <code>/srv</code> folder is set aside
for site-specific data, which is code for "do whatever you want with this
folder". In many cases people make a separate <code>/srv/http</code> folder and
put each static subdomain in its own folder under that, however I am also told
that it is idiomatic to put stuff in <code>/var/www</code>. Pick your poison.
</xeblog-conv>
Then you can test the web server with the `curl` command:
```console
$ curl http://test.local.cetacean.club
<html>
<head><title>404 Not Found</title></head>
<body>
<center><h1>404 Not Found</h1></center>
<hr><center>nginx</center>
</body>
</html>
```
This is good! Nginx is running, and since we haven't created the folder with our
website content yet, this 404 means that it can't find anything to serve! Let's create the
folder so that nginx has permission to read it and we can modify things in it:
```
sudo mkdir -p /srv/http/test.local.cetacean.club
sudo chown nixos:nginx /srv/http/test.local.cetacean.club
```
Finally we can make an amazing website. Open
`/srv/http/test.local.cetacean.club/index.html` in nano:
```
nano /srv/http/test.local.cetacean.club/index.html
```
And paste in this HTML:
```html
<title>amazing website xD</title>
<h1>look at my AMAZING WEBSITE</h1>
It's so cool *twerks*
```
<xeblog-conv name="Mara" mood="hacker">This doesn't have to just be artisanal
handcrafted HTML in bespoke folders either. You can set the <code>root</code> of
an nginx virtual host to point to a Nix package as well. This will allow you to
automatically generate your website somehow and deploy it with the rest of the
system, including being able to roll back changes.</xeblog-conv>
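As a tiny sketch of that idea (using `pkgs.writeTextDir` purely as a stand-in for
whatever actually generates your site), the virtual host could instead look like
this:

```nix
services.nginx.virtualHosts."test.local.cetacean.club" = {
  # the root is now a path in the Nix store, rebuilt and rolled back with the system
  root = pkgs.writeTextDir "index.html" "<h1>hi from the nix store</h1>";
};
```

For now though, the handcrafted file is plenty.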
And then you can see it show up with `curl`:
```console
$ curl http://test.local.cetacean.club
<title>amazing website xD</title>
<h1>look at my AMAZING WEBSITE</h1>
It's so cool *twerks*
```
You can also check this out in a browser by clicking
[here](http://test.local.cetacean.club):
![a browser window titled "amazing website xD" with the header "look at my
AMAZING WEBSITE" and content of "It's so cool
\*twerks\*"](https://cdn.christine.website/file/christine-static/blog/Screenshot+2022-04-23+141937.png)
## Installing `gohello`
To install the `gohello` service, first we will need to convert this machine to
use Nix flakes. We can do that really quickly and easily by adding this file as
`/etc/nixos/flake.nix`:
<xeblog-conv name="Mara" mood="happy">Do this as root!</xeblog-conv>
```nix
{
inputs = {
nixpkgs.url = "nixpkgs/nixos-unstable";
};
outputs = { self, nixpkgs, ... }: {
nixosConfigurations.nixos = nixpkgs.lib.nixosSystem {
system = "x86_64-linux";
modules = [
./configuration.nix
# add things here
];
};
};
}
```
Then run `nix flake check` to make sure everything is okay:
```
sudo nix flake check /etc/nixos
```
And finally activate the new configuration with flakes:
```
sudo nixos-rebuild switch
```
<xeblog-conv name="Mara" mood="hmm">Why don't you have the <code>--flake</code>
flag here? Based on what I read in the documentation, I thought you had to have
it there.</xeblog-conv>
<xeblog-conv name="Cadey" mood="enby"><code>nixos-rebuild</code> will
automatically detect flakes in <code>/etc/nixos</code>. The only major thing it
cares about is the hostname matching. If you want to customize the hostname of
the WSL VM, change the <code>nixos</code> in
<code>nixosConfigurations.nixos</code> above and set
<code>networking.hostName</code> to the value you want to use. To use flakes
explicitly, pass <code>--flake /etc/nixos#hostname</code> to your
<code>nixos-rebuild</code> call.
</xeblog-conv>
After it thinks for a bit, you should notice that nothing happened. This is
good, we have just converted the system over to using Nix flakes instead of the
classic `nix-channel` rebuild method.
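As an aside, if you do want to customize the hostname as Cadey mentioned, the
relevant bits would look something like this (`tower` is a name picked purely for
illustration):

```nix
# /etc/nixos/flake.nix: rename the configuration...
nixosConfigurations.tower = nixpkgs.lib.nixosSystem {
  system = "x86_64-linux";
  modules = [ ./configuration.nix ];
};
```

```nix
# /etc/nixos/configuration.nix: ...and set the matching hostname
networking.hostName = "tower";
```

With the names matching, a plain `sudo nixos-rebuild switch` still works, or you
can be explicit with `sudo nixos-rebuild switch --flake /etc/nixos#tower`.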
To get `gohello` in the system, first we need to add `git` to the commands
available on the system in `configuration.nix`:
```nix
environment.systemPackages = with pkgs; [ git ];
```
Then we can add `gohello` to our system flake:
```nix
{
inputs = {
nixpkgs.url = "nixpkgs/nixos-unstable";
# XXX(Xe): this URL may change for you, such as github:Xe/gohello-http
gohello.url = "git+https://tulpa.dev/cadey/gohello-http?ref=main";
};
outputs = { self, nixpkgs, gohello, ... }: {
nixosConfigurations.nixos = nixpkgs.lib.nixosSystem {
system = "x86_64-linux";
modules = [
./configuration.nix
# add things here
gohello.nixosModule
({ pkgs, ... }: {
xeserv.services.gohello.enable = true;
})
];
};
};
}
```
<xeblog-conv name="Mara" mood="hacker">The block of code under
<code>gohello.nixosModule</code> is an inline NixOS module. If we put
<code>gohello.nixosModule</code> before the <code>./configuration.nix</code>
reference, we could put the <code>xeserv.services.gohello.enable = true;</code>
line inside <code>./configuration.nix</code>. This is an exercise for the
reader.</xeblog-conv>
And rebuild the system with `gohello` enabled:
```
sudo nixos-rebuild switch
```
Finally, poke it with `curl`:
```console
$ curl http://gohello.local.cetacean.club
hello world :)
```
To update it, update the flake inputs in `/etc/nixos` and run `nixos-rebuild`:
```
sudo nix flake update /etc/nixos
sudo nixos-rebuild switch
```
---
And from here you can do whatever you want with NixOS. You can use
[containers](https://nixos.org/manual/nixos/stable/#ch-containers), set up
arbitrary services, or plan for world domination as normal.
<xeblog-conv name="Numa" mood="delet">I thought it was "to save the world from
devastation", not "to plan for world domination". Who needs a monopoly on
violence for world domination when you have Nix expressions?</xeblog-conv>
<xeblog-conv name="Cadey" mood="coffee">Siiiiiiiiiiiiiiiiiigh.</xeblog-conv>
I will use this setup in future posts to make this series more accessible and easy to
hack at without having to have a dedicated NixOS machine lying around.

View File

@ -140,5 +140,5 @@ for more information.
--- ---
Also happy December! My site has the [snow Also happy December! My site has the [snow
CSS](https://christine.website/blog/let-it-snow-2018-12-17) loaded for the CSS](https://xeiaso.net/blog/let-it-snow-2018-12-17) loaded for the
month. Enjoy! month. Enjoy!

View File

@ -6,7 +6,7 @@ author: ectamorphic
Recently I got a new VR setup that uses my tower directly instead of the [wifi Recently I got a new VR setup that uses my tower directly instead of the [wifi
streaming streaming
catastrophe](https://christine.website/blog/convoluted-vrchat-gchat-setup-2021-02-24). catastrophe](https://xeiaso.net/blog/convoluted-vrchat-gchat-setup-2021-02-24).
I have a [Valve Index](https://store.steampowered.com/valveindex) and an [AMD I have a [Valve Index](https://store.steampowered.com/valveindex) and an [AMD
RX6700XT](https://www.amd.com/en/products/graphics/amd-radeon-rx-6700-xt) GPU. RX6700XT](https://www.amd.com/en/products/graphics/amd-radeon-rx-6700-xt) GPU.
Some huge advantages of this setup include: Some huge advantages of this setup include:

View File

@ -5,10 +5,10 @@ date: 2019-01-17
I found an old backup that contained a few articles from my old [Medium](https://medium.com/@theprincessxena) blog. I have converted them to markdown and added them to the blog archives: I found an old backup that contained a few articles from my old [Medium](https://medium.com/@theprincessxena) blog. I have converted them to markdown and added them to the blog archives:
- 2014-11-28 - [Web Application Development with Beego](https://christine.website/blog/beego-2014-11-28) - 2014-11-28 - [Web Application Development with Beego](https://xeiaso.net/blog/beego-2014-11-28)
- 2014-11-20 - [Dependency Hell](https://christine.website/blog/dependency-hell-2014-11-20) - 2014-11-20 - [Dependency Hell](https://xeiaso.net/blog/dependency-hell-2014-11-20)
- 2014-11-18 - [My Experience with Atom as A Vim User](https://christine.website/blog/atom-as-vim-2014-11-18) - 2014-11-18 - [My Experience with Atom as A Vim User](https://xeiaso.net/blog/atom-as-vim-2014-11-18)
- 2014-10-24 - [Instant Development Environments in Docker](https://christine.website/blog/dev-2014-10-24) - 2014-10-24 - [Instant Development Environments in Docker](https://xeiaso.net/blog/dev-2014-10-24)
- 2014-10-20 - [MPD Via Docker](https://christine.website/blog/mpd-docker-2014-10-20) - 2014-10-20 - [MPD Via Docker](https://xeiaso.net/blog/mpd-docker-2014-10-20)
I hope these are at all useful. I hope these are at all useful.

View File

@ -4,7 +4,7 @@ date: 2018-09-05
series: olin series: olin
--- ---
This post is a continuation of [this post](https://christine.website/blog/olin-1-why-09-1-2018). This post is a continuation of [this post](https://xeiaso.net/blog/olin-1-why-09-1-2018).
Suppose you are given the chance to throw out the world and start from scratch Suppose you are given the chance to throw out the world and start from scratch
in a minimal environment. You can then work up from nothing and build the world in a minimal environment. You can then work up from nothing and build the world

View File

@ -13,7 +13,7 @@ tags:
Over the last week or so I've been doing a _lot_ of improvements to [Olin][olin] in order to make it ready to be the kernel for the minimum viable product of [wasmcloud][wasmcloud-hello-world]. Here's an overview of the big things that have happened from version [0.1.1][olin-0.1.1] to version [0.4.0][olin-0.4.0]. Over the last week or so I've been doing a _lot_ of improvements to [Olin][olin] in order to make it ready to be the kernel for the minimum viable product of [wasmcloud][wasmcloud-hello-world]. Here's an overview of the big things that have happened from version [0.1.1][olin-0.1.1] to version [0.4.0][olin-0.4.0].
[olin]: https://github.com/Xe/olin [olin]: https://github.com/Xe/olin
[wasmcloud-hello-world]: https://christine.website/blog/wasmcloud-progress-2019-12-08 [wasmcloud-hello-world]: https://xeiaso.net/blog/wasmcloud-progress-2019-12-08
[olin-0.1.1]: https://github.com/Xe/olin/releases/tag/v0.1.1 [olin-0.1.1]: https://github.com/Xe/olin/releases/tag/v0.1.1
[olin-0.4.0]: https://github.com/Xe/olin/releases/tag/v0.4.0 [olin-0.4.0]: https://github.com/Xe/olin/releases/tag/v0.4.0
@ -31,7 +31,7 @@ As Olin is just a kernel, it needs some work in order to really shine as a true
Here is what has been done since the [last Olin post][last-olin-post]: Here is what has been done since the [last Olin post][last-olin-post]:
[last-olin-post]: https://christine.website/blog/olin-2-the-future-09-5-2018 [last-olin-post]: https://xeiaso.net/blog/olin-2-the-future-09-5-2018
* An official, automated build of the example Olin components has been published to the Docker Hub * An official, automated build of the example Olin components has been published to the Docker Hub
* The Go ABI has been deprecated for the moment * The Go ABI has been deprecated for the moment

View File

@ -13,7 +13,7 @@ In my [last post][pahihelloworld] I mentioned that pa'i was faster than Olin's
cwa binary written in go without giving any benchmarks. I've been working on new cwa binary written in go without giving any benchmarks. I've been working on new
ways to gather and visualize these benchmarks, and here they are. ways to gather and visualize these benchmarks, and here they are.
[pahihelloworld]: https://christine.website/blog/pahi-hello-world-2020-02-22 [pahihelloworld]: https://xeiaso.net/blog/pahi-hello-world-2020-02-22
Benchmarking WebAssembly implementations is slightly hard. A lot of existing Benchmarking WebAssembly implementations is slightly hard. A lot of existing
benchmark tools simply do not run in WebAssembly as is, not to mention inside benchmark tools simply do not run in WebAssembly as is, not to mention inside

View File

@ -115,7 +115,7 @@ production-facing servers should probably only be able to be connected to over a
VPN of some kind. VPN of some kind.
If you want to see more about how to set up WireGuard on NixOS, see If you want to see more about how to set up WireGuard on NixOS, see
[here](https://christine.website/blog/my-wireguard-setup-2021-02-06) for more [here](https://xeiaso.net/blog/my-wireguard-setup-2021-02-06) for more
information. information.
## Locking Down the Hatches ## Locking Down the Hatches
@ -130,7 +130,7 @@ I am going to use the word "service" annoyingly vague here. In this world, a
"service" is a human-oriented view of "computer does the thing I want it to do". "service" is a human-oriented view of "computer does the thing I want it to do".
This website you're reading this post on could be one service, and it should This website you're reading this post on could be one service, and it should
have a separate account from other services. See have a separate account from other services. See
[here](https://christine.website/blog/nixops-services-2020-11-09) for more [here](https://xeiaso.net/blog/nixops-services-2020-11-09) for more
information on how to set this up. information on how to set this up.
### Lock Down Services Within Systemd ### Lock Down Services Within Systemd
@ -433,7 +433,7 @@ where I show you how to automatically create an ISO that does all this for you.
### Repeatable Base Image with an ISO ### Repeatable Base Image with an ISO
Using the setup I mentioned [in a past Using the setup I mentioned [in a past
post](https://christine.website/blog/my-homelab-2021-06-08), you can create an post](https://xeiaso.net/blog/my-homelab-2021-06-08), you can create an
automatic install ISO that will take a blank disk to a state where you can SSH automatic install ISO that will take a blank disk to a state where you can SSH
into it and configure it further using a tool like into it and configure it further using a tool like
[morph](https://github.com/DBCDK/morph). Take a look at [this [morph](https://github.com/DBCDK/morph). Take a look at [this

View File

@ -9,7 +9,7 @@ tags:
- r13y - r13y
--- ---
In [the last post](https://christine.website/blog/paranoid-nixos-2021-07-18) we In [the last post](https://xeiaso.net/blog/paranoid-nixos-2021-07-18) we
covered a lot of the base groundwork involved in making a paranoid NixOS setup. covered a lot of the base groundwork involved in making a paranoid NixOS setup.
Today we're gonna throw this into prod by making a base NixOS image with it. Today we're gonna throw this into prod by making a base NixOS image with it.

View File

@ -37,7 +37,7 @@ closest friends that I can talk about anything with, even what would normally
violate an NDA. My closest friends are so close that language isn't even as much violate an NDA. My closest friends are so close that language isn't even as much
of a barrier as it would be otherwise. of a barrier as it would be otherwise.
As I've mentioned in the past, [I have tulpas](https://christine.website/blog/what-its-like-to-be-me-2018-06-14). As I've mentioned in the past, [I have tulpas](https://xeiaso.net/blog/what-its-like-to-be-me-2018-06-14).
They are people that live with me like roommates inside my body. It really does They are people that live with me like roommates inside my body. It really does
sound strange or psychotic; but you'll just have to trust me when I say they sound strange or psychotic; but you'll just have to trust me when I say they
fundamentally help me live my life, do my job and do other things people fundamentally help me live my life, do my job and do other things people

View File

@ -0,0 +1,53 @@
---
title: Stop Using Politics As A Cudgel To Discourage Experimentation
date: 2022-04-21
tags:
- rant
- systemd
- communityhealth
---
So let's say you get bored one day and you decide you want to do things that god
and man have decreed impossible. Let's also say that this exact thing involves a
tool that just happens to rustle all of the jimmies (for reasons that are not
entirely clear). Then you get it all to a point where you want to submit it
upstream so you can get help experimenting with this tool.
So you submit it to upstream in the experimental branch, expecting very little
pushback so you can get help tinkering with things. But once you submit it
upstream, [all hell breaks
loose](https://gitlab.alpinelinux.org/alpine/aports/-/merge_requests/33329).
Stop using politics as a cudgel to discourage experimentation. Yes it involves
systemd. Just because you think that the tool is overcomplicated doesn't mean
that other people don't find it useful. Trying to shut down experimentation is
how you get people to leave the community or give up participating in open
source altogether.
The reactions in that thread are both disappointing and somewhat to be expected.
I don't know why people have such a negative reaction to systemd. It's just an
init system, not a religion. It wouldn't have become a good choice for so much
of the Linux ecosystem without solid technical merits. If it is really
that bad, then the mantle of responsibility is on you to come up with a better
option.
[No, OpenRC is not that option. It can be PART OF an option, but it is not a
competitor by itself.](conversation://Cadey/coffee)
I know I said I'd stop ranting on this blog as much, but this stuff really
grinds my gears and I feel that I should use my platform for good here. This
behavior is inexcusable. I want to reiterate that I have _no_ power in this
regard. I am just some random person on a blog who got frustrated at the
reactions to this contribution. Some pushback is acceptable. Accusing a
contributor of ignorance is inexcusable. Comments like this have no place in
open source contributions:
> SysTemD is the STD of operating systems. There is no "one little poke", you
> can't be a little bit pregnant.
Jake, if you're out there reading this: keep doing this thing. It is a fantastic
creation that I thought was impossible. You may have to soft-fork the
distribution to get this to work reliably, but I really want to see where this
rabbit-hole goes.
Keep hacking.

View File

@ -14,7 +14,7 @@ My work laptop uses KDE, so I tried out
really liked this. I think one of the major differences between how I've been really liked this. I think one of the major differences between how I've been
failing at pomodoro in the past and why it's been working now is that I've failing at pomodoro in the past and why it's been working now is that I've
worked it into my [daily note-taking/TODO worked it into my [daily note-taking/TODO
workflow](https://christine.website/blog/gtd-on-paper-2021-06-13). I label each workflow](https://xeiaso.net/blog/gtd-on-paper-2021-06-13). I label each
pomodoro (my notes call them "Pom" because that isn't something I write often in pomodoro (my notes call them "Pom" because that isn't something I write often in
them) as a section in my notes and then include a few TODO items under it. I'll them) as a section in my notes and then include a few TODO items under it. I'll
also add some notes to the pom in case I need them later. also add some notes to the pom in case I need them later.

View File

@ -49,12 +49,12 @@ Here is an example web app manifest [from my portfolio site](https://github.com/
"background_color": "#fa99ca", "background_color": "#fa99ca",
"display": "standalone", "display": "standalone",
"scope": "/", "scope": "/",
"start_url": "https://christine.website/", "start_url": "https://xeiaso.net/",
"description": "Blog and Resume for Christine Dodrill", "description": "Blog and Resume for Christine Dodrill",
"orientation": "any", "orientation": "any",
"icons": [ "icons": [
{ {
"src": "https://christine.website/static/img/avatar.png", "src": "https://xeiaso.net/static/img/avatar.png",
"sizes": "1024x1024" "sizes": "1024x1024"
} }
] ]
@ -65,7 +65,7 @@ If you just want to create a manifest quickly, check out [this](https://app-mani
## Add Manifest to Your Base HTML Template ## Add Manifest to Your Base HTML Template
I suggest adding the HTML link for the manifest to the most base HTML template you can, or in the case of a purely client side web app its main `index.html` file, as it needs to be as visible by the client trying to install the app. Adding this is [simple](https://developer.mozilla.org/en-US/docs/Web/Apps/Progressive/Installable_PWAs), assuming you are hosting this manifest on [/static/manifest.json](https://christine.website/static/manifest.json) – simply add it to the <head> section: I suggest adding the HTML link for the manifest to the most base HTML template you can, or in the case of a purely client side web app its main `index.html` file, as it needs to be as visible by the client trying to install the app. Adding this is [simple](https://developer.mozilla.org/en-US/docs/Web/Apps/Progressive/Installable_PWAs), assuming you are hosting this manifest on [/static/manifest.json](https://xeiaso.net/static/manifest.json) – simply add it to the <head> section:
```html ```html
<link rel="manifest" href="/static/manifest.json"> <link rel="manifest" href="/static/manifest.json">
@ -96,7 +96,7 @@ At a high level, consider what assets and pages you want users of your website t
* Contact information for the person, company or service running the progressive web app * Contact information for the person, company or service running the progressive web app
* Any other pages or information you might find useful for users of your website * Any other pages or information you might find useful for users of your website
For example, I have the following precached for [my portfolio site](https://christine.website): For example, I have the following precached for [my portfolio site](https://xeiaso.net):
* My homepage (implicitly includes all of the CSS on the site) `/` * My homepage (implicitly includes all of the CSS on the site) `/`
* My blog index `/blog/` * My blog index `/blog/`

View File

@ -14,7 +14,7 @@ language help people understand where the boundaries between syllables are. I
will then describe my plans for the L'ewa orthography and how L'ewa is will then describe my plans for the L'ewa orthography and how L'ewa is
romanized. This is a response to the prompt made [here][rclm2prompt]. romanized. This is a response to the prompt made [here][rclm2prompt].
[rclm1]: https://christine.website/blog/reconlangmo-1-name-ctx-history-2020-05-05 [rclm1]: https://xeiaso.net/blog/reconlangmo-1-name-ctx-history-2020-05-05
[rclm2prompt]: https://www.reddit.com/r/conlangs/comments/gfp3hw/reconlangmo_2_phonology_writing/ [rclm2prompt]: https://www.reddit.com/r/conlangs/comments/gfp3hw/reconlangmo_2_phonology_writing/
## Phonology ## Phonology

View File

@ -15,7 +15,7 @@ making the vocabulary for L'ewa and I'll include an entire table of the
dictionary words. This answers [this dictionary words. This answers [this
prompt](https://www.reddit.com/r/conlangs/comments/gojncp/reconlangmo_6_lexicon/). prompt](https://www.reddit.com/r/conlangs/comments/gojncp/reconlangmo_6_lexicon/).
[reconlangmo]: https://christine.website/blog/series/reconlangmo [reconlangmo]: https://xeiaso.net/blog/series/reconlangmo
## Word Distinctions ## Word Distinctions

View File

@ -15,7 +15,7 @@ post will start to cover a lot of the softer skills behind L'ewa as well as
cover some other changes I'm making under the hood. This is a response to [this cover some other changes I'm making under the hood. This is a response to [this
prompt][rclm7]. prompt][rclm7].
[reconlangmo]: https://christine.website/blog/series/reconlangmo [reconlangmo]: https://xeiaso.net/blog/series/reconlangmo
[rclm7]: https://www.reddit.com/r/conlangs/comments/gqo8jn/reconlangmo_7_discourse/ [rclm7]: https://www.reddit.com/r/conlangs/comments/gqo8jn/reconlangmo_7_discourse/
## Information Structure ## Information Structure

View File

@ -0,0 +1,118 @@
---
title: "What To Do As A Recruiter When A Gender-diverse Person Asks You To Update Their Name"
date: 2022-04-01
---
[I really wish this was an April Fool's post. I had a few ideas planned, but
maybe you will get to see them next year.<br /><br />As a reminder, I am
speaking for myself and not for my employer.](conversation://Cadey/coffee)
This post is directed at all of the recruiters that are reading this blog. This
is a scenario that many of you may not have dealt with. After going through an example
of this with a recruiter recently, I figure it's a teaching moment.
I am speaking up about this because I know many others who have gone through the
same kinds of problems and have not felt safe to speak up about them. I am not
speaking for those people in this post, but I want to use my platform as a
blogger to amplify the sentiment of what I have heard over the years.
## To Recruiters
As a recruiter, if you are cold-emailing someone, please do the research to get
their name correct. If you do not and someone asks you to correct it, do it.
When gender-diverse people like me get an email that references an out-of-date
name, it is seen as a sign that the person sending it has not done their
research before sending that email out into the void.
When you correct that name in your system, also make sure to cancel all outgoing
automated emails to that person. The caching layer of the recruiting system may
have already drafted those emails based on a template. If they go out, this will
be seen as a _massive sign of disrespect_. It will also make the person
receiving that email question if you _actually corrected_ the name in that
system or not. It may make the recipient also question if you are just giving
them lip service to save face instead of making a genuine effort to ensure that
the recruiting system has accurate information in it.
This is not a good way to foster the kind of trust needed for a gender-diverse
person to want to choose your employer as the single point of failure for access
to medication, food and regular medical checkups. For many gender-diverse
people, changing jobs can mean an interruption of access to life-saving
medication.
You may get a slightly angry reply if you send out emails with incorrect
information. This can happen because gender-diverse people are likely to feel
like society really doesn't care about them and that they are not respected as
having agency over their own identity. To some this is a fact and not
a feeling. And with
[all](https://www.theguardian.com/us-news/2022/mar/10/idaho-bill-trans-youth-treatment-ban-passes-house)
[of](https://www.washingtonpost.com/dc-md-va/2022/03/17/texas-trans-child-abuse-investigations/)
[the](https://www.nbcnews.com/nbc-out/out-politics-and-policy/alabama-bill-seeks-ban-hormone-treatments-trans-youth-rcna18512)
[actions](https://www.hrc.org/press-releases/breaking-2021-becomes-record-year-for-anti-transgender-legislation)
governments have been taking to directly attack the freedoms and rights to
self-determination that gender-diverse people like me rely on, you can't blame
them for being fed up with the situation. It is not fun to feel like your very
existence is made out to be some black mark of doom on Western civilization. It
is even less fun to be reminded of that when reading your email inbox. Please
understand that we mean well; society is just broken in general.
The least you can do is put in _any amount of research_ to make sure that you
are using the correct name. It may be a good idea to add the following
text to your recruiting emails (before you brag about fundraising is probably
best; I tune out around then):
> If I got your name incorrect, please let me know what name/pronouns you would
> like me to update our system to use. I got this name from $SOURCE.
Adding the source of where you got that name from can help make this less
stressful for gender-diverse people. People's names are scattered everywhere
across the internet. Letting people know where you got that information from can
help them know what to fix if a fix is needed.
Some chosen names may seem "weird" due to societal biases that ensure the names
people use to refer to each other are usually not chosen by the people being
referred to. Trust that the person on the other end
is being honest about their identity. The truth requires no belief.
If they ask you to update their pronouns, respect that and ensure you use them
without failure. Using the wrong pronouns can be seen as an even worse sign of
disrespect than using the wrong name. You do not want this to happen if your
goal is to find people to hire.
## To Gender-diverse People
Yeah, this situation sucks. I can't disagree. You really do need to assume good
faith as much as you can. Most of these recruiter systems rely on ["data
enrichment" APIs](https://clearbit.com/) and potentially outdated mass scraping
of LinkedIn and people's blogs.
It can help if you make publicly available posts like
[this](/blog/xe-2021-08-07) that unambiguously say what you want people to call
you by. Keep it updated in case journalists decide to compare your chosen name
to mercenary groups.
Try to be as polite and direct as possible. Here is an example of how I have
asked recruiters to update their information in the past:
> Please update your files with the name "Xe Iaso" (capital I). I am
> slowly moving away from "Christine Dodrill" as the name I use to
> represent myself professionally.
If you are moving away from a "dead name", you may want to use something like
this:
> I have no record of a "Christine Dodrill" at this email address. You may want
> to look elsewhere. If you would like to proceed with me instead, here is
> information about me: https://xeiaso.net.
Throw in your pronouns too to be safe.
[I really need to change this blog's domain, but I have such amazing SEO that I
really don't want to break it.](conversation://Cadey/coffee)
Also consider deleting the email and not replying to them. That's totally valid
too unless you are in desperate need of a new employer.
You do not need to justify speaking up about an employer having the wrong name
for you. The truth requires no belief. Speaking the truth to power is the
essence of valor, which is one of the highest forms of love.

View File

@ -0,0 +1,474 @@
---
title: "robocadey: Shitposting as a Service"
date: 2022-04-30
tags:
- gpt2
- machinelearning
- python
- golang
- art
vod:
twitch: https://www.twitch.tv/videos/1471211336
youtube: https://youtu.be/UAd-mWMG198
---
<noscript>
[Hey, you need to enable JavaScript for most of the embedded posts in this
article to work. Sorry about this, we are working on a better solution, but this
is what we have right now.](conversation://Mara/hacker)
</noscript>
What is art? Art is when you challenge the assumptions that people make about a
medium and use that conflict to help them change what they think about that
medium. Let's take "Comedian" by Maurizio Cattelan for example:
![A banana duct-taped to an artist's
canvas](https://cdn.christine.website/file/christine-static/blog/merlin_165616527_d76f38fc-e45d-4913-9780-1cc939750197-superJumbo.jpg)
By my arbitrary definition above, this is art. This takes assumptions that you
have about paintings (you know, that they use paint on the canvas) and discards
them. This lets you change what you think art is. Art is not about the medium or
the things in it. Art is the expression of these things in new and exciting ways.
<xeblog-conv name="Cadey" mood="coffee">Originally I was going to use some
Banksky art here, but for understandable reasons it's quite difficult to get
images of Banksky art.</xeblog-conv>
One of my favorite kinds of art is the "uncanny valley" of realism. Let's take
Death Stranding as an example of this. Death Stranding is a video game that was
released in 2019 for the PlayStation 4 and is one of my favorite games of all
time. The game has a very hyper-realistic art style that is firmly in the
centre of the uncanny valley:
![A picture of Death Stranding gameplay, showing the protagonist Sam Porter
Bridges attempting to climb a sheer cliff face using a rope that another player
left
behind](https://cdn.christine.website/file/christine-static/blog/20220202215156_3.jpg)
This game mixes very realistic scenery with a story about dead bodies turning
into antimatter and you being a UPS delivery person that saves America. This is
art to me. This transformed what a video game could be, even if the entire game
boils down to Kojima themed fetch quests. Oh and trying not to die even though
you can't die but when you die it's really bad.
I want to create this kind of art, and I think I have found a good medium to do
this with. I write a lot on this little independent site called Twitter. This is
one of the main things that I write on, and through the process of the last 8
years or so, I've written a shockingly large amount of things. I post a lot of
weird things there as well as a lot of boring/normal things.
However a lot of my posts boil down to creating a "stream of consciousness", or
using it as a way to help deal with intrusive thoughts. There's a certain art to
this, as it is a candid exchange between the author and the reader. The reader
doesn't get all the context (heck, I doubt that I have all the context lol), but
from there they get to put the pieces together.
So, when thinking about trying to get into the uncanny valley with this kind of
art medium, my mind goes back to the old days on IRC channels. Many IRC channels
run bots to help them run the channel or purely for amusement. One of my
favorite kinds of bots is a [Markov
chain](https://en.wikipedia.org/wiki/Markov_chain) bot. These kinds of bots
learn patterns in text and then try to repeat them at random. With enough
training data, it can be fairly convincing at first glance. However, you need _a
lot_ of training data to get there. More training data than I have ever tweeted.
This ends up creating a situation where the Markov bot is right in the uncanny
valley of realism. At first glance it is something that could plausibly be
human. It looks like a bot, but it also looks like a human, but it also looks
like a bot. It appears to be in the middle. I like this from an artistic
standpoint because this challenges your assumptions that bots need to be
obviously bots and humans need to be obviously human.
In the past I have run a service I call `cadeybot`. It took all of my Discord
messages, fed them into a Markov chain, and then attempted to create new
messages as a result. This worked pretty well, but we ran into an issue where it
would basically regurgitate its training data. So when people thought it was
being novel about roasting people, someone would search the chat and find out
that I had said those exact words 2 years ago.
This isn't really exciting from an artistic point of view. You could get the
same result from randomly replying with old chat messages without any additional
data in the mix.
I haven't run `cadeybot` in some time because of this. It gets really boring
really fast.
However, I was looking at some DALL-E generated images and then inspiration
struck:
<xeblog-conv name="Mara" mood="hmm">What if I fed all those tweets into
[GPT-2](https://en.wikipedia.org/wiki/GPT-2)?</xeblog-conv>
So I did that. I made [@robocadey@botsin.space](https://botsin.space/@robocadey)
as a fediverse bot that generates new content based on everything I've ever
tweeted.
<iframe src="https://botsin.space/@robocadey/108219835651549836/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="500"
height="245" allowfullscreen="allowfullscreen"></iframe>
## Data
The first step of this is getting all of my tweet data out of Twitter. This
was a lot easier than I thought. All I had to do was submit a GDPR data request,
wait a few days for the cloud to think and then I got a 3 gigabyte zip file full
of everything I've ever tweeted. Cool!
Looking through the dump, I found a 45 megabyte file called `tweets.js`. This
looked like it could be important! So I grabbed it and looked at the first few
lines:
```console
$ head tweet.js
window.YTD.tweet.part0 = [
{
"tweet" : {
"retweeted" : false,
"source" : "<a href=\"http://www.bitlbee.org/\" rel=\"nofollow\">BitlBee</a>",
"entities" : {
"hashtags" : [ ],
"symbols" : [ ],
"user_mentions" : [
{
```
So it looks like most of this is really just a giant block of data that's
stuffed into JavaScript so that the embedded HTML can show off everything you've
ever tweeted. Neat, but I only need the tweet contents. We can strip off the
preamble with `sed`, and then grab the first entry out of `tweets.js` with a
command like this:
```console
$ cat tweet.js | sed 's/window.YTD.tweet.part0 = //' | jq .[0]
{
"tweet": {
"retweeted": false,
"source": "<a href=\"http://www.bitlbee.org/\" rel=\"nofollow\">BitlBee</a>",
"entities": {
"hashtags": [],
"symbols": [],
"user_mentions": [
{
"name": "@Lyude@queer.party🌹",
"screen_name": "_Lyude",
"indices": [
"0",
"7"
],
"id_str": "1568160860",
"id": "1568160860"
}
],
"urls": []
},
"display_text_range": [
"0",
"83"
],
"favorite_count": "0",
"in_reply_to_status_id_str": "481634023295709185",
"id_str": "481634194729488386",
"in_reply_to_user_id": "1568160860",
"truncated": false,
"retweet_count": "0",
"id": "481634194729488386",
"in_reply_to_status_id": "481634023295709185",
"created_at": "Wed Jun 25 03:05:15 +0000 2014",
"favorited": false,
"full_text": "@_Lyude but how many licks does it take to get to the centre of a tootsie roll pop?",
"lang": "en",
"in_reply_to_screen_name": "_Lyude",
"in_reply_to_user_id_str": "1568160860"
}
}
```
It looks like most of what I want is in `.tweet.full_text`, so let's make a
giant text file with everything in it:
```sh
sed 's/window.YTD.tweet.part0 = //' < tweets.js \
| jq '.[] | [ select(.tweet.retweeted == false) ] | .[].tweet.full_text' \
| sed -r 's/\s*\.?@[A-Za-z0-9_]+\s*//g' \
| grep -v 'RT:' \
| jq --slurp . \
| jq -r .[] \
| sed -e 's!http[s]\?://\S*!!g' \
| sed '/^$/d' \
> tweets.txt
```
This does a few things:
1. Removes that twitter preamble so jq is happy
2. Removes all at-mentions from the training data (so the bot doesn't go on a
mentioning massacre)
3. Removes the "retweet" prefixed tweets from the dataset
4. Removes all urls
5. Removes all blank lines
This should hopefully cut out all the irrelevant extra crap and let the machine
learning focus on my text, which is what I actually care about.
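If you'd rather not squint at the sed and jq incantations, here's a rough,
untested Python sketch of the same cleanup (it assumes Python 3.9+ for
`removeprefix` and uses the same filenames as above); the shell pipeline is what
actually produced my `tweets.txt`:
```python
#!/usr/bin/env python3
# Rough equivalent of the shell pipeline above, stdlib only.
import json
import re

with open("tweets.js", encoding="utf-8") as f:
    raw = f.read()

# 1. Strip the "window.YTD.tweet.part0 = " preamble so it parses as JSON.
data = json.loads(raw.removeprefix("window.YTD.tweet.part0 = "))

lines = []
for entry in data:
    tweet = entry["tweet"]
    # 2. Skip retweets and "RT:" prefixed tweets entirely.
    if tweet.get("retweeted") or "RT:" in tweet["full_text"]:
        continue
    text = tweet["full_text"]
    # 3. Drop @-mentions so the bot doesn't go on a mentioning massacre.
    text = re.sub(r"\s*\.?@[A-Za-z0-9_]+\s*", " ", text)
    # 4. Drop URLs.
    text = re.sub(r"https?://\S*", "", text)
    text = text.strip()
    # 5. Drop anything that ended up blank.
    if text:
        lines.append(text)

with open("tweets.txt", "w", encoding="utf-8") as f:
    f.write("\n".join(lines) + "\n")
```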
## Getting It Up
As a prototype, I fed this all into Markov chains. This is boring, but I was
able to graft together a few projects to get that prototype up quickly. After
some testing, I ended up with things like this:
<iframe src="https://botsin.space/@robocadey/108201675365283068/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="500"
height="225" allowfullscreen="allowfullscreen"></iframe>
This was probably the best thing to come out of the Markov chain testing phase,
the rest of it was regurgitating old tweets.
While I was doing this, I got GPT-2 training thanks to [this iPython
notebook](https://colab.research.google.com/github/sarthakmalik/GPT2.Training.Google.Colaboratory/blob/master/Train_a_GPT_2_Text_Generating_Model_w_GPU.ipynb).
I uploaded my 1.5 megabyte tweets.txt file and let the big pile of linear
algebra mix around for a bit.
Once it was done, I got a one gigabyte tarball that I extracted into a new
folder imaginatively named `gpt2`. Now I had the model, all I needed to do was
run it. So I wrote some Python:
```python
#!/usr/bin/env python3
import gpt_2_simple as gpt2
import json
import os
import socket
import sys
from datetime import datetime

sockpath = "/xe/gpt2/checkpoint/server.sock"

# Load the fine-tuned model once at startup; generation reuses this session.
sess = gpt2.start_tf_sess()
gpt2.load_gpt2(sess, run_name='run1')

# Clean up any stale socket left over from a previous run.
if os.path.exists(sockpath):
    os.remove(sockpath)

sock = socket.socket(socket.AF_UNIX)
sock.bind(sockpath)
print("Listening on", sockpath)
sock.listen(1)

while True:
    connection, client_address = sock.accept()
    try:
        print("generating shitpost")
        result = gpt2.generate(sess,
                               length=512,
                               temperature=0.8,
                               nsamples=1,
                               batch_size=1,
                               return_as_list=True,
                               top_p=0.9,
                               )[0].split("\n")[1:][:-1]
        print("shitpost generated")
        # Send the generated lines back as a JSON array, then hang up.
        connection.send(json.dumps(result).encode())
    finally:
        connection.close()

sock.close()
os.remove(sockpath)
```
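The code that actually calls this isn't shown here, but as a rough sketch (a
hypothetical client, not robocadey's real one), talking to the server amounts
to "connect to the socket, wait for the model to finish, read back a JSON array
of lines":
```python
#!/usr/bin/env python3
# Hypothetical client sketch: the server generates a post as soon as it
# accepts a connection, so just connect, read until EOF, and decode the
# JSON array of lines it sends back.
import json
import socket

sockpath = "/xe/gpt2/checkpoint/server.sock"

with socket.socket(socket.AF_UNIX) as sock:
    sock.connect(sockpath)
    chunks = []
    while True:
        data = sock.recv(4096)
        if not data:
            break
        chunks.append(data)

post = "\n".join(json.loads(b"".join(chunks)))
print(post)
```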
And I used a Dockerfile to set up its environment:
```Dockerfile
FROM python:3
RUN pip3 install gpt-2-simple
WORKDIR /xe/gpt2
COPY . .
CMD python3 main.py
```
Then I bind-mounted the premade model into the container and asked it to think
up something for me. I got back a list of replies and then I knew it was good to
go:
```json
[
"oh dear. I don't know if you're the best mannered technologist you've come to expect from such a unique perspective. On the technical side of things, you're a world-class advocate for open source who recently lost an argument over the state of the open source world to bitter enemies like Python.",
"I also like your approach to DNS! One step at a time. More info here: ",
"tl;dr: it's a bunch of random IP addresses and the outcome is a JSON file that you fill out in as you go.",
"datasoftware.reddit.com/r/programmingcirclejerk-memes",
"datasoftware.reddit.com/r/programmingcirclejerk-memes",
"datasoftware.reddit.com/r/programmingcirclejerk-memes",
"datasoftware.reddit.com/r/programmingcirclejerk-memes",
"Oh dear, can we third-person?",
"A group of us is a CVE-1918 impact statement",
"Is that breaking news?",
"Lol datasom shitposting omg ",
"I'm gonna be on the list for #Giving is easy, don't look so far ahead ",
"Oh dear. Welcome to ThePandora: ",
"I use a lot of shift lol",
"I thought you were an orca",
"Foone, my old computer crashed. What happened to your hard drive? ",
"Yeah I know some of those things should be automated, but this is about experimentation and experimentation is what makes me happy",
"Am I? ",
"Experiment is my favorite part of the article",
"Yes I can, scroll past the how to read words videos",
"I was able to see into space but I cannot seen into your eyes",
"This is with a virtual keyboard/MAC address field",
"Yes but with the keymap \"~M\"",
"Yes this is a structural change, I am trying to tease things out a bit. I am trying to make it slightly different sounding with the key mapping. I am trying to make it different sounding sounding.",
"The main thing I am trying to do is make it easy to type backwards. This is going to take experimentation. I am trying to make it slightly different sounding.",
"Is this vehicle of mercy?",
"God i forgot "
]
```
However, this involved using Docker. Docker is decent, but if I have the option
not to use it, I'd rather not. A friend of mine named `ckie` saw that I was
using Docker for this and decided to package the `gpt_2_simple` library [into
nixpkgs](https://github.com/NixOS/nixpkgs/pull/170713). They also made it easy
for me to pull it into robocadey's environment and then I ripped out Docker,
never to return.
Now the bot could fly. Here was the first thing it posted after it got online
with GPT-2 in a proper way:
<iframe src="https://botsin.space/@robocadey/108209326706890695/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="500"
height="175" height=allowfullscreen="allowfullscreen"></iframe>
I can't make this up.
## Art Gallery
Here are some of my favorite posts it's made. Most of them could pass as my own
tweets.
<iframe src="https://botsin.space/@robocadey/108209924883002812/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="190" allowfullscreen="allowfullscreen"></iframe>
<iframe src="https://botsin.space/@robocadey/108212424672000652/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="190" allowfullscreen="allowfullscreen"></iframe>
<iframe src="https://botsin.space/@robocadey/108215827551779879/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="210" allowfullscreen="allowfullscreen"></iframe>
<iframe src="https://botsin.space/@robocadey/108218889999336372/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="210" allowfullscreen="allowfullscreen"></iframe>
<iframe src="https://botsin.space/@robocadey/108218894030986305/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="800"
height="250" allowfullscreen="allowfullscreen"></iframe>
Some of them get somber and are unintentionally a reflection on the state of the
world we find ourselves in.
<iframe src="https://botsin.space/@robocadey/108219835651549836/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="280" allowfullscreen="allowfullscreen"></iframe>
<iframe src="https://botsin.space/@robocadey/108218522810351900/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="280" allowfullscreen="allowfullscreen"></iframe>
<iframe src="https://botsin.space/@robocadey/108217161432474717/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="345" allowfullscreen="allowfullscreen"></iframe>
<iframe src="https://botsin.space/@robocadey/108216170547691864/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="280" allowfullscreen="allowfullscreen"></iframe>
Others are silly.
<iframe src="https://botsin.space/@robocadey/108217116321450713/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="200" allowfullscreen="allowfullscreen"></iframe>
<iframe src="https://botsin.space/@robocadey/108218107689729996/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="200" allowfullscreen="allowfullscreen"></iframe>
<iframe src="https://botsin.space/@robocadey/108215257978801615/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="180" allowfullscreen="allowfullscreen"></iframe>
I say things like this:
<iframe src="https://pony.social/@cadey/108218301565484230/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
allowfullscreen="allowfullscreen"></iframe><script
src="https://pony.social/embed.js" async="async"></script>
and it fires back with:
<iframe src="https://botsin.space/@robocadey/108218304118515023/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="180" allowfullscreen="allowfullscreen"></iframe>
This is art. It looks like a robot pretending to be a human and just barely
passing at it. This helps you transform your expectations about what human and
bot tweets really are.
<iframe src="https://botsin.space/@robocadey/108213387014890181/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="200" allowfullscreen="allowfullscreen"></iframe>
If you want to influence `robocadey` into giving you an artistic experience,
mention it on the fediverse by adding `@robocadey@botsin.space` to your posts.
It will think a bit and then reply with a brand new post for you.
## Setting It Up
You probably don't want to do this, but if you're convinced you do, then here
are some things that may help you.
1. Use the systemd units in `/run` of [github:Xe/x](https://github.com/Xe/x).
2. Put your model into a squashfs volume that you mount to the
`/var/lib/private/xeserv.robocadey-gpt2/checkpoint` folder.
3. Don't expect any warranty, reliability promises or assistance setting this
up. I made this for myself, not for others. Its source code is made available
to make the code part of that art, but the code is not the art that it makes.
Good luck.
---
I guess what I think about art is that it's not just the medium. It's not just
the expression. It's the combination of it all. The expression, the medium, the
circumstances, all of that leads into what art really is. I could say that art
is the intangible expressions, emotions, and whatever that you experience when
looking at things; but that sounds really really pretentious, so let's just say
that art doesn't exist. Well it does, but only in the mind of the viewer.
There's not some objective scale that can say that something is or is not
art. Art is imagined, and we are conditioned to believe that things are or are
not art based on our upbringing.
I feel that as a shitposter my goal is to challenge people's "objective sense"
of what "can" and "can't" be art by sitting right in the middle of the two and
laughing. Projects like `robocadey` are how I make art. It's maybe 200 lines
of code at most. You could probably recreate most of it based on the contents of
this post alone. I wonder if part of the art here comes from the fact that most
of this is so iterative yet so novel. Through the iteration process I end up
creating novelty.
You could also say that art is the antidote to the kind of suffering that comes
from the fundamental dissatisfactions that people have with everyday life. By
that definition, I think that `robocadey` counts as art.
Either way, it's fun to do these things. I hope that this art can help inspire
you to think differently about the world. Even though it's through a chatbot
that says things like this:
<iframe src="https://botsin.space/@robocadey/108215945151030016/embed"
class="mastodon-embed" style="max-width: 100%; border: 0" width="400"
height="200" allowfullscreen="allowfullscreen"></iframe>
What is this if not art?

View File

@ -10,7 +10,7 @@ As of [a recent commit](https://github.com/Xe/site/commit/b89387f6bbb010907dfa85
to this site's code, it now generates RSS and Atom feeds for future posts on my
blog.
For RSS: `https://christine.website/blog.rss` For RSS: `https://xeiaso.net/blog.rss`
For Atom: `https://christine.webiste/blog.atom`

View File

@ -38,7 +38,7 @@ my RTMP server. This means I could set it up to ingest via my [WireGuard
VPN][sts-wireguard] with very little work. Here is the docker command I run on
my VPN host:
[sts-wireguard]: https://christine.website/blog/series/site-to-site-wireguard [sts-wireguard]: https://xeiaso.net/blog/series/site-to-site-wireguard
```console
$ docker run \

View File

@ -198,7 +198,7 @@ describes why functions fail to do what they intend. Rust has the [`Error`
trait](https://doc.rust-lang.org/std/error/trait.Error.html) which lets you also
create a type that describes why functions fail to do what they intend.
In [my last post](https://christine.website/blog/TLDR-rust-2020-09-19) I In [my last post](https://xeiaso.net/blog/TLDR-rust-2020-09-19) I
described [`eyre`](https://docs.rs/eyre) and the Result type. However, this time
we're going to dive into [`thiserror`](https://docs.rs/thiserror) for making our
own error type. Let's add `thiserror` to our crate:

View File

@ -9,9 +9,9 @@ In this blogpost series I'm going to go over how I created a [site to site](http
This series is going to be broken up into multiple posts about as follows:
- Part 1 - Names and Numbers (this post)
- [Part 2 - DNS](https://christine.website/blog/site-to-site-wireguard-part-2-2019-04-07) - [Part 2 - DNS](https://xeiaso.net/blog/site-to-site-wireguard-part-2-2019-04-07)
- [Part 3 - Custom TLS Certificate Authority](https://christine.website/blog/site-to-site-wireguard-part-3-2019-04-11) - [Part 3 - Custom TLS Certificate Authority](https://xeiaso.net/blog/site-to-site-wireguard-part-3-2019-04-11)
- [Part 4 - HTTPS](https://christine.website/blog/site-to-site-wireguard-part-4-2019-04-16) - [Part 4 - HTTPS](https://xeiaso.net/blog/site-to-site-wireguard-part-4-2019-04-16)
- Setting up additional iOS, macOS, Android and Linux clients
- Other future fun things (seamless tor2web routing, etc)

View File

@ -6,10 +6,10 @@ series: site-to-site-wireguard
This is the second in my Site to Site WireGuard VPN series. You can read the other articles here:
- [Part 1 - Names and Numbers](https://christine.website/blog/site-to-site-wireguard-part-1-2019-04-02) - [Part 1 - Names and Numbers](https://xeiaso.net/blog/site-to-site-wireguard-part-1-2019-04-02)
- Part 2 - DNS (this post)
- [Part 3 - Custom TLS Certificate Authority](https://christine.website/blog/site-to-site-wireguard-part-3-2019-04-11) - [Part 3 - Custom TLS Certificate Authority](https://xeiaso.net/blog/site-to-site-wireguard-part-3-2019-04-11)
- [Part 4 - HTTPS](https://christine.website/blog/site-to-site-wireguard-part-4-2019-04-16) - [Part 4 - HTTPS](https://xeiaso.net/blog/site-to-site-wireguard-part-4-2019-04-16)
- Setting up additional iOS, macOS, Android and Linux clients
- Other future fun things (seamless tor2web routing, etc)
@ -230,7 +230,7 @@ $ dig @127.0.0.1 -x 10.55.0.1
### Using With the iOS WireGuard App
In order to configure [iOS WireGuard clients](https://itunes.apple.com/us/app/wireguard/id1441195209?mt=8) to use this DNS server, open the WireGuard app and tap the name of the configuration we created in the [last post](https://christine.website/blog/site-to-site-wireguard-part-1-2019-04-02). Hit "Edit" in the upper right hand corner and select the "DNS Servers" box. Put `10.55.0.1` in it and hit "Save". Be sure to confirm the VPN is active, then open [LibTerm](https://itunes.apple.com/us/app/libterm/id1380911705?mt=8) and enter in the following: In order to configure [iOS WireGuard clients](https://itunes.apple.com/us/app/wireguard/id1441195209?mt=8) to use this DNS server, open the WireGuard app and tap the name of the configuration we created in the [last post](https://xeiaso.net/blog/site-to-site-wireguard-part-1-2019-04-02). Hit "Edit" in the upper right hand corner and select the "DNS Servers" box. Put `10.55.0.1` in it and hit "Save". Be sure to confirm the VPN is active, then open [LibTerm](https://itunes.apple.com/us/app/libterm/id1380911705?mt=8) and enter in the following:
```
$ dig oho.pele

View File

@ -6,10 +6,10 @@ series: site-to-site-wireguard
This is the third in my Site to Site WireGuard VPN series. You can read the other articles here:
- [Part 1 - Names and Numbers](https://christine.website/blog/site-to-site-wireguard-part-1-2019-04-02) - [Part 1 - Names and Numbers](https://xeiaso.net/blog/site-to-site-wireguard-part-1-2019-04-02)
- [Part 2 - DNS](https://christine.website/blog/site-to-site-wireguard-part-2-2019-04-07) - [Part 2 - DNS](https://xeiaso.net/blog/site-to-site-wireguard-part-2-2019-04-07)
- Part 3 - Custom TLS Certificate Authority (this post)
- [Part 4 - HTTPS](https://christine.website/blog/site-to-site-wireguard-part-4-2019-04-16) - [Part 4 - HTTPS](https://xeiaso.net/blog/site-to-site-wireguard-part-4-2019-04-16)
- Setting up additional iOS, macOS, Android and Linux clients
- Other future fun things (seamless tor2web routing, etc)
@ -26,7 +26,7 @@ A TLS Certificate Authority is a certificate that is allowed to issue other cert
### Why Should I Create One?
Generally, it is useful to create a custom TLS certificate authority when there are custom DNS domains being used. This allows you to create `https://` links for your internal services (which can then act as [Progressive Web Apps](https://christine.website/blog/progressive-webapp-conversion-2019-01-26)). This will also fully prevent the ["Not Secure"](https://versprite.com/blog/http-labeled-not-secure/) blurb from showing up in the URL bar. Generally, it is useful to create a custom TLS certificate authority when there are custom DNS domains being used. This allows you to create `https://` links for your internal services (which can then act as [Progressive Web Apps](https://xeiaso.net/blog/progressive-webapp-conversion-2019-01-26)). This will also fully prevent the ["Not Secure"](https://versprite.com/blog/http-labeled-not-secure/) blurb from showing up in the URL bar.
Sometimes your needs may involve needing to see what an application is doing over TLS traffic. Having a custom TLS certificate authority already set up makes this a much faster thing to do.

View File

@ -6,9 +6,9 @@ series: site-to-site-wireguard
This is the fourth post in my Site to Site WireGuard VPN series. You can read the other articles here:
- [Part 1 - Names and Numbers](https://christine.website/blog/site-to-site-wireguard-part-1-2019-04-02) - [Part 1 - Names and Numbers](https://xeiaso.net/blog/site-to-site-wireguard-part-1-2019-04-02)
- [Part 2 - DNS](https://christine.website/blog/site-to-site-wireguard-part-2-2019-04-07) - [Part 2 - DNS](https://xeiaso.net/blog/site-to-site-wireguard-part-2-2019-04-07)
- [Part 3 - Custom TLS Certificate Authority](https://christine.website/blog/site-to-site-wireguard-part-3-2019-04-11) - [Part 3 - Custom TLS Certificate Authority](https://xeiaso.net/blog/site-to-site-wireguard-part-3-2019-04-11)
- Part 4 - HTTPS (this post)
- Setting up additional iOS, macOS, Android and Linux clients
- Other future fun things (seamless tor2web routing, etc)
@ -85,7 +85,7 @@ This will allow only Caddy and root to manage certificates in that folder.
### Custom CA Certificate Permissions
In the [last post](https://christine.website/blog/site-to-site-wireguard-part-3-2019-04-11), custom certificates were created at `/srv/within/certs`. Caddy is going to need to have the correct permissions in order to be able to read them. In the [last post](https://xeiaso.net/blog/site-to-site-wireguard-part-3-2019-04-11), custom certificates were created at `/srv/within/certs`. Caddy is going to need to have the correct permissions in order to be able to read them.
```shell
#!/bin/sh

View File

@ -58,7 +58,7 @@ me](mailto:me@christine.website) and let me know them.
</noscript>
I want to use [Xeact](https://christine.website/blog/xeact-0.0.69-2021-11-18) I want to use [Xeact](https://xeiaso.net/blog/xeact-0.0.69-2021-11-18)
more in my website. I am trying to hit a balance of avoiding structural
JavaScript while also allowing me to experiment with new and interesting ways of
doing things. To this end I have created a custom HTML element that allows me to

View File

@ -0,0 +1,101 @@
---
title: "Site Update: Hero Images"
date: 2022-06-08
---
For a while I've been wondering how I can add dramatic flair to my website with
so-called "hero images". These images are tools that let you describe the mood a
website wants to evoke. I've been unsure how to best implement them on my
website, but with the advent of MidJourney and other image
generation APIs/algorithms I think I have found a way to create these without
too much effort on my part, and the results are pretty fantastic:
<xeblog-hero file="secret-to-life" prompt="the secret to life, the universe and everything, concept art"></xeblog-hero>
I have generated a bunch of other images that I'm going to use for my other
posts. I'll give out a desktop wallpaper sized version of each of these images
on my [Patreon](https://patreon.com/cadey).
Under the hood this is powered by
[lol_html](https://github.com/cloudflare/lol-html) and
[Maud](https://maud.lambda.xyz/). The magic is mostly contained in a function
that generates a `<figure>` HTML element (which I just learned exists today). I
use a function that looks like this for generating the `<xeblog-hero>` snippets:
```rust
pub fn xeblog_hero(file: String, prompt: Option<String>) -> Markup {
    html! {
        figure.hero style="margin:0" {
            picture style="margin:0" {
                source type="image/avif" srcset={"https://cdn.xeiaso.net/file/christine-static/hero/" (file) ".avif"};
                source type="image/webp" srcset={"https://cdn.xeiaso.net/file/christine-static/hero/" (file) ".webp"};
                img style="padding:0" alt={"hero image " (file)} src={"https://cdn.xeiaso.net/file/christine-static/hero/" (file) "-smol.png"};
            }
            figcaption { "Image generated by MidJourney" @if let Some(prompt) = prompt { " -- " (prompt) } }
        }
    }
}
```
I have it wired up with lol_html like this:
```rust
lol_html::element!("xeblog-hero", |el| {
    let file = el.get_attribute("file").expect("wanted xeblog-hero to contain file");
    el.replace(&crate::tmpl::xeblog_hero(file, el.get_attribute("prompt")).0, ContentType::Html);
    Ok(())
})
```
The result is that I can declare hero images with HTML fragments like this:
```html
<xeblog-hero file="miku-dark-souls" prompt="hatsune miku, elden ring, dark souls, concept art, crowbar"></xeblog-hero>
```
And I get this:
<xeblog-hero file="miku-dark-souls" prompt="hatsune miku, elden ring, dark souls, concept art, crowbar"></xeblog-hero>
<xeblog-conv name="Mara" mood="hacker">This is powered by the
[`<figure>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/figure)
tag, which is a new discovery to us. This is probably one of the most useful
tags we never knew about and removed the need to write a bunch of annoying CSS
and HTML.</xeblog-conv>
The webp and AVIF versions of the hero images are higher resolution so that
they look nicer on retina screens. However, the png versions of these are
locked to a resolution of 800x356 pixels because I was unable to crush them
below a size of half a megabyte at full resolution. Realistically, this should
only affect older browsers on slower hardware, so I don't expect this to have
too much impact on most users.
<xeblog-conv name="Cadey" mood="coffee">If you don't want to see these hero
images, you can remove them with a userstyle like this:
```css
figure.hero {
display: none;
}
```
</xeblog-conv>
I'm likely going to convert over most of my website templates to use Maud. I'm
very happy with it and I think it is incredibly useful to express your HTML in
Rust instead of something that has to be compiled to Rust. In practice it
reminds me of the Nim library [emerald](http://flyx.github.io/emerald/), which
lets you write HTML using Nim functions similar to how you use Maud.
Here's a few more examples of hero images I have generated:
<xeblog-hero file="the-forbidden-shape" prompt="the forbidden shape"></xeblog-hero>
<xeblog-hero file="great-wave-cyberpunk" prompt="the great wave off of kanagawa, cyberpunk, hanzi inscription"></xeblog-hero>
Normally I will only have one image per post and it will usually be after the
introduction paragraph. The prompt will usually be related to the article topic,
but sometimes I will take artistic liberty. If you have suggestions for prompts,
please [contact me](/contact) with those ideas.
I hope these updates on how I've been messing with my site are interesting. I'm
trying to capture the spirit of how I'm implementing these changes as well as
details of how everything fits together.

View File

@ -0,0 +1,117 @@
---
title: "Site Update: I Fixed the Patron Page"
date: 2022-05-18
---
So I fixed [the patron page](https://xeiaso.net/patrons) and the
underlying issue was stupid enough that I feel like explaining it so you all can
learn from my mistake.
<xeblog-conv name="Numa" mood="delet">For those of you playing the christine dot
website home game, look
[here](https://github.com/Xe/site/commit/e2b9f384bf4033eddf321b5b5020ac4847609b37)
to see the fix and play along!</xeblog-conv>
My blog is basically a thin wrapper around two basic things:
1. Markdown files (such as for this article you are reading right now)
2. Static files (such as for the CSS that is making this article look nice)
When I create a package out of my blog's code, I have a layout that resembles
the directory structure in my git repo:
```console
$ ls -l /nix/store/crc94hqyb546w3w9fzdyr8zvz3xf3p1j-xesite-2.4.0
total 64
dr-xr-xr-x 2 root root 4096 Dec 31 1969 bin/
dr-xr-xr-x 2 root root 20480 Dec 31 1969 blog/
-r--r--r-- 24 root root 8663 Dec 31 1969 config.dhall
dr-xr-xr-x 2 root root 4096 Dec 31 1969 css/
dr-xr-xr-x 2 root root 4096 Dec 31 1969 gallery/
-r--r--r-- 52 root root 5902 Dec 31 1969 signalboost.dhall
dr-xr-xr-x 12 root root 4096 Dec 31 1969 static/
dr-xr-xr-x 2 root root 4096 Dec 31 1969 talks/
```
Here is my git repo for comparison:
```console
$ ls -l
total 188
drwxr-xr-x 2 cadey users 20480 May 18 20:21 blog/
-rw-r--r-- 1 cadey users 77521 May 18 20:15 Cargo.lock
-rw-r--r-- 1 cadey users 1795 May 18 20:15 Cargo.toml
-rw-r--r-- 1 cadey users 198 Oct 30 2020 CHANGELOG.md
-rw-r--r-- 1 cadey users 2779 Apr 5 20:32 config.dhall
drwxr-xr-x 2 cadey users 4096 Apr 16 11:56 css/
-rw-r--r-- 1 cadey users 1325 Jan 15 2021 default.nix
drwxr-xr-x 2 cadey users 4096 Mar 15 2020 docs/
drwxr-xr-x 2 cadey users 4096 Mar 21 20:23 examples/
-rw-r--r-- 1 cadey users 1882 Apr 30 16:13 flake.lock
-rw-r--r-- 1 cadey users 6547 Apr 24 20:35 flake.nix
drwxr-xr-x 2 cadey users 4096 Jun 17 2020 gallery/
drwxr-xr-x 6 cadey users 4096 Mar 21 20:23 lib/
-rw-r--r-- 1 cadey users 887 Jan 1 2021 LICENSE
drwxr-xr-x 2 cadey users 4096 Dec 18 00:06 nix/
-rw-r--r-- 1 cadey users 1467 Feb 21 20:39 README.md
drwxr-xr-x 2 cadey users 4096 Mar 21 21:21 scripts/
-rw-r--r-- 1 cadey users 5902 May 18 16:44 signalboost.dhall
drwxr-xr-x 5 cadey users 4096 Apr 5 20:32 src/
drwxr-xr-x 12 cadey users 4096 Jan 10 17:22 static/
drwxr-xr-x 2 cadey users 4096 Nov 10 2021 talks/
drwxr-xr-x 4 cadey users 4096 Apr 16 09:56 target/
drwxr-xr-x 2 cadey users 4096 May 15 07:59 templates/
```
The main problem is that my site expects all of this to be in the current
working directory. In my site's systemd unit I have a launch script that looks
like this:
```nix
script = let site = packages.default;
in ''
export SOCKPATH=${cfg.sockPath}
export DOMAIN=${toString cfg.domain}
cd ${site}
exec ${site}/bin/xesite
'';
```
However, the Nix store isn't writable by user code. My Patreon API client looked
for its credentials in the current working directory. When I set it up on the
target server I put the credentials in `/srv/within/xesite/.patreon.json`,
thinking that the `WorkingDirectory` setting would make it Just Work:
```nix
WorkingDirectory = "/srv/within/xesite";
```
But this was immediately blown away by the `cd` command on line 4 of the script.
I have fixed this by making my Patreon client put its credentials in the home
directory explicitly with this fragment of code:
```rust
let mut p = dirs::home_dir().unwrap_or(".".into());
p.push(".patreon.json");
```
This will make the Patreon credentials get properly stored in the service's home
directory (which is writable). This will also make the patrons page work
persistently without having to manually rotate secrets every month.
Here's a good lesson for you all: make sure to print out the absolute path of
everything in error messages. For the longest time I had to debug this from this
error message:
```
patrons: xesite::app: ".patreon.json" does not exist
```
I was looking at the directory `/srv/within/xesite` and I could see the file
sitting right in front of my eyes. This made me feel like I was going crazy and I've been
putting off fixing it because of that. However, it's a simple fix and I was
blind.
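To make that lesson concrete, here's a tiny hypothetical sketch (in Python for
brevity, not this site's actual Rust code) of the difference that resolving the
path makes in an error message:
```python
#!/usr/bin/env python3
# Hypothetical sketch: an error message that includes the absolute path
# tells you *where* the program looked, not just what it looked for.
from pathlib import Path

creds = Path(".patreon.json")
if not creds.exists():
    # Relative path: ".patreon.json does not exist" -- useless for debugging.
    # Absolute path: spells out exactly which directory was searched.
    print(f"{creds.resolve()} does not exist")
```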
<xeblog-conv name="Cadey"
mood="coffee">aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa</xeblog-conv>

View File

@ -0,0 +1,42 @@
---
title: "Site Update: Salary Transparency Page Added"
date: 2022-06-14
author: Sephie
---
<xeblog-hero file="miku-dark-souls" prompt="hatsune miku, elden ring, dark souls, concept art, crowbar"></xeblog-hero>
I have added a [salary transparency
page](https://xeiaso.net/salary-transparency) to the blog. This page lists my
salary for every job I've had in tech. I have had this data open to the public
for years, but I feel this should be more prominently displayed on my website.
As someone who has seen pay discrimination in action first-hand, I believe data
is one of the ways that we can end this pointless hiding of information that
leaves people uninformed and hurt. By laying my
hand out in the open like this, I hope to ensure that people are better informed
about how much money they can make, so that they can be paid equally for equal
work.
Raw, machine processable data (including employer names) is available at
`/api/salary_transparency.json`. The JSON format is not stable. Do not treat it as
such. I reserve the right to change the formatting or semantics of the JSON
format at any time without warning. The raw data is in `/dhall/jobHistory.dhall`
in my site's git repository.
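If you want to poke at that raw data yourself, here's a minimal sketch of
pulling it down; because the format is not stable, it just pretty-prints
whatever the endpoint returns instead of assuming any schema:
```python
#!/usr/bin/env python3
# Sketch: fetch the machine-readable salary data and pretty-print it.
# The format is explicitly unstable, so don't bake a schema into your tooling.
import json
import urllib.request

URL = "https://xeiaso.net/api/salary_transparency.json"

with urllib.request.urlopen(URL) as resp:
    data = json.load(resp)

print(json.dumps(data, indent=2))
```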
I have also taken the time to make sure that the [old
post](https://xeiaso.net/blog/my-career-in-dates-titles-salaries-2019-03-14)
maintains an up-to-date list. I do not want to break semantics on my website
without a very good reason, and leaving the old post un-updated would, I feel,
be doing a disservice to the community.
Please consider publishing your salary data like this as well. Through open,
voluntary transparency we can help end stigmas around discussing pay and help
ensure that the next generations of people in tech are treated fairly. Stigmas
thrive in darkness but die in the light of day. You can help end the stigma by
playing your cards out in the open like this.
It can be scary to do this; however, every person that does it makes it that
much easier for the next person to do it.
Don't be afraid.

View File

@ -8,13 +8,13 @@ tags:
---
I made a little interactive fiction story! You can find it
[here](https://christine.website/static/stories/spaceship.html). This was [here](https://xeiaso.net/static/stories/spaceship.html). This was
written as a result of a terrible idea I had after watching some QuakeCon
announcements.
I wonder if I can get away with using an `<iframe>` in 2021:
<iframe src="https://christine.website/static/stories/spaceship.html" width="100%" height=500></iframe> <iframe src="https://xeiaso.net/static/stories/spaceship.html" width="100%" height=500></iframe>
This is adapted from [a twitter
thread](https://twitter.com/theprincessxena/status/1428479144699088903).

View File

@ -0,0 +1,68 @@
---
title: "Spearphishing: it can happen to you too"
date: 2022-07-09
tags:
- linkedin
- infosec
---
<xeblog-hero file="the-fool" prompt="The Fool in a woodcut tarot card style"></xeblog-hero>
For some reason, LinkedIn has become the de-facto social network for
professionals. It is viewed as a powerful networking and marketing site that
lets professionals communicate, find new opportunities and source talent at
eye-watering speed and rates. However, at the same time this also means that
LinkedIn becomes a treasure trove of data to enable spearphishing attacks.
Let's consider [this attack against popular "play to earn" game Axie
Infinity](https://www.theblock.co/post/156038/how-a-fake-job-offer-took-down-the-worlds-most-popular-crypto-game).
The attackers had PDF-based malware that allowed them to get access to a target
computer, so they needed someone to open a PDF to trigger the exploit chain that
let them gain a foothold. But they specifically wanted people who likely had
access to the crypto wallets that enable control of the blockchain. LinkedIn let
them filter for employees at the company behind Axie Infinity who were
developers, and they likely started spearphishing by role and seniority. The details
of the attack spell out that the attackers had set up a whole fake interview
process to convince the marks that the process was legitimate and they put the
malware in the offer letter. The attackers later gained access to the validator
wallets and then they were able to make off with over half a billion dollars
worth of cryptocurrency.
<xeblog-conv name="Numa" mood="delet">Maybe, just maybe you shouldn't store a
majority of the keys required to validate something on _the same computer_.
Especially if those keypairs control assets worth close to _half a billion
dollars_. Holy heck.</xeblog-conv>
The malware was in the offer letter. This is the kind of social engineering
attack that I bet any one of you reading this article could fall for. Hell, I'd
probably fall for this. This may be the wrong kind of take to have, but I'm
really starting to wonder if using LinkedIn so much is actually bad for
security. It's not just recruiters reading through LinkedIn anymore, it's also
threat actors that are trying to break in and do God knows what. Maybe we as an
industry should stop feeding all of that data into LinkedIn. Not only would it
give you less recruiter spam, maybe it'll make spearphishing attacks more
difficult too.
<xeblog-conv name="Cadey" mood="coffee">Also, yes we can't trust PDFs anymore,
especially after exploits like
[FORCEDENTRY](https://googleprojectzero.blogspot.com/2021/12/a-deep-dive-into-nso-zero-click.html)
became a thing.</xeblog-conv>
Either way, I may end up getting a disposable machine for dealing with reading
PDFs from unknown sources in the future. I could use a virtual machine for this,
but if my threat model includes PDFs having exploits in them then I probably
can't trust a virtual machine to be a reasonable security barrier. I don't know.
It sucks that we can't trust people anymore.
I kinda wish we could.
---
<xeblog-conv name="Mara" mood="hacker">Fun fact: the tarot card "The Fool"
doesn't actually imply idiocy in a malicious way. The major arcana of the tarot
is a bunch of memes that describe the story of The Fool's journey through magick
and learning how the world works. The Fool is not an idiot, The Fool is just
someone that is unaware of the difficulties they are going to face in life and
treats things optimistically. Think a free spirit as opposed to someone that is
foolhardy (though foolhardiness is the meaning of The Fool when the card is
inverted).</xeblog-conv>

View File

@ -26,7 +26,7 @@ problems that require deep thought and consideration.
I was originally gonna release this by the end of the year as a cohesive novel,
however it looks like the cards aren't falling that way. I want to instead shift
[Spellblade](https://christine.website/blog/spellblade-plans-2021-08-16) into a [Spellblade](https://xeiaso.net/blog/spellblade-plans-2021-08-16) into a
web novel, which I am defining as something that I'll release in big chunks like
this every month or so. I don't want to compromise any of the artistic vision or
whatever, I just want each "chunk" to be a lot more finely scoped than "the

View File

@ -64,7 +64,7 @@ def get_feed(feed_url):
con.commit()
print("got feed %s" % (feed_url))
get_feed("https://christine.website/blog.json") get_feed("https://xeiaso.net/blog.json")
```
So now let's play with the data! Let's load the database schema in with the
@ -76,14 +76,14 @@ $ sqlite3 data.db < schema.sql
[The less-than symbol there is a redirect, it loads the data from `schema.sql`
as standard input to the `sqlite` command. See <a
href="https://christine.website/blog/fun-with-redirection-2021-09-22">here</a> href="https://xeiaso.net/blog/fun-with-redirection-2021-09-22">here</a>
for more information on redirections.](conversation://Mara/hacker)
Then run that python script to populate the database:
```console
$ python ./jsonfeedfetch.py
got feed https://christine.website/blog.json got feed https://xeiaso.net/blog.json
```
Then open up the SQLite command line:
@ -179,7 +179,7 @@ And run that python script again, then the data should automatically show up:
```
sqlite3> SELECT * FROM jsonfeed_metadata;
https://christine.website/blog.json|Xe's Blog|My blog posts and rants about various technology things.|https://christine.website|2022-01-04 https://xeiaso.net/blog.json|Xe's Blog|My blog posts and rants about various technology things.|https://xeiaso.net|2022-01-04
```
It's like magic!

View File

@ -0,0 +1,106 @@
---
title: "The Stanley Parable: Ultra Deluxe Review"
date: 2022-07-25
series: reviews
---
Every so often a game comes around that is genuinely hard to review, especially
when you are trying to avoid spoiling the magic of the game in that review. This
is a game that is even harder to review than normal because it's an absolute
philosophical document. This game riffs on the games industry super
hard and it really shows. I'm going to try to avoid spoilers in this article,
except for a few I made up.
<xeblog-conv name="Cadey" mood="coffee">I was going to include screenshots in
this article, but it's difficult for me to get them without spoiling the subtle
comedy at hand, so I'm going to leave this as a text-only review.</xeblog-conv>
The Stanley Parable: Ultra Deluxe is either the second or third game in the
series. At first this game was a Half-Life 2 mod that came out of nowhere and
was one of the most beloved mods ever released. Then they made it a proper game
on the Source engine and expanded it a bit. After a while they wanted to
continue the parable and expand it even more, but they weren't able to get it on
consoles with it still being a Source engine game. So they ported it to Unity
and the end result is The Stanley Parable: Ultra Deluxe. It is one of my
favorite games of all time.
It is a deeply limited game: you can only move around and interact with things.
The story is about an office drone named Stanley that pushes buttons based on
instructions from his computer. The big thing that this game does, though, is make
you realize the inherent paradoxes in its own design.
<xeblog-conv name="Mara" mood="happy">Being mechanically limited like this is
not actually a bad thing like the phrasing might imply. This means that the main
focus of the gameplay is not on the micro actions the player can take. In this
case the main focus is on how the player interacts with the story and not how
the player interacts with their controller or puzzles or tactics. Additionally,
the mechanical limitations of the gameplay are thematically aligned with the
story's premise of being an office drone in ways it can play with. Think
dramatic irony taken to its logical conclusion.</xeblog-conv>
Endings that make you look like you had exercised your free will actually boil
down to your actions being controlled by following the narrator's voice. This
is absolutely taking the piss out of how most modern AAA game design works,
guiding you with an invisible hand and making it _seem_ like you had the free
will to choose what was going on when in fact you were really just following the
invisible guidance the whole time.
However, I think one of the best examples of how The Stanley Parable riffs on
mainstream game design is via the Adventure Line™️ that shows up in one branch of
the game. The Line™️ is an obvious riff on games like Dead Space where you can
summon a line to tell you where to go at any time. It shows how _boring_ modern
game design is by making you _see_ the consequences of it. If you follow the
narrator's voice, you get boring endings.
In many modern AAA games, you have the free will to choose to follow the main
story and finish all the quests or whatever, but not much else. Consider Call of
Duty or Battlefield. You are John America and you have to kill the enemies to
death before they kill you to death by throwing bullets at you. You get to the
end of the level and blow up the brown people some more or something and then
it's suddenly a victory for America. But what did you really accomplish? You
just followed the line. Walk outside of the intended playable area? 10 second
timer until the game kills you. Shoot a person with the wrong skin color? The
game kills you.
<xeblog-conv name="Numa" mood="delet">If you manage to clip out of bounds in the
escape ending, the screen will fade to black and you will be transported to a
temperate climate. Then a t-posing model in terrible armor will tell you that
they used to be an adventurer until they took an arrow to the knee. Hope that's not a
marriage proposal!</xeblog-conv>
However, in The Stanley Parable you can defy the narrator, and that's where the
game really opens up. It's great to get into the area where the game is
unfinished and then have the narrator complain about deadlines, scheduling
delays, investor funding, and wanting to avoid having to stuff it to the gills
with microtransactions. You can legitimately glitch your way out of bounds and then
the game will reward you with a new ending you didn't know was possible. The
game takes the concept of the illusion of free will and plays with it.
The game makes you think about what games _can_ be. It makes you wonder if the
potted plant soliloquy after the broom closet ending speaks to the mental state
of the author more than anything. Of all of the artistic endeavors that games as
a medium _can_ have, we end up seeing very few or none of them in mainstream
gaming. Sure, you get your occasional 4k120fps robot killer waifu with a bow and
a wacky stick, but none of it really _revolutionizes_ video games as an art
form. It's all just derivative of the generic "unalive bad guy and save earth"
trope.
<xeblog-conv name="Mara" mood="hacker">If you want some games that really
revolutionize what games can be, check out
[Celeste](https://mattmakesgames.itch.io/celeste), [Secret Little
Haven](https://ristar.itch.io/secret-little-haven), [Baba Is
You](https://hempuli.itch.io/baba), and [Glittermitten
Grove](https://twinbeard.itch.io/glittermitten-grove). All of these games really
challenge what games can be and experiment with radically different kinds of
art. You will never see mainstream games be as risk-taking as this because art
is fundamentally risky and capitalism wants line to go up, so publishers go out
of their way to make sure that mainstream games are as safe and as likely to
sell many copies as possible.</xeblog-conv>
I made up the thing about the potted plant, but if you had played the game then
you'd probably have started it back up to look for it just to see what was
there. I wonder if I made someone stand at that potted plant for like 5 minutes
or something. This game sparks creativity in ways that other mainstream games
just fundamentally don't. If you've been looking for something different in your
video game diet, I really suggest you give it a try. Go in as blind as possible.
I'm not paid in any way to say this; I genuinely think this game is really good.

View File

@ -4,7 +4,7 @@ date: 2019-05-30
series: templeos series: templeos
--- ---
The [last post](https://christine.website/blog/templeos-1-installation-and-basic-use-2019-05-20) covered a lot of the basic usage of TempleOS. This post is going to be significantly different, as I'm going to be porting part of the TempleOS kernel to WebAssembly as a live demo. The [last post](https://xeiaso.net/blog/templeos-1-installation-and-basic-use-2019-05-20) covered a lot of the basic usage of TempleOS. This post is going to be significantly different, as I'm going to be porting part of the TempleOS kernel to WebAssembly as a live demo.
This post may contain words used in ways and places that look blasphemous at first glance. No blasphemy is intended, though it is an unfortunate requirement for covering this part of TempleOS' kernel. It's worth noting that Terry Davis [legitimately believed that TempleOS is a temple of the Lord Yahweh](https://templeos.holyc.xyz/Wb/Doc/Charter.html): This post may contain words used in ways and places that look blasphemous at first glance. No blasphemy is intended, though it is an unfortunate requirement for covering this part of TempleOS' kernel. It's worth noting that Terry Davis [legitimately believed that TempleOS is a temple of the Lord Yahweh](https://templeos.holyc.xyz/Wb/Doc/Charter.html):

344
blog/the-oasis.markdown Normal file
View File

@ -0,0 +1,344 @@
---
title: The Oasis
date: 2022-06-03
series: malto
tags:
- furry
---
Tosen did a final check of his backpack. It was a very hot day in Tashei, but
the river radiated an aura of cool air that protected everyone from the heat of
the harsh sun. He had his backup cloak, a hydroflask and some fish jerky, not
to mention the package for the client. *Not exactly the best equipment but this
will work. Riltash is a half day away at worst,* Tosen thought to himself. He
squatted down and fit his arms into the pack's straps. The pack easily weighed a
quarter of what he did, but as he regained his catlike balance he secured the
waistband and got ready to head out.
The oracle predicted that there would be a sandstorm late in the evening, but it
wasn't even noon yet. He pulled out his compass, let it settle and then set out
to the southeast.
Walking in the desert always has its own unique rhythm to it. With the
unrelenting heat of the sun pounding down on the sand, the ground itself can
feel like a million angry daggers with every step. Tosen thought ahead of this
issue. He got himself a pair of sandshoes from the fancy magic item store. The
only downside was that his main connection to the earth was significantly
weakened. Chee paws are some of the most finely tuned sensory organs on Malto
(second only to Snep paws), and they were his main warning about sandstorms.
*The oracle isn't wrong most of the time. I'm fine, I'm fine. I can't feel the
desert but I'm fine.*
He kept walking past all different kinds of cacti. His favorite ones were the
ones that were made up of a bunch of spiky ovals built on top of each other. He'd
never want to get stung by one and risk the wrath of the serrated needles, but
he'd always thought that they had such a unique look. *If I had a house of my
own, I'd grow one of them.*
As he continued walking he started to focus on the patterns of walking. Every
step was taken one after the other. With every step, his foot slid to the side
ever so slightly. The sand wrapped around his shoes and warmed his feet. The
worst part of the sandshoes was when sand trickled into the back of them. This
required him to stop every so often to purge the sand out of his shoes, because
otherwise it would hurt a lot.
This continued for what felt like hours. He checked his compass every so often
and made sure he was on the right path. It started to move a bit weird compared
to normal. It was taking longer to find north. Normally this would be concerning
to him, but the desert had entranced him. Left, right, left, right, left, right.
Each step bringing him closer to his destination.
Then the sky changed color. The brilliant blue started to get stained with a
light brown that worked its way across what Tosen could see. Tosen instantly
noticed this change and pulled over his face veil. The sandstorm had started
early. The oracle was wrong.
Tosen looked around for some kind of shelter but all he could see was the
remains of a broken wagon that looked like its better days had seen better days.
It was barely enough protection, but with his spare robe patched over the
biggest holes it was just enough to ride out the storm. He took the leap and
hunkered down.
*It's only a level 2 storm. It'll be over in an hour. I'll make it to Riltash
today. Everything will work out.*
As he sat down he reached for his compass and couldn't find it. He reached into
the pocket that his compass normally lives in and felt it conspicuously empty.
He looked up towards the path he walked in on and saw a golden glint in the
sand. It was so close. If he could get to it, he'd know where to go. He'd find
his way to Riltash.
But the sandstorm started to kick up. The sky started fading towards darker and
darker shades of brown and he could feel the sand beat against his makeshift
shelter. The hot sand was whipped up and he could hear it pitter and patter the
wooden and cloth walls.
After an hour, the sandstorm started showing signs of slowing down. *This is
nothing close to a level 2*, Tosen thought to himself. His spare robe was
totally ruined, but he survived. As things died down, he remembered his compass
and tore down enough of his shelter to be able to find it. It wasn't where it
was before. *Okay, it's made of gold, it can't have gone that far*. He grabbed
his pack, almost fell over from the sudden weight and started to scan around him
in 360 degrees. He saw the familiar glint of its knob and walked over to its
resting site. The looking glass was cracked. Rotating it did nothing. The
compass was broken.
He was lost.
It took every ounce of strength Tosen had to avoid shouting out in anger. He
needed to conserve the water. Miau was huge. He needed to stretch his supplies
to last as long as possible.
He couldn't resist the urge. He shouted out in anger for an instant before
realizing what he did and covering his mouth.
It took a while for Tosen to regain his senses. The shock of the event wasn't
sitting well with him. His mother's compass was destroyed. His rendezvous time
with the client was surely shot. At least it wasn't the solar apex anymore.
*Okay, I can deal with this. I should stay put until sunset. The sun sets to the
east. I can go diagonally into the sunset to get to Riltash*.
He more confidently went back to his makeshift shelter. It was in worse
condition after the storm, but at least it would give him shade. The sun was on
its way down, but it was still a deadly laser that he needed to worry about.
*It's just me and you, buddy.*
Some time passed and the sun was very visibly in the eastern portion of the sky.
Tosen grabbed for his hydroflask and took a sip. *It was still cold. At least
that oracle was good for SOMETHING.* He stood up and grabbed his pack. He left a
bit of red cloth as a flag on the southeast side of his makeshift shelter to
tell anyone looking for him where he went.
Then it was back to the rhythms of the desert. The desert felt confusing without
the comforting pulse of nature under his paws. But, he continued taking steps
and continued walking forwards.
Left. Right. Left. Right. Left. Right. Things felt more deliberate this time.
There was a frustration to his walking. He was so frustrated at the whole
situation. As he walked, he felt his emotions fuming over this whole debacle.
He walked and walked. The sunset had started to peek out its head and show Tosen
a display of fantastic colors as he continued to walk. *This isn't right. I
should have reached Riltash by now. It's only a few miles from Tashei.* He took
another swig of his hydroflask and felt it notably lighter than it should be. He
was low on water. This was especially dangerous out here. *This is going to be a
long night, isn't it.*
The sunset continued and the colors gradually started to fade to the black night
sky. Stars started to peek out without the sun to hide them. Tosen scanned over
the constellations and found the North Star. From there he worked out that he
was going to the southeast like he thought he was. He looked around and found a
few miserable bushes to use for firewood, but they were in a sea of thorns. He
had talents in fire magick, but he didn't trust using it with so many dry thorns
nearby. *You know what, what's the worst that can happen? I get warm? It's going
to be so cold soon, I need to do something.*
He held out his hand and mentally started to trace out the triangles like he
learned from school. Each triangle stacked on top of each other and then built
up into a viable casting circle glowing a brilliant orange in front of his hand.
The area around him was illuminated from the magickal force, the thorns casting
long evil shadows against cacti and other miserable little bushes.
"Toor sha!"
A weak puff of flame came out of his hand and tickled one of the thorny vines.
There wasn't much of a response and it looked like the fire was going to go out
so he cast a fireball in its place. The triangles shifted into squares and a
baseball sized orb of energy started to form in his hand.
"Toor shaltel!"
The fireball formed around his fingers and he chucked it right into the pit of
thorns. They were all set on fire simultaneously. After a brilliant blaze, the
fire petered out into nothing as fast as it started. He looked over to see if
the firewood was still there; his spell circle was still active as a flashlight,
but that kindling was nowhere to be seen. It was incinerated with everything
else.
He had to resist shouting out in anger again. *Okay, okay, calm down. I set off
a massive signal fire. That should alert someone. I can't keep this spell circle
up, I'll mind down and then I'll be in worse trouble.* He killed off the spell
with a flick of the wrist and the darkness crept in. He was alone. *I need to
keep walking.* So he started walking, not realizing that he changed direction
after the incident with the thorns.
Left! Right! Left! Right! Left! Right! Each step felt angry and defiant. Tosen
started to feel legitimate anger at the desert. It was normally his home, he
grew up in the sands of Miau, but tonight it was his enemy. He defiantly marched
towards where he thought Riltash was, but got no closer.
It was a very long night walking towards town. It was a desperate, angry march.
He stopped a few times to take a bite of jerky and swig it down with the bare
minimum of water he could get away with. He thought it would be colder, but it
turns out all that fur ended up going towards something.
The night continued and was broken by the inklings of a sunrise. Tosen looked up
in dismay. He had walked all night and he was nowhere closer to his destination.
An overwhelming feeling of sadness blanketed him and he broke down to start
crying.
He looked forward and saw something different. He saw what looked like the faint
outline of Riltash's signal statue. His sadness was instantly transmuted to a
mixture of relief and joy and his second wind started to hit. He trudged forward
towards that statue. Towards his salvation. Towards his client. Towards his
paycheck. Towards the next step to move out to Zhalram with his friends. Towards
his future.
He kept up his pace and got closer. The statue looked wrong. Riltash's signal
statue bears the visage of one of the water goddesses in the region. This looked different,
almost like a Chee. He wasn't aware of any local Chee deities. He looked down
and saw shimmers. It almost looked like a mirage, but then he remembered
something. There were rumors of an oasis south of Riltash. Could this be that?
Could there be water?
His second wind became a third and then a fourth wind. He got close enough to
take a better look at everything and it was that oasis!
*Water!*
His walk became a sprint and the sand started to be diluted with grass. As he
walked on the grass he suddenly felt an overwhelming sense of calm. It was as if
all of the anger, all the vitriol, all the hatred towards the desert vanished in
an instant. He paused for a moment but then continued on. The promise of fresh
water was too great. He was so thirsty.
He put his pack down, took off his shoes and tested the water with a paw. It was
cool to the touch, about 10 degrees Celsius. It was the real deal. It was water.
He took off his robe, folded it haphazardly next to his pack and grabbed his
hydroflask. He opened it and shoved it under so it could be filled. Once he was
satisfied that it was full, he bent over and started to lap up the water
greedily.
A figure vaguely resembling the statue was watching from a nearby house. The
figure chuckled to themselves. They decided they should intervene. They donned a
white robe and walked out to the weary traveler.
Tosen was enamoured by the water. His exhaustion had finally caught up to him.
He looked back at his pack and saw a figure walking towards him from some kind
of house. He instantly jumped to alertness, but didn't feel the fear that
generally came with being startled like that. The figure felt familiar yet alien
somehow.
The figure looked at his pack and his visibly broken compass. They looked right
into Tosen's eyes. "Rough day?"
Tosen stammered a few times and eventually managed to come up with a reply:
"Y...yeah. I was caught in that sandstorm yesterday. I hid from it in a broken
wagon."
The figure reached out a hand to him. "Come with me. You need a rest. I'll come
back to take care of your things. I have a spare bed for travelers like you."
Tosen didn't have enough energy to argue with the stranger's offer of
hospitality and followed them into their house. They guided Tosen to the guest
room and sat on one of the chairs. Tosen collapsed on the most comfortable bed
he had ever felt in his life. All he could get out was a weak "thank....youuuu"
before his lost sleep caught up and he was out like a light. The figure pulled a
blanket over him and closed the shades to make the room nice and dark.
Tosen was asleep until the late afternoon. The figure had moved his stuff
inside, done his laundry, mended a hole in the pack and was lounging in a chair
for a nap of their own.
Tosen woke up, stretched out and yawned loudly. He looked up at the ceiling and
realized how unfamiliar it was. That angel in his dream was real. Had he
actually walked through the night? The figure knocked at the door. "Hey, come
and have a meal. You must be starving." He was. Tosen stood up and opened the
door. The figure was wearing a white robe and a golden necklace. They looked
like the archetypal vision of Chee beauty. Tosen noted that he was unable to
refer to that figure with any pronoun but "they". *That's weird...*
The figure started to speak: "I am Shal'tash. I saw you hurting and I decided to
intervene and help you. Come, I have some food almost ready." Shal'tash started
to walk towards the kitchen and Tosen followed. He made his way to a rather
ordinary looking wooden table and took a seat. His stuff was near the table and
he was grateful for his host's generosity.
They were making pancakes. The batter was being poured into a metal pan in
little groups. Tosen noticed that the stove seemed to be powered by its own
magic circle, a non-organic magic emitter was being used to create the fire
needed for cooking. It was a weak burner, but it was enough for Shal'tash to
cook with.
Tosen was befuddled. He had never seen such a thing in action. He got up and
looked at it closely. Shal'tash looked back and smiled, "Never seen a stove
burner before?"
"Not like that no, it looks like it's casting a weak fire spell, but
constantly."
"This is a lot more efficient than the coal burning stoves you have. This lets
you use the energy equivalent of a fireball to get a half hour of cooking heat,
or an hour or two of torchlight. I'm surprised you didn't know about this."
Tosen looked confused. "You mean you can use the square level spell to
supercharge the triangle level spell? No, they never taught us this. But how is
the burner even working?"
Shal'tash laughed. "It's nothing special. I just rooted the circle under the pan
instead of on my hand. Here, you try it." They flicked their wrist and banished
the magic circle. "Now cast a fireball but focus on the pan instead of your
hand, let it simmer a bit, and then kick off create fire."
Tosen was confused but nodded and tried to comply. After a moment Shal'tash
piped in: "no, don't think about where the pan is relative to your hand. Think
about where the pan is relative to the pan. You're so close. I know you can do
it."
Tosen nodded and started over. The circle started to be inscribed below the pan
and Shal'tash's face lit up like a Christmas tree. "Toor sha!"
The burner was lit. The fire was continuously burning and Tosen didn't feel the
sting of a continuous cast. "Perfect. See how easy this is? Spend the mana on
the fireball, then use it for the weaker spell. No need to waste any."
Tosen was astounded. It normally took him ages to learn magical skills, but here
he was on the second try with this person and their vague instruction, and he
did two things he thought were impossible. It was like magic was all new all
over again. *Can I use this to make a bunch of fireballs when casting a
firestorm? How far does this go?*
"Be careful with this, you could really hurt someone if you do displacement
foolishly. They must have stopped teaching it for a reason." Shal'tash finished
the stack of pancakes and put the plate in the middle of the table. "Now let's
eat!"
They shared a meal. It was just what Tosen needed.
The meal was finished. Shal'tash looked over to Tosen. "So where are you headed?
I can point you in the right direction."
"Riltash, I have a delivery that I'm incredibly late for by now."
Shal'tash chuckled and pointed towards the statue. "The statue points towards
Riltash. Just go straight north and you'll get there in 20 minutes."
Tosen looked incredulous. "I was really that close?"
"Yeah, though it looked like you needed to get lost. It can be good for you."
He didn't understand what they meant by that, but he didn't think he needed to.
Shal'tash walked with Tosen to the north side of the oasis. Tosen looked towards
his saviour and was suddenly overcome with emotion. "Thank you so much. You
saved me."
"You are welcome. I saved you because I was in a situation worse than yours when
I found this oasis. I don't want anyone to experience the pain that I have felt,
so I saved you before it could get that bad."
"What can I do to repay you?"
"You don't need to do anything right now. Just save someone else when you can.
If you want, come back here and give me a visit. It'd be fun to catch up,
Tosen."
"Thanks again! I'll be back!"
Tosen walked off towards his payday. He looked back every so often and the oasis
became more distant and then faded completely from sight into the rest of the
sands. He was alone again, but not in spirit.
He never noticed that they knew his name without him ever telling it to them.
Shal'tash walked back towards their house and stood by their cactus. They
watched as Tosen faded into the sands and then headed inside. Their job was
complete.

View File

@ -3,7 +3,7 @@ title: The Origin of h
date: 2015-12-14 date: 2015-12-14
--- ---
NOTE: There is a [second part](https://christine.website/blog/formal-grammar-of-h-2019-05-19) to this article now with a formal grammar. NOTE: There is a [second part](https://xeiaso.net/blog/formal-grammar-of-h-2019-05-19) to this article now with a formal grammar.
For a while I have been pepetuating a small joke between my friends, co-workers and community members of various communities (whether or not this has been beneficial or harmful is out of the scope of this post). The whole "joke" is that someone says "h", another person says "h" back. For a while I have been pepetuating a small joke between my friends, co-workers and community members of various communities (whether or not this has been beneficial or harmful is out of the scope of this post). The whole "joke" is that someone says "h", another person says "h" back.

View File

@ -8,7 +8,7 @@ tags:
--- ---
EDIT(M02 20 2020): I've written a bit of a rebuttal to my own post EDIT(M02 20 2020): I've written a bit of a rebuttal to my own post
[here](https://christine.website/blog/i-was-wrong-about-nix-2020-02-10). I am [here](https://xeiaso.net/blog/i-was-wrong-about-nix-2020-02-10). I am
keeping this post up for posterity. keeping this post up for posterity.
I don't really know how I feel about [Nix][nix]. It's a functional package I don't really know how I feel about [Nix][nix]. It's a functional package

View File

@ -0,0 +1,58 @@
---
title: "Twitter, Mastodon and The Parable of rasengan"
date: 2022-04-25
tags:
- twitter
- reaction
---
So a lot of things happened today. The threat that Elon Musk made to
buy Twitter seems to have been real. As I write this, my current
understanding is that the Twitter board of directors is in the process
of accepting the offer that Elon Musk made to buy the company.
<xeblog-conv name="Cadey" mood="coffee">This was not on my bingo card
for 2022. I'm starting to think that I got a dud bingo card. I was
hoping that "Alien Invasion" would win out but it looks like that
won't be the case yet. Damn.</xeblog-conv>
I genuinely have no idea how I should be reacting to this news. I
spend a lot of time on Twitter. It's a lot of how I talk with people,
network in the tech community and generally shitpost. I do so much
idle shitposting on Twitter that it would probably account for a lot of
the non-work written word I produce on a weekly basis.
I'm really not sure what I feel about this, but the feelings that I am
getting remind me of [what happened to
freenode](/blog/series/freenode). A while ago someone I follow on
Twitter made a tweet that said something like:
> Elon Musk is to Twitter as rasengan is to freenode
<xeblog-conv name="Mara" mood="hacker">For context: rasengan is the
person that single-handedly destroyed freenode by apparently buying a
worthless holding company and then inciting a lot of
drama. For more information, check out the book Closed
Projects.</xeblog-conv>
I have made my book [Closed
Projects](/blog/closed-projects-2022-03-24) free for the next week.
This contains my moods, reactions and emotions as I was processing
everything falling apart around me. I feel this may help you
understand the emotions that you get watching this shitshow unfold
with Twitter.
For a direct link to the sale page, click
[here](https://itch.io/s/69916/elon-bought-twitter-sale). I will
donate proceeds from this sale to charity. If you choose to send me a
couple bucks for my book, I will donate them to the [Orca
Conservancy](https://www.orcaconservancy.org/).
I am also on Mastodon at
[@cadey@pony.social](https://pony.social/@cadey). Should something
happen to Twitter such that I can't participate there anymore, I will
be on Mastodon. If Twitter really starts falling apart, I will
probably be a lot more active on Mastodon. And probably writing a lot
more.
Let's hope things turn out well.

View File

@ -42,7 +42,7 @@ how things changed:
As of the time of writing this post, it is January third, 2020 and the roadmap As of the time of writing this post, it is January third, 2020 and the roadmap
is apparently to release V 0.2 this month. is apparently to release V 0.2 this month.
Let's see what's been fixed since [my last article](https://christine.website/blog/v-vaporware-2019-06-23). Let's see what's been fixed since [my last article](https://xeiaso.net/blog/v-vaporware-2019-06-23).
## Compile Speed ## Compile Speed

View File

@ -125,7 +125,7 @@ designing this, but I think the next character in my blog is going to be an
anthro snow leopard named Alicia. I want Alicia to be a beginner that is very anthro snow leopard named Alicia. I want Alicia to be a beginner that is very
new to computer programming and other topics, which would then make Mara into new to computer programming and other topics, which would then make Mara into
more of a teacher type. I may also introduce my own OC Cadey (the orca looking more of a teacher type. I may also introduce my own OC Cadey (the orca looking
thing you can see [here](https://christine.website/static/img/avatar_large.png) thing you can see [here](https://xeiaso.net/static/img/avatar_large.png)
or in the favicon of my site) into the mix to reply to these questions in or in the favicon of my site) into the mix to reply to these questions in
something more close to the Socratic method. something more close to the Socratic method.

View File

@ -37,7 +37,7 @@ Be well.
--- ---
Every so often I like to check in on the [V Programming Language][vlang]. It's been Every so often I like to check in on the [V Programming Language][vlang]. It's been
about six months since [my last post](https://christine.website/blog/v-vvork-in-progress-2020-01-03), about six months since [my last post](https://xeiaso.net/blog/v-vvork-in-progress-2020-01-03),
so I thought I'd take another look at it and see what progress has been done in six so I thought I'd take another look at it and see what progress has been done in six
months. months.

View File

@ -51,10 +51,10 @@ job. TLS configuration is not its job. Its job is to run your code. Everything
else should just be provided by the system. else should just be provided by the system.
I wrote a I wrote a
[blogpost](https://christine.website/blog/land-1-syscalls-file-io-2018-06-18) [blogpost](https://xeiaso.net/blog/land-1-syscalls-file-io-2018-06-18)
about this work and even did a about this work and even did a
[talk at GoCon [talk at GoCon
Canada](https://christine.website/talks/webassembly-on-the-server-system-calls-2019-05-31) Canada](https://xeiaso.net/talks/webassembly-on-the-server-system-calls-2019-05-31)
about it. about it.
And this worked for several months as I learned WebAssembly and started to And this worked for several months as I learned WebAssembly and started to
@ -93,8 +93,8 @@ people understand low-level operating system development.
I've even written a few blogposts about Olin: I've even written a few blogposts about Olin:
- [Olin: Why](https://christine.website/blog/olin-1-why-09-1-2018) - [Olin: Why](https://xeiaso.net/blog/olin-1-why-09-1-2018)
- [Olin: The Future](https://christine.website/blog/olin-2-the-future-09-5-2018) - [Olin: The Future](https://xeiaso.net/blog/olin-2-the-future-09-5-2018)
But, this was great for running stuff interactively and via the command line. It But, this was great for running stuff interactively and via the command line. It
left me wanting more. I wanted to have that mythical functions as a service left me wanting more. I wanted to have that mythical functions as a service
@ -230,5 +230,5 @@ keep the dream alive!
[olincwa]: https://github.com/Xe/olin/tree/master/docs/cwa-spec [olincwa]: https://github.com/Xe/olin/tree/master/docs/cwa-spec
[olincwarust]: https://github.com/Xe/olin/tree/master/cwa/olin [olincwarust]: https://github.com/Xe/olin/tree/master/cwa/olin
[olincwatest]: https://github.com/Xe/olin/blob/master/cwa/tests/src/main.rs [olincwatest]: https://github.com/Xe/olin/blob/master/cwa/tests/src/main.rs
[olintempleos]: https://christine.website/blog/templeos-2-god-the-rng-2019-05-30 [olintempleos]: https://xeiaso.net/blog/templeos-2-god-the-rng-2019-05-30
[wasmcloud]: https://tulpa.dev/within/wasmcloud [wasmcloud]: https://tulpa.dev/within/wasmcloud

902
blog/we-have-go-2.markdown Normal file
View File

@ -0,0 +1,902 @@
---
title: We Already Have Go 2
date: 2022-05-25
tags:
- golang
- generics
- context
- modules
---
I've been using Go since Go 1.4 (2014-2015 ish), and since then I've seen the
language evolve significantly. The Go I write today is roughly the
same Go as the Go I wrote back when I was still learning the language, but the
toolchain has changed in ways that make it so much nicer in practice. Here are
the biggest things that changed how I use Go on a regular basis:
* The compiler rewrite in Go
* Go modules
* The context package
* Generics
This is a good thing. A lot of people use Go, and my career would not exist in
its current form without it. My time in the Go community has been
_catalytic_ to my career goals and it’s made me into the professional I am
today. Without having met the people I did in the Go slack, I would probably not
have gotten as lucky as I have as consistently as I have.
Releasing a "Go 2" has become a philosophical and political challenge due to the
forces that be. "Go 2" has kind of gotten the feeling of "this is never going to
happen, is it?" with how the political forces within and without the Go team are
functioning. They seem to have been incrementally releasing new features and
using version gating in `go.mod` to make it easier on people instead of a big
release with breaking changes all over the standard library.
This is pretty great and I am well in favour of this approach, but with all of
the changes that have built up, there really should be a Go 2 by this point, if
only to make no significant changes and tag what we have today as Go 2.
<xeblog-conv name="Cadey" mood="coffee">Take everything I say here with a grain
of salt the size of east Texas. I am not an expert in programming language
design and I do not pretend to be one on TV. I am also not a member of the Go
team nor do I pretend to be one or see myself becoming one in the
future.<br /><br />If you are on the Go team and think that something I said
here is demonstrably wrong, please [contact me](/contact) so I can correct it. I
have tried to contain my personal feelings or observations about things to these
conversation snippets.</xeblog-conv>
This is a whirlwind tour of the huge progress that has been made in the Go
compiler, toolchain, and standard library since Go 1 was released, including
what I'd consider to be the headline features of Go 2. I highly encourage you
to read this fairly large post in chunks because it will feel like _a lot_ if
you read it all at once.
## The Compiler Rewrite in Go
When the Go compiler was first written, it was written in C because the core Go
team has a background in Plan 9 and C was its lingua franca. However, as a
result of either it being written in C or the design around all the tools it was
shelling out to, it wasn't easy to cross-compile Go programs. If you were
building Windows programs on a Mac, you needed to do a separate install of Go
from source with other targets enabled. This worked, but it wasn't the default,
and eventually the Go compiler rewrite in Go changed this so that Go could
cross-compile natively with no extra effort required.
<xeblog-conv name="Cadey" mood="enby">This has been such an amazingly productive
part of the Go toolchain that I was shocked that Go didn’t have this out of the
gate at version 1. Most people that use Go today don’t know that there was a
point where Go didn’t have the easy to use cross-compiling superpower it
currently has, and I think that is a more sure marker of success than anything
else.</xeblog-conv>
<xeblog-conv name="Mara" mood="happy">The cross compliation powers are why
Tailscale uses Go so extensively throughout its core product. Every Tailscale
client is built on the same Go source tree and everything is in lockstep with
eachother, provided people actually update their apps. This kind of thing would
be at the least impossible or at the most very difficult in other languages like
Rust or C++.</xeblog-conv>
This one feature is probably at the heart of more CI flows, Debian package
releases and other workflows than we can know. It's really hard to overstate
how simple this kind of thing makes distributing software for other
architectures, especially given that macOS has just switched over to aarch64
CPUs.
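To make that concrete, here's a minimal sketch of what cross compiling looks
like in practice. The program and the GOOS/GOARCH pairs in the comments are
illustrative, not pulled from any real CI config:

```go
// Any plain Go program can be cross compiled by setting two environment
// variables; no extra toolchain installs are required. For example:
//
//	GOOS=windows GOARCH=amd64 go build -o hello.exe .
//	GOOS=darwin  GOARCH=arm64 go build -o hello-mac .
//	GOOS=linux   GOARCH=riscv64 go build -o hello-riscv .
package main

import (
	"fmt"
	"runtime"
)

func main() {
	// runtime.GOOS and runtime.GOARCH report whatever target this binary
	// was built for, which is handy for sanity-checking CI artifacts.
	fmt.Printf("hello from %s/%s\n", runtime.GOOS, runtime.GOARCH)
}
```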
Having the compiler be self-hosting does end up causing a minor amount of
grief for people wanting to bootstrap a Go compiler from absolute source code
on a new Linux distribution (and slightly more so once the minimum Go compiler
version needed to compile Go is raised to Go 1.17 with the release of Go 1.19,
about 6 months from the time of this post being written). This isn't too
big of a practical issue given how fast the compiler builds, but it is a
nonzero amount of work. The bootstrapping can be made simpler with
[gccgo](https://gcc.gnu.org/onlinedocs/gccgo/), a GCC frontend that is mostly
compatible with the semantics and user experience of the Go compiler that
Google makes.
Another key thing porting the compiler to Go unlocks is the ability to
parallelize the compilation of a single Go package. Back when the compiler was
written in C, the main source of parallelism was the fact that each Go package
was compiled in parallel. This led to people splitting up bigger packages into
smaller sub-packages in order to speedhack the compiler. Having the compiler be
written in Go means that the compiler can take advantage of Go features like its
dead-simple concurrency primitives to spread the load out across all the cores
on the machine.
<xeblog-conv name="Mara" mood="hacker">The Go compiler is fast sure, but
over a certain point having each package be compiled in a single-threaded manner
adds up and can make build times slow. This was a lot worse when things like the
AWS, GCP and Kubernetes client libraries had everything in one big package.
Building those packages could take minutes, which is very long in Go
time.</xeblog-conv>
## Go Modules
In Go's dependency model, you have a folder that contains all your Go code
called the `GOPATH`. The `GOPATH` has a few top level folders that have a
well-known meaning in the Go ecosystem:
* bin: binary files made by `go install` or `go get` go here
* pkg: intermediate compiler state goes here
* src: Go packages go here
`GOPATH` has one major advantage: it is ruthlessly easy to understand the
correlation between the packages you import in your code and their locations on
disk.
If you need to see what `within.website/ln` is doing, you go to
`GOPATH/src/within.website/ln`. The files you are looking for are somewhere in
there. You don’t have to really understand how the package manager works (mostly
because there isn’t one). If you want to hack something up you just go to the
folder and add the changes you want to see.
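As a minimal sketch of that mapping (the blank import is only there to show the
path resolution; it assumes the package already exists in your GOPATH):

```go
// With GOPATH set to $HOME/go, the import below resolves to the directory
// $HOME/go/src/within.website/ln on disk: the import path *is* the file
// path, so "go read the code" is a single cd away.
package main

import _ "within.website/ln" // found at $GOPATH/src/within.website/ln

func main() {}
```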
You can delete all of the intermediate compiler state easily in one fell swoop.
Just delete the `pkg` folder and poof, it’s all gone. This was great when you
needed to free up a bunch of disk space really quickly because over months the
small amount of incremental compiler state can really add up.
The Go compiler would fetch any missing packages from the internet at build time
so things Just Worked™️. This makes it utterly trivial to check out a project and
then build/run it. That combined with `go get` to automatically just figure
things out and install them made installing programs written in Go so easy that
it’s almost magic. This combined with Go's preference for making static binaries
as much as possible meant that even if the user didn't have Go installed you could
easily make a package to hand off to your users.
The GOPATH was conceptually simple to reason about. Go code goes in the GOPATH. The
best place for it was in the GOPATH. There's no reason to put it anywhere else.
Everything was organized into its place and it was lovely.
This wasn’t perfect though. There were notable flaws in this setup that were
easy to run into in practice:
* There wasn't a good way to make sure that everyone was using the _same copies_
of every library. People did add vendoring tools later to check that everyone
was using the same copies of every package, but this also introduced problems
when one project used one version of a dependency and another project used
another in ways that were mutually incompatible.
* The process to get the newest version of a dependency was to grab the latest
commit off of the default branch of that Git repo. There was support for SVN,
Mercurial and Fossil, but in practice Git was the most used one so it's almost
not worth mentioning the other version control systems. This also left you at
the mercy of other random people having good code security sense and required
you to audit your dependencies, but this is fairly standard across ecosystems.
* Dependency names were case sensitive on Linux but not on Windows or macOS.
Arguably this is a "Windows and macOS are broken for backwards compatibility
reasons" thing, but this did bite me at random times without warning.
* If the wrong random people deleted their GitHub repos, there's a chance your
builds could break unless your GOPATH had the packages in it already. Then you
could share that with your coworkers or the build machine somehow, maybe even
upload those packages to a git repository to soft-fork it.
* The default location for the GOPATH created a folder in your home directory.
<xeblog-conv name="Cadey" mood="coffee">Yeah, yeah, this default was added later
but still people complained about having to put the GOPATH somewhere at first.
Having to choose a place to put all the Go code they would use seemed like a big
choice that people really wanted solid guidance and defaults on. After a while
they changed this to default to `~/go` (with an easy to use command to influence
the defaults without having to set an environment variable). I don't personally
understand the arguments people have for wanting to keep their home directory
"clean", but their preferences are valid regardless.</xeblog-conv>
Overall I think GOPATH was a net good thing for Go. It had its downsides, but as
far as these things go it was a very opinionated place to start from. This is
typical of Go (much as people argue about it), but the main thing it
focused on was making Go conceptually simple. There's not a lot going on there.
You have code in the folder and then that's where the Go compiler looks for
other code. It's a very lightweight approach to things that a lot of other
languages could learn a lot from. It's great for monorepos because it basically
treats all your Go code as one big monorepo. So many other languages don’t
really translate well to working in a monorepo context like Go does.
### Vendoring
The problem of making sure everyone had the same versions of everything ended up
becoming a big one in practice. I'm assuming that the original intent of the
GOPATH was to be similar to how Google's internal monorepo worked, where
everyone clones and deals with the entire GOPATH in source control. You'd then
have to do GOPATH juggling between monorepos, but the intent was to have
everything in one big monorepo anyways, so this wasn't thought of as much of a
big deal in practice. It turns out that people in fact did not want to treat Go
code this way; in practice it conflicted with the dependency model that Go
encouraged people to use, where people consume libraries from GitHub or other
such repository hosting sites.
The main disconnect between importing from a GOPATH monorepo and a Go library
off of GitHub is that when you import from a monorepo with a GOPATH in it, you
need to be sure to import the repository path and not the path used inside the
repository. This sounds weird, but it means you'd import
`github.com/Xe/x/src/github.com/Xe/x/markov` instead of
`github.com/Xe/x/markov`. This means that things need to be extracted _out of_
monorepos and reformatted into "flat" repos so that you can grab only the one
package you need. This became tedious in practice.
In Go 1.5 (the one where they rewrote the compiler in Go) they added support for
[vendoring code into your
repo](https://medium.com/@freeformz/go-1-5-s-vendor-experiment-fd3e830f52c3).
The idea here was to make it easy to get closer to the model that the Go authors
envisioned for how people should use Go. Go code should all be in one big happy
repo and everything should have its place in your GOPATH. This combined with
other tools people made allowed you to vendor all of your dependencies into a
`vendor` folder and then you could do whatever you wanted from there.
One of the big advantages of the `vendor` folder was that you could clone your
git repo, create a new process namespace and then run tests without a network
stack. Everything would work offline and you wouldn't have to worry about
external state leaking in. Not to mention removing the risk of someone deleting
their GitHub repos causing a huge problem for your builds.
<xeblog-conv name="Mara" mood="happy">Save tests that require internet access or
a database engine!</xeblog-conv>
This worked for a very long time. People were able to vendor their code into
their repos and everything was better for people using Go. However the most
critical oversight with the `vendor` folder approach was that the Go team didn't
create an official tool to manage that `vendor` folder. They wanted to let tools
like `godep` and `glide` handle that. This is kind of a reasonable take: Go
comes from a very Google culture where this kind of problem doesn't happen, so
as a result they probably wouldn't have been able to come up with something
that meets the needs of the outside world very easily.
<xeblog-conv name="Cadey" mood="enby">I can't speak for how `godep` or `glide`
works, I never really used them enough to have a solid opinion. I do remember
using [`vendor`](https://github.com/bmizerany/vendor) in my own projects though.
That had no real dependency resolution algorithm to speak of because it assumed
that you had everything working locally when you vendored the code.</xeblog-conv>
### `dep`
After a while the Go team worked with people in the community to come up with an
"official experiment" in tracking dependencies called `dep`. `dep` was a tool
that used some more fancy computer science maths to help developers declare
dependencies for projects the way you do in other ecosystems. When `dep`
was done thinking, it emitted a bunch of files in `vendor` and a lockfile in
your repository. This worked really well, and when I was working at Heroku this
was basically our bread and butter for how to deal with Go code.
<xeblog-conv name="Cadey" mood="enby">It probably helped that my manager was on
the team that wrote `dep`.</xeblog-conv>
One of the biggest advantages of `dep` over other tools was the way that it
solved versioning. It worked by having each package declare
[constraints](https://golang.github.io/dep/docs/the-solver.html) in the ranges
of versions that everything requires. This allowed it to do some fancy
dependency resolution math similar to how the solvers in `npm` or `cargo` work.
This worked fantastically in the 99% case. There were some cases that were
fairly easy to accidentally get yourself into where you could make the solver
loop infinitely, though, as well as ending up in a state where you have mutually
incompatible transitive dependencies without any real way around it.
<xeblog-conv name="Mara" mood="hacker">`npm` and `cargo` work around this by
letting you use multiple versions of a single dependency in a
project.</xeblog-conv>
However, these cases were really, really rare, only appearing in much, much larger
repositories. I don't think I practically ran into this, but I'm sure someone
reading this right now found themselves in `dep` hell and probably has a hell of
a war story around it.
### vgo and Modules
This led the Go team to come up with a middle path between the unrestricted
madness of GOPATH and something more maximal like `dep`. They eventually called
this Go modules and the core reasons for it are outlined in [this series of
technical posts](https://research.swtch.com/vgo).
<xeblog-conv name="Mara" mood="hacker">These posts are a very good read and I'd
highly suggest reading them if you've never seen them before. It outlines the
problem space and the justification for the choices that Go modules ended up
using. I don't agree with all of what is said there, but overall it's well
worth reading at least once if you want to get an idea of the inspirations
that lead to Go modules.</xeblog-conv>
Apparently the development of Go modules came as a complete surprise,
even to the core developer team of `dep`. I'm fairly sure this led my
manager to take up woodworking as his main non-work side hobby; I can only
wonder about the kind of resentment this created for other parts of the
`dep` team. They were under the impression that `dep` was going to be the
future of the ecosystem (likely under the subcommand `go dep`) and then had
the rug pulled out from under their feet.
<xeblog-conv name="Cadey" mood="coffee">The `dep` team was as close as we've
gotten for having people in the _actual industry_ using Go _in production_
outside of Google having a real voice in how Go is used in the real world. I
fear that we will never have this kind of thing happen again.<br /><br />It's
also worth noting that the fallout of this lead to the core `dep` team leaving
the Go community.</xeblog-conv>
<xeblog-conv name="Mara" mood="hmm">Well, Google has to be using Go modules in
their monorepo, right? If that's the official build system for Go it makes sense
that they'd be dogfooding it hard enough that they'd need to use the tool in the
same way that everyone else did.</xeblog-conv>
<xeblog-conv name="Numa" mood="delet">lol nope. They use an overcomplicated
bazel/blaze abomination that has developed in parallel to their NIH'd source
control server. Google doesn't have to deal with the downsides of Go modules
unless it's in a project like Kubernetes. It's easy to imagine that they just
don't have the same problems that everyone else does due to how weird Google
prod is. Google only has problems that Google has, and statistically your
company is NOT Google.</xeblog-conv>
Go modules does solve one very critical problem for the Go ecosystem though: it
allows you to have the equivalent of the GOPATH but with multiple versions of
dependencies in it. It allows you to have `within.website/ln@v0.7` and
`within.website/ln@v0.9` as dependencies for _two different projects_ without
having to vendor source code or do advanced GOPATH manipulation between
projects. It also adds cryptographic checksumming for each Go module that you
download from the internet, so that you can be sure the code wasn't tampered
with in-flight. They also created a cryptographic checksum comparison server so
that you could ask a third party to validate what it thinks the checksum is so
you can be sure that the code isn't tampered with on the maintainer's side. This
also allows you to avoid having to shell out to `git` every time you fetch a
module that someone else has fetched before. Companies could run their own Go
module proxy and then use that to provide offline access to Go code fetched from
the internet.
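As a sketch of what that looks like in a `go.mod` (the module path and version
numbers here are illustrative, not taken from a real project):

```
module example.com/project-a

go 1.18

// Project A pins v0.7.x. A completely separate project can require
// within.website/ln v0.9.0 in its own go.mod, and both versions live
// side by side in the shared module cache.
require within.website/ln v0.7.0
```

Each downloaded version also gets a hash recorded in `go.sum`, which is what
gets cross-checked against the checksum database.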
<xeblog-conv name="Mara" mood="hmm">Wait, couldn't this allow Google to see the
source code of all of your Go dependencies? How would this intersect with
private repositories that shouldn't ever be on anything but work
machines?</xeblog-conv>
<xeblog-conv name="Cadey" mood="coffee">Yeah, this was one of the big privacy
disadvantages out of the gate with Go modules. I think that in practice the
disadvantages are limited, but still the fact that it defaults to phoning home
to Google every time you run a Go build without all the dependencies present
locally is kind of questionable. They did make up for this with the checksum
verification database a little, but it's still kinda sus.<br /><br />I'm not
aware of any companies I've worked at running their own internal Go module
caching servers, but I ran my own for a very long time.</xeblog-conv>
The earliest version of Go modules was basically a glorified `vendor` folder
manager named `vgo`. This worked out amazingly well and probably made
prototyping a hell of a lot easier. It worked well enough that we used
it in production for many services at Heroku. We had no real issues with it,
and most of the friction came from the fact that most of the existing ecosystem
had already been using `dep` or `glide`.
<xeblog-conv name="Mara" mood="hacker">There was a bit of interoperability glue
that allowed `vgo` to parse the dependency definitions in `dep`, `godep` and
`glide`. This still exists today and helps `go mod init` tell what dependencies
to import into the Go module to aid migration.</xeblog-conv>
If they had shipped this as-is, it probably would have been a huge success. It
would also let people continue to use `dep`, `glide` and `godep`, but just doing
that would leave the ecosystem kinda fragmented. You'd need to have code
for all 4 version management systems to parse their configuration files and
implement algorithms that would be compatible with the semantics of all of them.
It would work, and the Go team is definitely smart enough to do it, but in
practice it would be a huge mess.
This also solved the case-insensitive filesystem problem with
[bang-casing](https://go.dev/ref/mod#goproxy-protocol). This allows them to
encode the capital letters in a path in a way that works on macOS and Windows
without having to worry about horrifying hacks that are only really in place for
Photoshop to keep working. For example, a hypothetical module path like
`github.com/Xe/Thing` gets stored on disk and served by module proxies as
`github.com/!xe/!thing`.
### The Subtle Problem of `v2`
However one of the bigger downsides that came with Go modules is what I've been
calling the "v2 landmine" that Semantic Import Versioning gives you. One of the
very earliest bits of Go advice was to make the import paths for version 1 of a
project and version 2 of a project different so that people can mix the two to
allow more graceful upgrading across a larger project. Semantic Import
Versioning enforces this at the toolchain level, which means that it can be the
gate between compiling your code or not.
<xeblog-conv name="Cadey" mood="coffee">Many people have been telling me that
I’m kind of off base for thinking that this is a landmine for people, but I am
using the term “landmine” to talk about this because I feel like it reflects the
rough edges of unexpectedly encountering this in the wild. It kinda feels like
you stepped on a landmine.</xeblog-conv>
<xeblog-conv name="Numa" mood="delet">It's also worth noting that the protobuf
team didn't use major version 2 when making an API breaking change. They
defended this by saying that they are changing the import path away from GitHub,
but it feels like they wanted to avoid the v2 problem.</xeblog-conv>
The core of this is that when you create major version 2 of a Go project, you
need to adjust all your import paths everywhere in that project to import the
`v2` of that package or you will silently import the `v1` version of that
package. This can end up making large projects create circular dependencies on
themselves, which is quite confusing in practice. When consumers are aware of
this, they can use it to more gradually upgrade larger codebases to the
next major version of a Go module, which allows for smaller refactors.
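Here's a minimal sketch of what that looks like from the consumer side, assuming
a hypothetical `/v2` release of `within.website/ln` (purely illustrative; that
module has no v2):

```go
package main

// Without the /v2 suffix, the toolchain quietly resolves an import to the
// newest v1.x release even if a v2 exists. Both imports below are legal at
// the same time, which is what lets big codebases upgrade gradually.
import (
	_ "within.website/ln"    // latest v1.x of the module
	_ "within.website/ln/v2" // hypothetical major version 2
)

func main() {}
```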
This also applies to consumers. Given that this kind of thing is something that
you only do in Go, it can come out of left field. The Go router
[github.com/go-chi/chi](https://github.com/go-chi/chi/issues/462) tried doing
modules in the past and found that it led to confused users. Conveniently, they
only really found this out after the Go modules design was considered final;
Semantic Import Versioning has always been a part of Go modules and the Go team
is now refusing to budge on this.
<xeblog-conv name="Cadey" mood="coffee">My suggestion to people is to never
release a version `1.x.x` of a Go project to avoid the "v2 landmine". The Go
team claims that the right bit of tooling can help ease the pain, but this
tooling never really made it out into the public. I bet it works great inside
Google's internal monorepo though!</xeblog-conv>
When a Go project that had already hit major version 2 or higher adopted Go
modules, maintainers were forced to make another major version bump because it
would break all of the import paths for every package in the module. This
caused some maintainers to meet Go modules with
resistance to avoid confusing their consumers. The workarounds for people that
still used GOPATH using upstream code with Semantic Import Versioning in it
were also kind of annoying at first until the Go team added "minimal module
awareness" to GOPATH mode. Then it was fine.
<xeblog-conv name="Mara" mood="hmm">It feels like you are overly focusing on the
`v2` problem. It can't really be that bad, can it? `grpc-gateway` updated to v2
without any major issues. What's a real-world example of this?</xeblog-conv>
<xeblog-conv name="Numa" mood="delet">The situation with
[github.com/gofrs/uuid](https://github.com/gofrs/uuid/issues/61) was heckin'
bad. Arguably it's a teething issue as the ecosystem was still moving to the new
modules situation, but it was especially bad for projects that were already at
major version 2 or higher because adding Go modules support meant that they
needed to update the major version just for Go modules. This was a tough sell
and rightly so.<br /><br />This was claimed to be made a non-issue by the right
application of tooling on the side, but this tooling was either never developed
or not released to us mere mortals outside of Google. Even with automated
tooling this can still lead to massive diffs that are a huge pain to review,
even if the only thing that is changed is the version number in every import of
every package in that module. This was even worse for things that have C
dependencies, as if you didn't update it everywhere in your dependency chain you
could have two versions of the same C functions try to be linked in and this
really just does not work.</xeblog-conv>
Overall though, Go modules has been a net positive for the community and for
people wanting to create reliable software in Go. It's just such a big semantics
break in how the toolchain works that I almost think it would have been easier
for people to accept if _that_ was Go 2, especially since the semantics of how
the toolchain worked changed so much.
<xeblog-conv name="Mara" mood="hmm">Wait, doesn’t the Go compiler have a
backwards compatibility promise that any code built with Go 1.x works on go
1.(x+1)?</xeblog-conv>
<xeblog-conv name="Cadey" mood="coffee">Yes, but that only applies to _code you
write_, not _semantics of the toolchain_ itself. On one hand this makes a lot of
sense and on the other it feels like a cop-out. The changes in how `go get` now
refers to adding dependencies to a project and `go install` now installs a
binary to the system have made an entire half decade of tool installation
documentation obsolete. It’s understandable why they want to make that change,
but the way that it broke people’s muscle memory is [quite frustrating for
users](https://github.com/golang/go/issues/40276#issuecomment-1109797059) that
aren’t keeping on top of every single change in semantics of toolchains (this
bites me constantly when I need to quick and dirty grab something outside of a
Nix package). I understand _why_ this isn’t a breaking change as far as the
compatibility promise but this feels like a cop-out in my subjective
opinion.</xeblog-conv>
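For what it's worth, the new muscle memory looks something like this, using
`stringer` purely as a stand-in for whatever tool you actually want to install:

```console
$ # Old habit: this used to fetch and build a tool binary.
$ # Inside a module it now only manages go.mod/go.sum entries.
$ go get golang.org/x/tools/cmd/stringer

$ # New habit: install tool binaries with an explicit version.
$ go install golang.org/x/tools/cmd/stringer@latest
```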
## Contexts
One of Go’s major features is its co-operative threading system that it calls
goroutines. Goroutines are kinda like coroutines that the Go runtime schedules
onto OS threads for you. However, there is no easy way to "kill" a goroutine.
You have to pass something into the goroutine that lets you signal it to stop,
and the goroutine has to opt in to checking for that signal.
Without contexts you would need to do all of this legwork manually. Every
project from the time before contexts still shows signs of this. The best
practice was to make a "stop" channel like this:
```go
stop := make(chan struct{})
```
And then you'd send a cancellation signal like this:
```go
stop <- struct{}{}
```
<xeblog-conv name="Mara" mood="hacker">The type `struct{}` is an anonymous
structure value that takes 0 bytes in ram. It was suggested to use this as your
stopping signal to avoid unneeded memory allocations. A `bool` needs one whole
machine word, which can be up to 64 bits of ram. In practice the compiler can
smoosh multiple bools in a struct together into one place in ram, but when
sending these values over a channel like this you can't really cheat that
way.</xeblog-conv>
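You can check the sizes yourself with `unsafe.Sizeof` (a quick sketch; it
assumes the usual `fmt` and `unsafe` imports):

```go
fmt.Println(unsafe.Sizeof(struct{}{})) // 0
fmt.Println(unsafe.Sizeof(true))       // 1
```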
This did work and was the heart of many event loops, but the main problem with
it is that a send like this only reaches _one_ receiver. Many other people also
followed up the stop signal by closing the channel:
```go
close(stop)
```
However, with naïve stopping logic, a receive from the closed channel still
succeeds and yields the zero value. So code like this would still work the way
you wanted:
```go
select {
case <-stop:
	haltAndCatchFire()
}
```
### Package `context`
However, if your stop channel was a `chan bool` and you relied on the `bool`
value being `true`, this would fail because the zero value you'd receive would
be `false`. This was a bit too brittle for comfortable widespread production
use, and we ended up with the [context](https://pkg.go.dev/context) package in
the standard library. A Go context lets you more easily and uniformly handle
timeouts and give up when there is no more work to be done.
<xeblog-conv name="Mara" mood="hacker">This started as something that existed
inside the Google monorepo that escaped out into the world. They also claim to
have an internal tool that makes
[`context.TODO()`](https://pkg.go.dev/context#TODO) useful (probably by showing
you the callsites above that function?), but they never released that tool as
open source so it’s difficult to know where to use it without that added
context.</xeblog-conv>
One of the most basic examples of using contexts comes when you are trying to
stop something from continuing. If you have something that constantly writes
data to clients such as a pub-sub queue, you probably want to stop writing data
to them when the client disconnects. If you have a large number of HTTP requests
to do and only so many workers can make outstanding requests at once, you
want to be able to set a timeout so that after a certain amount of time it gives
up.
Here's an example of using a context in an event processing loop (pretending,
of course, that fetching the current time stands in for real work and isn't
just a contrived way to show this concept off):
```go
t := time.NewTicker(30 * time.Second)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

for {
	select {
	case <-ctx.Done():
		log.Printf("not doing anything more: %v", ctx.Err())
		return
	case data := <-t.C:
		log.Printf("got data: %s", data)
	}
}
```
This will have the Go runtime select between two channels: one emits the
current time every 30 seconds, and the other fires when the `cancel` function
is called.
<xeblog-conv name="Mara" mood="happy">Don't worry, you can call the `cancel()`
function multiple times without any issues. Any additional calls will not do
anything special.</xeblog-conv>
If you want to set a timeout on this (so that the function only tries to run
for 5 minutes), you'd want to change the second line of that example to this:
```go
ctx, cancel := context.WithTimeout(context.Background(), 5 * time.Minute)
defer cancel()
```
<xeblog-conv name="Mara" mood="happy">You should always `defer cancel()` unless
you can prove that it is called elsewhere. If you don't do this you can leak
goroutines that will dutifully try to do their job potentially forever without
any ability to stop them.</xeblog-conv>
The context will be automatically cancelled after 5 minutes. You can cancel it
sooner by calling the `cancel()` function should you need to. Anything else in
the stack that is context-aware will automatically cancel as well, as the
cancellation signal percolates down the stack and across goroutines.
You can attach this to an HTTP request by using
[`http.NewRequestWithContext`](https://pkg.go.dev/net/http#NewRequestWithContext):
```go
req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://xeiaso.net/.within/health", nil)
```
And then when you execute the request (such as with `http.DefaultClient.Do(req)`)
the context will automatically be cancelled if it takes too long to fetch the
response.
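Putting those pieces together, a minimal sketch of a health check with a
five-second budget might look like this (the timeout value is arbitrary, and
the usual `context`, `net/http`, `log`, and `time` imports are assumed):

```go
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()

req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://xeiaso.net/.within/health", nil)
if err != nil {
	log.Fatal(err)
}

resp, err := http.DefaultClient.Do(req)
if err != nil {
	// If the deadline fires first, this error wraps context.DeadlineExceeded.
	log.Fatal(err)
}
defer resp.Body.Close()

log.Println("status:", resp.Status)
```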
You can also wire this up to the Ctrl-C signal using a bit of code
[like this](https://medium.com/@matryer/make-ctrl-c-cancel-the-context-context-bd006a8ad6ff).
Cancellation propagates from a parent context down to everything derived from
it, so you can use this to ensure that things get stopped properly.
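If you're on Go 1.16 or newer, the standard library can do that wiring for you
with [`signal.NotifyContext`](https://pkg.go.dev/os/signal#NotifyContext); a
small sketch:

```go
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
defer stop()

// ctx.Done() fires when the process receives SIGINT (Ctrl-C), so anything
// derived from ctx gets told to shut down too.
<-ctx.Done()
log.Println("shutting down:", ctx.Err())
```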
<xeblog-conv name="Mara" mood="hacker">Be sure to avoid creating a "god context"
across your entire app. This is a known anti-pattern and this pattern should only
be used for small command line tools that have an expected run time in the minutes
at worst, not hours like production bearing services.</xeblog-conv>
This is a huge benefit to the language because the process of doing this before
contexts was so disjointed. Because this wasn’t in the core of the language,
every single implementation was different and required learning what each
library did. Not to mention that adapting between libraries could be brittle at
best and confusing at worst.
I understand why they put data storage into the context type, but in practice I
really wish they hadn't. This feature has been abused a lot in my experience.
At Heroku, a few of our production load-bearing services used contexts as a
dependency injection framework. This did work, but it turned a lot of things
that would normally be compile-time errors into runtime errors.
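For illustration, storing and fetching a value looks something like this (the
request-ID key is made up for the example); the type assertion at the end is
exactly the kind of check that only fails at runtime:

```go
type requestIDKey struct{}

// Stash a request ID in the context...
ctx := context.WithValue(context.Background(), requestIDKey{}, "req-1234")

// ...and later fish it back out. If nothing stored the value, ok is false
// and you only find out while the program is running.
id, ok := ctx.Value(requestIDKey{}).(string)
if !ok {
	id = "unknown"
}
log.Println("request id:", id)
```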
<xeblog-conv name="Cadey" mood="coffee">I say this as someone who maintains a
library that uses contexts to store [contextually relevant log
fields](https://pkg.go.dev/within.website/ln) as a way to make logs easier to
correlate between.<br /><br />Arguably you could make the case that people are misusing the
tool and of course this is what will happen when you do that but I don't know if
this is really the right thing to tell people.</xeblog-conv>
I wish contexts were in the core of the language from the beginning. I know that
it is difficult to do this in practice (especially on all the targets that Go
supports), but having cancellable syscalls would be so cool. It would also be
really neat if contexts could be goroutine-level globals so you didn’t have to
"pollute" the callsites of every function with them.
<xeblog-conv name="Cadey" mood="coffee">At the time contexts were introduced,
one of the major arguments I remember hearing against them was that contexts
"polluted" their function definitions and callsites. I can't disagree with this
sentiment, at some level it really does look like contexts propagate "virally"
throughout a codebase.<br /><br />I think that the net improvements to
reliability and understandability of how things get stopped do make up for this
though. Instead of a bunch of separate ways to cancel work in each individual
library you have the best practice in the standard library. Having contexts
around makes it a lot harder to "leak" goroutines on accident.</xeblog-conv>
## Generics
One of the biggest ticket items that Go has added is "generic types", or the
ability for types and functions to accept other types as parameters. To
understand _why_ this is such a huge change, I need to cover what you had to
work with before generics were added to the language.
One of the major standout features of Go is interface types. They are like Rust
Traits, Java Interfaces, or Haskell Typeclasses; but the main difference is that
interface types are _implicit_ rather than explicit. When you want to meet the
signature of an interface, all you need to do is implement the contract that the
interface spells out. So if you have an interface like this:
```go
type Quacker interface {
	Quack()
}
```
You can make a type like `Duck` a `Quacker` by defining the `Duck` type and a
`Quack` method like this:
```go
type Duck struct{}
func (Duck) Quack() { fmt.Println("Quack!") }
```
But this is not limited to just `Duck`s; you could make a `Sheep` a `Quacker`
just as easily:
```go
type Sheep struct{}
func (Sheep) Quack() { fmt.Println("*confused sheep noises*") }
```
This allows you to deal with the expected _behaviors_ of types rather than
having to write a version of each function for every concrete implementation.
If you want to read from a file, a network socket, a `tar` archive, a `zip`
archive, the decrypted form of an encrypted stream, a TLS socket, or an HTTP/2
stream, they're all [`io.Reader`](https://pkg.go.dev/io#Reader) instances. With
the example above we can make a function that takes a `Quacker` and then does
something with it:
```go
func main() {
	duck := Duck{}
	sheep := Sheep{}

	doSomething(duck)
	doSomething(sheep)
}

func doSomething(q Quacker) {
	q.Quack()
}
```
<xeblog-conv name="Mara" mood="hacker">If you want to play with this example,
check it out on the Go playground [here](https://go.dev/play/p/INK8O2O-D01). Try
to make a slice of Quackers and pass it to `doSomething`!</xeblog-conv>
You can also embed interfaces into other interfaces, which lets you create
composite interfaces that assert multiple behaviours at once. For example,
consider [`io.ReadWriteCloser`](https://pkg.go.dev/io#ReadWriteCloser). Any
value that matches an `io.Reader`, an `io.Writer`, and an `io.Closer` can be
treated as an `io.ReadWriteCloser`. This allows you to assert a lot of
behaviour about types even though the actual underlying types are opaque to you.
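In fact, the standard library defines it as nothing more than three embedded
interfaces:

```go
type ReadWriteCloser interface {
	Reader
	Writer
	Closer
}
```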
This means it’s easy to split up a [`net.Conn`](https://pkg.go.dev/net#Conn)
into its reader half and its writer half without really thinking about
it:
```go
conn, _ := net.Dial("tcp", "127.0.0.1:42069") // error handling elided for brevity
var reader io.Reader = conn
var writer io.Writer = conn
```
And then you can pass the writer side off to one function and the reader side
off to another.
There’s also a bunch of room for "type-level middleware" like
[`io.LimitReader`](https://pkg.go.dev/io#LimitReader). This allows you to set
constraints or details around an interface type while still meeting the contract
for that interface, such as an `io.Reader` that doesn’t let you read too much,
an `io.Writer` that automatically encrypts everything you feed it with TLS, or
even something like sending data over a Unix socket instead of a TCP one. If it
fits the shape of the interface, it Just Works.
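As a small sketch of what that looks like in practice, `io.LimitReader` wraps
any reader in a byte budget (here `resp` is assumed to be an `*http.Response`
from somewhere, and the one-mebibyte cap is arbitrary):

```go
// resp.Body is an io.Reader; LimitReader caps how much we will ever read
// from it, no matter how much the other side tries to send.
limited := io.LimitReader(resp.Body, 1<<20)

data, err := io.ReadAll(limited)
if err != nil {
	log.Fatal(err)
}
log.Printf("read %d bytes", len(data))
```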
However, this falls apart when you want to deal with a collection of _only one_
type that meets an interface at once. When you create a slice of `Quacker`s and
pass it to a function, you can put both `Duck`s and `Sheep` into that slice:
```go
quackers := []Quacker{
	Duck{},
	Sheep{},
}

doSomething(quackers)
```
If you want to assert that every `Quacker` is the same type, you have to do some
fairly brittle things that step around Go's type safety like this:
```go
func doSomething(qs []Quacker) error {
	// Store the name of the type of the first Quacker.
	// We have to use the name `typ` because `type` is
	// a reserved keyword.
	typ := fmt.Sprintf("%T", qs[0])

	for i, q := range qs {
		if qType := fmt.Sprintf("%T", q); qType != typ {
			return fmt.Errorf("slice value %d was type %s, wanted: %s", i, qType, typ)
		}

		q.Quack()
	}

	return nil
}
```
This mixing would only blow up at runtime, not at compile time. This same kind of weakness is basically the main
reason why the Go standard library package [`container`](https://pkg.go.dev/container)
is mostly unused. Everything in the `container` package deals with
`interface{}`/`any` values, which is Go for "literally anything". This means
that without careful wrapper code you need to either make interfaces around
everything in your lists (and then pay the cost of boxing everything in an
interface, which adds up a lot in practice in more ways than you'd think) or
have to type-assert anything going into or coming out of the list, combined
with having to pay super close attention to anything touching that code
during reviews.
<xeblog-conv name="Cadey" mood="enby">Don't get me wrong, interface types
are an _amazing_ standout feature of Go. They are one of the main reasons that
Go code is so easy to reason about and work with. You don't have to worry
about the entire tree of stuff that a value is made out of, you can just
assert that values have behaviors and then you're off to the races. I end up
missing the brutal simplicity of Go interfaces in other languages like Rust.
</xeblog-conv>
### Introducing Go Generics
In Go 1.18, support for adding types as parameters to other types was added.
This allows you to define constraints on what types are accepted by a function,
so that you can reuse the same logic for multiple different kinds of underlying
types.
That `doSomething` function from above could be rewritten like this with
generics:
```go
func doSomething[T Quacker](qs []T) {
	for _, q := range qs {
		q.Quack()
	}
}
```
However, this doesn't currently let you prevent mixing types of `Quacker`s at
compile time like I assumed while I was writing the first version of this
article. It does let you write code like this:
```go
doSomething([]Duck{{}, {}, {}})
doSomething([]Sheep{{}, {}, {}})
```
And then this will reject anything that _is not a `Quacker`_ at compile time:
```go
doSomething([]string{"hi there this won't work"})
```
```
./prog.go:20:13: string does not implement Quacker (missing Quack method)
```
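The flip side, and the thing I got wrong in the first draft, is that a plain
`[]Quacker` still satisfies the constraint (at least as of Go 1.18), so a mixed
slice still compiles just fine:

```go
// T is inferred as Quacker here, so Ducks and Sheep can still mingle.
doSomething([]Quacker{Duck{}, Sheep{}})
```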
### Unions
This also lets you create untagged union types, or types that can be a range of
other types. These are typically useful when writing parsers or other similar
things.
<xeblog-conv name="Numa" mood="delet">It's frankly kind of fascinating that
something made by Google would even let you _think_ about the word "union" when
using it.</xeblog-conv>
Here's an example of a union type of several different kinds of values that you
could realistically see in a parser for a language like [LOLCODE](http://www.lolcode.org/):
```go
// Value can hold any LOLCODE value as defined by the LOLCODE 1.2 spec[1].
//
// [1]: https://github.com/justinmeza/lolcode-spec/blob/master/v1.2/lolcode-spec-v1.2.md#types
type Value interface {
	int64 | // NUMBR
		float64 | // NUMBAR
		string | // YARN
		bool | // TROOF
		struct{} // NOOB
}
```
This is similar to making something like an
[`enum`](https://doc.rust-lang.org/book/ch06-01-defining-an-enum.html) in Rust,
except that there isn't any tag for what the data could be. You still have to do
a type-assertion over every value it _could_ be, but you can do it with only the
subset of values listed in the interface vs any possible type ever made. This
makes it easier to constrain what values can be so you can focus more on your
parsing code and less on defensively programming around variable types.
This adds up to a huge improvement to the language, making things that were
previously very tedious and difficult into things that are very easy. You can
make your own generic collections (such as a B-tree) and take advantage of
packages like
[`golang.org/x/exp/slices`](https://pkg.go.dev/golang.org/x/exp/slices) to
avoid having to define the same utility functions for every single type you
use in a program.
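For instance, with `golang.org/x/exp/slices` the same sorting and searching
helpers work for any fitting element type; a quick sketch:

```go
names := []string{"Numa", "Cadey", "Mara"}
slices.Sort(names)                          // works for any ordered element type
fmt.Println(slices.Contains(names, "Mara")) // true

nums := []int{3, 1, 2}
slices.Sort(nums) // same function, different element type
fmt.Println(nums) // [1 2 3]
```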
<xeblog-conv name="Cadey" mood="enby">I'm barely scratching the surface with
generics here, please see the [type parameters proposal
document](https://go.googlesource.com/proposal/+/refs/heads/master/design/43651-type-parameters.md)
for a lot more information on how generics work. This is a well-written thing
and I highly suggest reading this at least once before you try to use generics
in your Go code. I've been watching this all develop from afar and I'm very
happy with what we have so far (the only things I'd want would be a bit more
ability to be precise about what you are allowing with slices and maps as
function arguments).</xeblog-conv>
---
In conclusion, I believe that we already have Go 2; it’s just called Go 1.18 for
some reason. It has so many improvements and fundamental changes that I believe
it is already Go 2 in spirit. There are so many other things I'm not covering
here (mostly because this post is so long already) like fuzzing, RISC-V support,
binary/octal/hexadecimal/imaginary number literals, WebAssembly support, a pile
of garbage collector improvements, and more. This has all added up to make Go a
fantastic choice for developing server-side applications.
I, as some random person on the internet who is not associated with the Go
team, think that if there were sufficient political will, they could probably
label what we have now as Go 2, but I don’t think that is going to happen any
time soon. Until then, we still have a great set of building blocks that let
you make easy-to-maintain, production-quality services, and I don’t see that
changing any time soon.
---
<xeblog-conv name="Mara" mood="happy">If you had subscribed to the
[Patreon](https://patreon.com/cadey) you could have read this a week
ago!</xeblog-conv>

View File

@ -15,4 +15,4 @@ of that talk has been posted.
I hope you enjoy! I have some more blogposts in the queue but I've been sleeping horribly lately. Here's hoping that clears up. I hope you enjoy! I have some more blogposts in the queue but I've been sleeping horribly lately. Here's hoping that clears up.
[goconcanada]: https://gocon.ca/ [goconcanada]: https://gocon.ca/
[talklink]: https://christine.website/talks/webassembly-on-the-server-system-calls-2019-05-31 [talklink]: https://xeiaso.net/talks/webassembly-on-the-server-system-calls-2019-05-31

View File

@ -31,7 +31,7 @@ $ curl https://mi.within.website/api/webmention/01ERGGEG7DCKRH3R7DH4BXZ6R9 | jq
{ {
"id": "01ERGGEG7DCKRH3R7DH4BXZ6R9", "id": "01ERGGEG7DCKRH3R7DH4BXZ6R9",
"source_url": "https://maya.land/responses/2020/12/01/i-think-this-blog-post-might-have-been.html", "source_url": "https://maya.land/responses/2020/12/01/i-think-this-blog-post-might-have-been.html",
"target_url": "https://christine.website/blog/toast-sandwich-recipe-2019-12-02", "target_url": "https://xeiaso.net/blog/toast-sandwich-recipe-2019-12-02",
"title": null "title": null
} }
``` ```

View File

@ -80,7 +80,7 @@ in one of a few ways:
Some concepts are pulled in from various documents and ideas in a slightly Some concepts are pulled in from various documents and ideas in a slightly
[kasmakfa](https://write.as/excerpts/practical-kasmakfa) manner, but overall the [kasmakfa](https://write.as/excerpts/practical-kasmakfa) manner, but overall the
most "confusing" thing to new readers is going to be related to this comment in most "confusing" thing to new readers is going to be related to this comment in
the [anapana](https://christine.website/blog/when-then-zen-anapana-2018-08-15) the [anapana](https://xeiaso.net/blog/when-then-zen-anapana-2018-08-15)
feature: feature:
> Note: "the body" means the sack of meat and bone that you are currently living inside. For the purposes of explanation of this technique, please consider what makes you yourself separate from the body you live in. > Note: "the body" means the sack of meat and bone that you are currently living inside. For the purposes of explanation of this technique, please consider what makes you yourself separate from the body you live in.

View File

@ -32,7 +32,7 @@ This article is a more verbose version of [the correlating feature from when-the
The When Then Zen project aims to describe the finer points of meditative concepts in plain English. As such, we start assuming just about nothing and build fractally on top of concepts derived from common or plain English usage of the terms. Some of these techniques may be easier for people with a more intensive meditative background, but try things and see what works best for you. Meditation in general works a lot better when you have a curious and playful attitude about figuring things out. The When Then Zen project aims to describe the finer points of meditative concepts in plain English. As such, we start assuming just about nothing and build fractally on top of concepts derived from common or plain English usage of the terms. Some of these techniques may be easier for people with a more intensive meditative background, but try things and see what works best for you. Meditation in general works a lot better when you have a curious and playful attitude about figuring things out.
I'm not perfect. I don't know what will work best for you. A lot of this is documenting both my practice and what parts of what books helped me "get it". If this works for you, [please let me know](https://christine.website/contact). If this doesn't work for you, [please let me know](https://christine.website/contact). I will use this information for making direct improvements to these documents. I'm not perfect. I don't know what will work best for you. A lot of this is documenting both my practice and what parts of what books helped me "get it". If this works for you, [please let me know](https://xeiaso.net/contact). If this doesn't work for you, [please let me know](https://xeiaso.net/contact). I will use this information for making direct improvements to these documents.
As for your practice, twist the rules into circles and scrape out the parts that don't work if it helps you. Find out how to integrate it into your life in the best manner and go with it. As for your practice, twist the rules into circles and scrape out the parts that don't work if it helps you. Find out how to integrate it into your life in the best manner and go with it.

View File

@ -8,7 +8,7 @@ tags:
--- ---
This website has been a progressive web app [for a long This website has been a progressive web app [for a long
time](https://christine.website/blog/progressive-webapp-conversion-2019-01-26). time](https://xeiaso.net/blog/progressive-webapp-conversion-2019-01-26).
This means that you can install my blog to your phone as if it was a normal app This means that you can install my blog to your phone as if it was a normal app
via the share menu in Safari on iOS or via other native prompts on other via the share menu in Safari on iOS or via other native prompts on other
browsers. However, this is not enough. In the constant pursuit of advancement I browsers. However, this is not enough. In the constant pursuit of advancement I

View File

@ -0,0 +1,64 @@
---
title: How to Store an SSH Key on a Yubikey
date: 2022-05-27
series: howto
tags:
- yubikey
- security
---
SSH keys suck. They are files on disk that you can easily move to other
machines, instead of being stored in hardware where they can't be exfiltrated.
Using a password to encrypt the private key is a viable option, but the UX for
that is hot garbage. It's allegedly the future, so surely we MUST have some way
to make this all better, right?
<xeblog-conv name="Numa" mood="delet">\>implying there is a way to make anything
security related better</xeblog-conv>
Luckily, there is actually something we can do for this! As of [OpenSSH
8.2](https://www.openssh.com/releasenotes.html#8.2) (February 14, 2020) you are
able to store an SSH private key on a yubikey! Here's how to do it.
<xeblog-conv name="Mara" mood="hacker">This should work on other FIDO keys like
Google's Titan, but we don't have access to one over here and as such haven't
tested it. Your mileage may vary. We are told that it works with the Google
Titan key that is handed out to Go contributors.</xeblog-conv>
First install `yubikey-manager` (see
[here](https://www.yubico.com/support/download/yubikey-manager/) for more
information, or run `nix-shell -p yubikey-manager` to run it without installing
it on NixOS), plug in your yubikey and run `ykman list`:
```console
$ ykman list
YubiKey 5C NFC (5.4.3) [OTP+FIDO+CCID] Serial: 4206942069
```
If you haven't set a PIN for the yubikey yet, follow
[this](https://docs.yubico.com/software/yubikey/tools/ykman/FIDO_Commands.html#ykman-fido-access-change-pin-options)
to set a PIN of your choice. Once you do this, you can generate a new SSH key
with the following command:
```
ssh-keygen -t ed25519-sk -O resident
```
<xeblog-conv name="Mara" mood="hacker">If that fails, try `ecdsa-sk`
instead! Some hardware keys may not support storing the key on the key
itself.</xeblog-conv>
Then enter in a super secret password (such as the Tongues you received as a kid
when you were forced into learning the bible against your will) twice and then
add that key to your agent with `ssh-add -K`. Then you can list your keys with
`ssh-add -L`:
```console
$ ssh-add -L
sk-ssh-ed25519@openssh.com AAAAGnNrLXNzaC1lZDI1NTE5QG9wZW5zc2guY29tAAAAIKgGePSwpBuHUhrFCRLch9Usqi7L0fKtgTRnh6F/R+ruAAAABHNzaDo= cadey@shachi
```
Then you can copy this public key to GitHub or whatever and authenticate as
normal. The private key is stored on your yubikey directly and you can add it
with `ssh-add -K`. You can delete the ssh key stub at `~/.ssh/id_ed25519_sk` and
then your yubikey will be the only thing holding that key.

View File

@ -8,7 +8,7 @@ tags:
--- ---
As I mentioned As I mentioned
[before](https://christine.website/blog/colemak-layout-2020-08-15), I ordered a [before](https://xeiaso.net/blog/colemak-layout-2020-08-15), I ordered a
[ZSA Moonlander](https://zsa.io/moonlander) and it has finally arrived. I am [ZSA Moonlander](https://zsa.io/moonlander) and it has finally arrived. I am
writing this post from my Moonlander, and as such I may do a few more typos writing this post from my Moonlander, and as such I may do a few more typos
than normal, I'm still getting used to this. than normal, I'm still getting used to this.

View File

@ -22,7 +22,7 @@ no influence pushing me either way on this keyboard.
desk](https://cdn.christine.website/file/christine-static/img/keeb/Elm3dN8XUAAYHws.jpg) desk](https://cdn.christine.website/file/christine-static/img/keeb/Elm3dN8XUAAYHws.jpg)
[That 3d printed brain is built from the 3D model that was made as a part of <a [That 3d printed brain is built from the 3D model that was made as a part of <a
href="https://christine.website/blog/brain-fmri-to-3d-model-2019-08-23">this href="https://xeiaso.net/blog/brain-fmri-to-3d-model-2019-08-23">this
blogpost</a>.](conversation://Mara/hacker) blogpost</a>.](conversation://Mara/hacker)
## tl;dr ## tl;dr
@ -131,7 +131,7 @@ standard [Colemak](https://Colemak.com/) layout and it is currently the layer I
type the fastest on. I have the RGB configured so that it is mostly pink with type the fastest on. I have the RGB configured so that it is mostly pink with
the homerow using a lighter shade of pink. The color codes come from my logo the homerow using a lighter shade of pink. The color codes come from my logo
that you can see in the favicon [or here for a larger that you can see in the favicon [or here for a larger
version](https://christine.website/static/img/avatar_large.png). version](https://xeiaso.net/static/img/avatar_large.png).
I also have a qwerty layer for gaming. Most games expect qwerty keyboards and I also have a qwerty layer for gaming. Most games expect qwerty keyboards and
this is an excellent stopgap to avoid having to rebind every game that I want to this is an excellent stopgap to avoid having to rebind every game that I want to

View File

@ -1,87 +1,11 @@
let Person = let xesite = ./dhall/types/package.dhall
{ Type = { name : Text, tags : List Text, gitLink : Text, twitter : Text }
, default =
{ name = "", tags = [] : List Text, gitLink = "", twitter = "" }
}
let Author = let Config = xesite.Config
{ Type =
{ name : Text
, handle : Text
, picUrl : Optional Text
, link : Optional Text
, twitter : Optional Text
, default : Bool
, inSystem : Bool
}
, default =
{ name = ""
, handle = ""
, picUrl = None Text
, link = None Text
, twitter = None Text
, default = False
, inSystem = False
}
}
let defaultPort = env:PORT ? 3030
let defaultWebMentionEndpoint =
env:WEBMENTION_ENDPOINT
? "https://mi.within.website/api/webmention/accept"
let Config =
{ Type =
{ signalboost : List Person.Type
, authors : List Author.Type
, port : Natural
, clackSet : List Text
, resumeFname : Text
, webMentionEndpoint : Text
, miToken : Text
}
, default =
{ signalboost = [] : List Person.Type
, authors =
[ Author::{
, name = "Xe Iaso"
, handle = "xe"
, picUrl = Some "/static/img/avatar.png"
, link = Some "https://christine.website"
, twitter = Some "theprincessxena"
, default = True
, inSystem = True
}
, Author::{
, name = "Jessie"
, handle = "Heartmender"
, picUrl = Some
"https://cdn.christine.website/file/christine-static/img/UPRcp1pO_400x400.jpg"
, link = Some "https://heartmender.writeas.com"
, twitter = Some "BeJustFine"
, inSystem = True
}
, Author::{
, name = "Ashe"
, handle = "ectamorphic"
, picUrl = Some
"https://cdn.christine.website/file/christine-static/img/FFVV1InX0AkDX3f_cropped_smol.jpg"
, inSystem = True
}
, Author::{ name = "Nicole", handle = "Twi", inSystem = True }
, Author::{ name = "Mai", handle = "Mai", inSystem = True }
]
, port = defaultPort
, clackSet = [ "Ashlynn" ]
, resumeFname = "./static/resume/resume.md"
, webMentionEndpoint = defaultWebMentionEndpoint
, miToken = "${env:MI_TOKEN as Text ? ""}"
}
}
in Config::{ in Config::{
, signalboost = ./signalboost.dhall , signalboost = ./dhall/signalboost.dhall
, authors = ./dhall/authors.dhall
, clackSet = , clackSet =
[ "Ashlynn", "Terry Davis", "Dennis Ritchie", "Steven Hawking" ] [ "Ashlynn", "Terry Davis", "Dennis Ritchie", "Steven Hawking" ]
, jobHistory = ./dhall/jobHistory.dhall
} }

View File

@ -36,6 +36,7 @@ img {
.conversation-chat { .conversation-chat {
align-self: center; align-self: center;
min-width: 0;
} }
.gruvbox-dark pre, pre { .gruvbox-dark pre, pre {
@ -43,17 +44,6 @@ img {
padding-right: 1em; padding-right: 1em;
} }
/* xeblog-conv:not(:defined) { */
/* display: block; */
/* border-left: 0.25ch solid green; */
/* padding-left: 1.75ch; */
/* } */
/* xeblog-conv:before:not(:defined) { */
/* content: "<"attr(name)">"; */
/* font-weight: bold; */
/* } */
.warning { .warning {
background-color: #282828; background-color: #282828;
} }

View File

@ -48,7 +48,6 @@ in pkgs.stdenv.mkDerivation {
cp -rf $src/blog $out/blog cp -rf $src/blog $out/blog
cp -rf $src/css $out/css cp -rf $src/css $out/css
cp -rf $src/gallery $out/gallery cp -rf $src/gallery $out/gallery
cp -rf $src/signalboost.dhall $out/signalboost.dhall
cp -rf $src/static $out/static cp -rf $src/static $out/static
cp -rf $src/talks $out/talks cp -rf $src/talks $out/talks

30
dhall/authors.dhall Normal file
View File

@ -0,0 +1,30 @@
let Author = ./types/Author.dhall
in [ Author::{
, name = "Xe Iaso"
, handle = "xe"
, picUrl = Some "/static/img/avatar.png"
, link = Some "https://christine.website"
, twitter = Some "theprincessxena"
, default = True
, inSystem = True
}
, Author::{
, name = "Jessie"
, handle = "Heartmender"
, picUrl = Some
"https://cdn.christine.website/file/christine-static/img/UPRcp1pO_400x400.jpg"
, twitter = Some "BeJustFine"
, inSystem = True
}
, Author::{
, name = "Ashe"
, handle = "ectamorphic"
, picUrl = Some
"https://cdn.christine.website/file/christine-static/img/FFVV1InX0AkDX3f_cropped_smol.jpg"
, inSystem = True
}
, Author::{ name = "Nicole", handle = "Twi", inSystem = True }
, Author::{ name = "Mai", handle = "Mai", inSystem = True }
, Author::{ name = "Sephira", handle = "Sephie", inSystem = True }
]

346
dhall/jobHistory.dhall Normal file
View File

@ -0,0 +1,346 @@
let xesite = ./types/package.dhall
let Job = xesite.Job
let Salary = xesite.Salary
let Stock = xesite.Stock
let StockKind = xesite.StockKind
let Company = xesite.Company
let Location = xesite.Location
let annual = \(rate : Natural) -> Salary::{ amount = rate }
let hourly = \(rate : Natural) -> Salary::{ amount = rate, per = "hour" }
let annualCAD = \(rate : Natural) -> Salary::{ amount = rate, currency = "CAD" }
let mercerIsland =
Location::{
, city = "Mercer Island"
, stateOrProvince = "WA"
, country = "USA"
}
let bellevue = mercerIsland // { city = "Bellevue" }
let mountainView =
Location::{
, city = "Mountain View"
, stateOrProvince = "CA"
, country = "USA"
, remote = False
}
let sf = mountainView // { city = "San Fransisco" }
let montreal =
Location::{
, city = "Montreal"
, stateOrProvince = "QC"
, country = "CAN"
, remote = False
}
let ottawa =
Location::{ city = "Ottawa", stateOrProvince = "ON", country = "CAN" }
let imvu =
Company::{
, name = "IMVU"
, url = Some "https://imvu.com"
, tagline =
"a company whose mission is to help people find and communicate with eachother. Their main product is a 3D avatar-based chat client and its surrounding infrastructure allowing creators to make content for the avatars to wear."
, location = mountainView // { city = "Redwood City" }
}
let tailscale =
Company::{
, name = "Tailscale"
, url = Some "https://tailscale.com"
, tagline =
"a zero config VPN for building secure networks. Install on any device in minutes. Remote access from any network or physical location."
, location = ottawa // { city = "Toronto" }
}
in [ Job::{
, company = Company::{
, name = "Symplicity"
, tagline =
"a company that provides students with the tools and connections they need to enhance their employability while preparing to succeed in today's job market."
, url = Some "https://www.symplicity.com"
, location = Location::{
, city = "Arlington"
, stateOrProvince = "VA"
, country = "USA"
, remote = False
}
}
, title = "Junior Systems Administrator"
, startDate = "2013-11-11"
, endDate = Some "2014-01-06"
, daysWorked = Some 56
, salary = annual 50000
, leaveReason = Some "terminated"
, locations =
[ Location::{
, city = "Arlington"
, stateOrProvince = "VA"
, country = "USA"
, remote = False
}
]
, highlights = [ "Python message queue processing" ]
, hideFromResume = True
}
, Job::{
, company = Company::{
, name = "OpDemand"
, defunct = True
, tagline =
"the company behind the open source project Deis, a distributed platform-as-a-service (PaaS) designed from the ground up to emulate Heroku but on privately owned servers."
, location = Location::{
, city = "Boulder"
, stateOrProvince = "CO"
, country = "USA"
}
}
, title = "Software Engineering Intern"
, startDate = "2014-07-14"
, endDate = Some "2014-08-27"
, daysWorked = Some 44
, daysBetween = Some 189
, salary = annual 35000
, leaveReason = Some "terminated"
, locations = [ mercerIsland ]
, highlights =
[ "Built new base image for Deis components"
, "Research and development on a new builder component"
]
, hideFromResume = True
}
, Job::{
, company = Company::{
, name = "Appen"
, url = Some "https://appen.com/"
, tagline =
"is a company that uses crowdsourcing to have its customers submit tasks to be done, similar to Amazon's Mechanical Turk."
, location = mountainView // { city = "San Francisco", remote = True }
}
, title = "Consultant"
, contract = True
, startDate = "2014-09-17"
, endDate = Some "2014-10-15"
, daysWorked = Some 28
, daysBetween = Some 21
, salary = hourly 90
, leaveReason = Some "contract not renewed"
, locations = [ mercerIsland ]
, highlights =
[ "Research and development on scalable Linux deployments on AWS via CoreOS and Docker"
, "Development of in-house tools to speed instance creation"
, "Laid groundwork on the creation and use of better tools for managing large clusters of CoreOS and Fleet machines"
]
}
, Job::{
, company = Company::{
, name = "VTCSecure"
, url = Some "https://www.vtcsecure.com/"
, tagline =
"a company dedicated to helping with custom and standard audio/video conferencing solutions. They specialize in helping the deaf and blind communicate over today's infrastructure without any trouble on their end."
, location = Location::{
, city = "Clearwater"
, stateOrProvince = "FL"
, country = "USA"
}
}
, title = "Consultant"
, contract = True
, startDate = "2014-10-27"
, endDate = Some "2015-02-09"
, daysWorked = Some 105
, daysBetween = Some 12
, salary = hourly 90
, leaveReason = Some "contract not renewed"
, locations = [ mercerIsland ]
, highlights =
[ "Started groundwork for a dynamically scalable infrastructure on a project for helping the blind see things"
, "Developed a prototype of a new website for VTCSecure"
, "Education on best practices using Docker and CoreOS"
, "Learning Freeswitch"
]
}
, Job::{
, company = imvu
, title = "Site Reliability Engineer"
, startDate = "2015-03-30"
, endDate = Some "2016-03-07"
, daysWorked = Some 343
, daysBetween = Some 49
, salary = annual 125000 // { stock = Some Stock::{ amount = 20000 } }
, leaveReason = Some "demoted"
, locations = [ mountainView ]
, highlights =
[ "Wrote up technical designs"
, "Implemented technical designs on an over 800 machine cluster"
, "Continuous learning of a lot of very powerful systems and improving upon them when it is needed"
]
}
, Job::{
, company = imvu
, title = "Systems Administrator"
, startDate = "2016-03-08"
, endDate = Some "2016-04-01"
, daysWorked = Some 24
, daysBetween = Some 1
, salary = annual 105000
, leaveReason = Some "quit"
, locations = [ mountainView // { city = "Redwood City" } ]
}
, Job::{
, company = Company::{
, name = "Pure Storage"
, url = Some "https://www.purestorage.com/"
, tagline =
"a Mountain View, California-based enterprise data flash storage company founded in 2009. It is traded on the NYSE (PSTG)."
, location = mountainView
}
, title = "Member of Technical Staff"
, startDate = "2016-04-04"
, endDate = Some "2016-08-03"
, daysWorked = Some 121
, daysBetween = Some 3
, salary =
annual 135000
// { stock = Some Stock::{
, amount = 5000
, liquid = True
, kind = StockKind.Grant
}
}
, leaveReason = Some "quit"
, locations = [ mountainView ]
, highlights = [ "Python 2 code maintenance", "Working with Foone" ]
}
, Job::{
, company = Company::{
, name = "Backplane.io"
, defunct = True
, location = sf
}
, title = "Software Engineer"
, startDate = "2016-08-24"
, endDate = Some "2016-11-22"
, daysWorked = Some 90
, daysBetween = Some 21
, salary = annual 105000 // { stock = Some Stock::{ amount = 85000 } }
, leaveReason = Some "terminated"
, locations = [ sf ]
, highlights =
[ "Performance monitoring of production servers"
, "Continuous deployment and development in Go"
, "Learning a lot about HTTP/2 and load balancing"
]
}
, Job::{
, company = Company::{
, name = "MBO Partners (Heroku)"
, tagline = "a staffing agency used to contract me for Heroku."
, location = Location::{
, city = "Herndon"
, stateOrProvince = "VA"
, country = "USA"
}
}
, title = "Consultant"
, contract = True
, startDate = "2017-02-13"
, endDate = Some "2017-11-13"
, daysWorked = Some 273
, daysBetween = Some 83
, salary = hourly 120
, leaveReason = Some "hired"
, locations = [ mountainView ]
}
, Job::{
, company = Company::{
, name = "Heroku"
, url = Some "https://heroku.com"
, tagline =
"a cloud Platform-as-a-Service (PaaS) that created the term 'platform as a service'. Heroku currently supports several programming languages that are commonly used on the web. Heroku, one of the first cloud platforms, has been in development since June 2007, when it supported only the Ruby programming language, but now supports Java, Node.js, Scala, Clojure, Python, PHP, and Go."
, location = sf
}
, title = "Senior Software Engineer"
, startDate = "2017-11-13"
, endDate = Some "2019-03-08"
, daysWorked = Some 480
, daysBetween = Some 0
, salary = annual 150000
, leaveReason = Some "quit"
, locations = [ mountainView, bellevue ]
, highlights =
[ "JVM Application Metrics"
, "Go Runtime Metrics Agent"
, "Other backend fixes and improvements on Threshold Autoscaling and Threshold Alerting"
, "Public-facing blogpost writing"
]
}
, Job::{
, company = Company::{
, name = "Lightspeed POS"
, url = Some "https://lightspeedhq.com"
, tagline =
"a provider of retail, ecommerce and point-of-sale solutions for small and medium scale businesses."
, location = montreal
}
, title = "Expert principal en fiabilité du site"
, startDate = "2019-05-06"
, endDate = Some "2020-11-27"
, daysWorked = Some 540
, daysBetween = Some 48
, salary =
annualCAD 115000
// { stock = Some Stock::{ amount = 7500, liquid = True } }
, leaveReason = Some "quit"
, locations = [ montreal ]
, highlights =
[ "Migration from cloud to cloud"
, "Work on the cloud platform initiative"
, "Crafting reliable infrastructure for clients of customers"
, "Creation of an internally consistent and extensible command line interface for internal tooling"
]
}
, Job::{
, company = tailscale
, title = "Software Designer"
, startDate = "2020-12-14"
, endDate = Some "2022-03-01"
, daysWorked = Some 442
, daysBetween = Some 0
, salary = annualCAD 135000
, leaveReason = Some "raise"
, locations = [ montreal // { remote = True }, ottawa ]
, highlights =
[ "Go programming"
, "SQL integrations"
, "Public-facing content writing"
, "Customer support"
]
}
, Job::{
, company = tailscale
, title = "Archmage of Infrastructure"
, startDate = "2022-03-01"
, salary = annualCAD 147150
, locations = [ ottawa ]
, highlights =
[ "The first developer relations person at Tailscale"
, "Public-facing content writing"
, "Public speaking"
, "Developing custom integration solutions and supporting them"
]
}
]

38
dhall/resume.dhall Normal file
View File

@ -0,0 +1,38 @@
let xesite = ./types/package.dhall
let Resume = xesite.Resume
let Link = xesite.Link
in Resume::{
, hnLinks =
[ Link::{
, url = "https://news.ycombinator.com/item?id=29522941"
, title = "'Open Source' is Broken"
}
, Link::{
, url = "https://news.ycombinator.com/item?id=29167560"
, title = "The Surreal Horror of PAM"
}
, Link::{
, url = "https://news.ycombinator.com/item?id=27175960"
, title = "Systemd: The Good Parts"
}
, Link::{
, url = "https://news.ycombinator.com/item?id=26845355"
, title = "I Implemented /dev/printerfact in Rust"
}
, Link::{
, url = "https://news.ycombinator.com/item?id=25978511"
, title = "A Model for Identity in Software"
}
, Link::{
, url = "https://news.ycombinator.com/item?id=31390506"
, title = "Fly.io: The reclaimer of Heroku's magic"
}
, Link::{
, url = "https://news.ycombinator.com/item?id=31149801"
, title = "Crimes with Go Generics"
}
]
}

View File

@ -1,8 +1,4 @@
let Person = let Person = ./types/Person.dhall
{ Type = { name : Text, tags : List Text, gitLink : Text, twitter : Text }
, default =
{ name = "", tags = [] : List Text, gitLink = "", twitter = "" }
}
in [ Person::{ in [ Person::{
, name = "Christian Sullivan" , name = "Christian Sullivan"
@ -20,8 +16,8 @@ in [ Person::{
, "istio" , "istio"
, "typescript" , "typescript"
] ]
, gitLink = "https://github.com/euforic" , gitLink = Some "https://github.com/euforic"
, twitter = "https://twitter.com/euforic" , twitter = Some "https://twitter.com/euforic"
} }
, Person::{ , Person::{
, name = "David Roberts" , name = "David Roberts"
@ -41,8 +37,8 @@ in [ Person::{
, "embedded" , "embedded"
, "sql" , "sql"
] ]
, gitLink = "https://github.com/ddr0" , gitLink = Some "https://github.com/ddr0"
, twitter = "https://twitter.com/DDR_4" , twitter = Some "https://twitter.com/DDR_4"
} }
, Person::{ , Person::{
, name = "Faizan Jamil" , name = "Faizan Jamil"
@ -65,8 +61,7 @@ in [ Person::{
, "full-stack" , "full-stack"
, "linux" , "linux"
] ]
, gitLink = "https://github.com/faizjamil" , gitLink = Some "https://github.com/faizjamil"
, twitter = "N/A"
} }
, Person::{ , Person::{
, name = "Joseph Crawley" , name = "Joseph Crawley"
@ -80,8 +75,8 @@ in [ Person::{
, "bash" , "bash"
, "linux" , "linux"
] ]
, gitLink = "https://github.com/espe-on" , gitLink = Some "https://github.com/espe-on"
, twitter = "https://twitter.com/espe_on_" , twitter = Some "https://twitter.com/espe_on_"
} }
, Person::{ , Person::{
, name = "nicoo" , name = "nicoo"
@ -96,7 +91,7 @@ in [ Person::{
, "security" , "security"
, "SDR" , "SDR"
] ]
, gitLink = "https://github.com/nbraud" , gitLink = Some "https://github.com/nbraud"
} }
, Person::{ , Person::{
, name = "Prajjwal Singh" , name = "Prajjwal Singh"
@ -112,8 +107,8 @@ in [ Person::{
, "google-cloud" , "google-cloud"
, "typescript" , "typescript"
] ]
, gitLink = "https://github.com/Prajjwal" , gitLink = Some "https://github.com/Prajjwal"
, twitter = "https://twitter.com/prajjwalsin" , twitter = Some "https://twitter.com/prajjwalsin"
} }
, Person::{ , Person::{
, name = "Piyushh Bhutoria" , name = "Piyushh Bhutoria"
@ -125,8 +120,8 @@ in [ Person::{
, "php" , "php"
, "google-cloud" , "google-cloud"
] ]
, gitLink = "https://github.com/Piyushhbhutoria" , gitLink = Some "https://github.com/Piyushhbhutoria"
, twitter = "https://twitter.com/PiyushhB" , twitter = Some "https://twitter.com/PiyushhB"
} }
, Person::{ , Person::{
, name = "Ryan Casalino" , name = "Ryan Casalino"
@ -143,8 +138,7 @@ in [ Person::{
, "flask" , "flask"
, "unix" , "unix"
] ]
, gitLink = "https://github.com/rjpcasalino" , gitLink = Some "https://github.com/rjpcasalino"
, twitter = "N/A"
} }
, Person::{ , Person::{
, name = "Jeremy White" , name = "Jeremy White"
@ -163,8 +157,8 @@ in [ Person::{
, "google-cloud" , "google-cloud"
, "azure" , "azure"
] ]
, gitLink = "https://github.com/dudymas" , gitLink = Some "https://github.com/dudymas"
, twitter = "https://twitter.com/dudymas" , twitter = Some "https://twitter.com/dudymas"
} }
, Person::{ , Person::{
, name = "Zachary McKee" , name = "Zachary McKee"
@ -181,14 +175,12 @@ in [ Person::{
, "nginx" , "nginx"
, "gunicorn" , "gunicorn"
] ]
, gitLink = "https://github.com/ZacharyRMcKee" , gitLink = Some "https://github.com/ZacharyRMcKee"
, twitter = "N/A"
} }
, Person::{ , Person::{
, name = "Muazzam Kazmi" , name = "Muazzam Kazmi"
, tags = [ "Rust", "C++", "x86assembly", "WinAPI", "Node.js", "React.js" ] , tags = [ "Rust", "C++", "x86assembly", "WinAPI", "Node.js", "React.js" ]
, gitLink = "https://github.com/muazzamalikazmi" , gitLink = Some "https://github.com/muazzamalikazmi"
, twitter = "N/A"
} }
, Person::{ , Person::{
, name = "Jeffin Mathew" , name = "Jeffin Mathew"
@ -202,8 +194,8 @@ in [ Person::{
, "javascript" , "javascript"
, "iot" , "iot"
] ]
, gitLink = "https://github.com/mjeffin" , gitLink = Some "https://github.com/mjeffin"
, twitter = "https://twitter.com/mpjeffin" , twitter = Some "https://twitter.com/mpjeffin"
} }
, Person::{ , Person::{
, name = "Nasir Hussain" , name = "Nasir Hussain"
@ -218,24 +210,17 @@ in [ Person::{
, "golang" , "golang"
, "rpm packaging" , "rpm packaging"
] ]
, gitLink = "https://github.com/nasirhm" , gitLink = Some "https://github.com/nasirhm"
, twitter = "https://twitter.com/_nasirhm_" , twitter = Some "https://twitter.com/_nasirhm_"
} }
, Person::{ , Person::{
, name = "Avi Parshan" , name = "Avi Parshan"
, tags = , tags =
[ "python" [ "python", "windows", "javascript", "html", "android", "java", "C#" ]
, "windows" , gitLink = Some "https://github.com/avipars"
, "javascript" , twitter = Some "https://twitter.com/aviinfinity"
, "html"
, "android"
, "java"
, "C#"
]
, gitLink = "https://github.com/avipars"
, twitter = "https://twitter.com/aviinfinity"
} }
, Person:: { , Person::{
, name = "Tommy Nguyen" , name = "Tommy Nguyen"
, tags = , tags =
[ "c++" [ "c++"
@ -246,6 +231,54 @@ in [ Person::{
, "web" , "web"
, "google-cloud-platform" , "google-cloud-platform"
] ]
, gitLink = "https://github.com/remyabel" , gitLink = Some "https://github.com/remyabel"
}
, Person::{
, name = "Krish Jain"
, tags =
[ "c++", "linux", "c", "python", "ios", "nlp", "machine learning" ]
, gitLink = Some "https://github.com/Krish-sysadmin"
, twitter = Some "https://twitter.com/krishjain02"
}
, Person::{
, name = "Henri Shustak"
, tags =
[ "backend"
, "generalist"
, "documentation"
, "support"
, "electronics"
, "javascript"
, "python"
, "ruby"
, "bash"
, "sh"
, "fish"
, "zsh"
, "tsch"
, "software"
, "full-stack"
, "linux"
, "R&D"
, "SRE / system adminsitration"
]
, gitLink = Some "https://github.com/henri"
, twitter = Some "https://twitter.com/henri_shustak"
}
, Person::{
, name = "Gabriel Simmer"
, tags =
[ "golang"
, "backend"
, "javascript"
, "python"
, "software"
, "full-stack"
, "linux"
, "devops"
, "developer tooling"
]
, gitLink = Some "https://github.com/gmemstr"
, twitter = Some "https://twitter.com/gmem_"
} }
] ]

19
dhall/types/Author.dhall Normal file
View File

@ -0,0 +1,19 @@
{ Type =
{ name : Text
, handle : Text
, picUrl : Optional Text
, link : Optional Text
, twitter : Optional Text
, default : Bool
, inSystem : Bool
}
, default =
{ name = ""
, handle = ""
, picUrl = None Text
, link = None Text
, twitter = None Text
, default = False
, inSystem = False
}
}

17
dhall/types/Company.dhall Normal file
View File

@ -0,0 +1,17 @@
let Location = ./Location.dhall
in { Type =
{ name : Text
, url : Optional Text
, tagline : Text
, location : Location.Type
, defunct : Bool
}
, default =
{ name = ""
, url = None Text
, tagline = ""
, location = Location::{=}
, defunct = False
}
}

33
dhall/types/Config.dhall Normal file
View File

@ -0,0 +1,33 @@
let Person = ./Person.dhall
let Author = ./Author.dhall
let Job = ./Job.dhall
let defaultPort = env:PORT ? 3030
let defaultWebMentionEndpoint =
env:WEBMENTION_ENDPOINT
? "https://mi.within.website/api/webmention/accept"
in { Type =
{ signalboost : List Person.Type
, authors : List Author.Type
, port : Natural
, clackSet : List Text
, resumeFname : Text
, webMentionEndpoint : Text
, miToken : Text
, jobHistory : List Job.Type
}
, default =
{ signalboost = [] : List Person.Type
, authors = [] : List Author.Type
, port = defaultPort
, clackSet = [ "Ashlynn" ]
, resumeFname = "./static/resume/resume.md"
, webMentionEndpoint = defaultWebMentionEndpoint
, miToken = "${env:MI_TOKEN as Text ? ""}"
, jobHistory = [] : List Job.Type
}
}

Some files were not shown because too many files have changed in this diff.