Merge pull request #4 from Xe/Xe/feat/server-side-rendering
Use server-side rendering, redo frontend with hack.css
|
@@ -1,4 +1,4 @@
|
|||
FROM xena/christine.website
|
||||
ENV PORT 5000
|
||||
EXPOSE 5000
|
||||
RUN apk add --no-cache bash
|
||||
FROM xena/christine.website
|
||||
ENV PORT 5000
|
||||
EXPOSE 5000
|
||||
RUN apk add --no-cache bash
|
LICENSE
|
@@ -1,19 +1,19 @@
|
|||
Copyright (c) 2016 Christine Dodrill <me@christine.website>
|
||||
Copyright (c) 2017 Christine Dodrill <me@christine.website>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
This software is provided 'as-is', without any express or implied
|
||||
warranty. In no event will the authors be held liable for any damages
|
||||
arising from the use of this software.
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
Permission is granted to anyone to use this software for any purpose,
|
||||
including commercial applications, and to alter it and redistribute it
|
||||
freely, subject to the following restrictions:
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
1. The origin of this software must not be misrepresented; you must not
|
||||
claim that you wrote the original software. If you use this software
|
||||
in a product, an acknowledgement in the product documentation would be
|
||||
appreciated but is not required.
|
||||
|
||||
2. Altered source versions must be plainly marked as such, and must not be
|
||||
misrepresented as being the original software.
|
||||
|
||||
3. This notice may not be removed or altered from any source distribution.
|
|
@@ -1,8 +1,5 @@
|
|||
# My Site
|
||||
# site
|
||||
|
||||
Version 2
|
||||
My personal/portfolio website.
|
||||
|
||||
This is intended as my portfolio site. This is a site made with [pux](https://github.com/alexmingoia/purescript-pux)
|
||||
and [Go](https://golang.org).
|
||||
|
||||
![](http://i.imgur.com/MOhMzmB.png)
|
||||
![](https://puu.sh/vWnJx/57cda175d8.png)
|
||||
|
|
|
@@ -1,192 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Xe/asarfs"
|
||||
"github.com/gernest/front"
|
||||
"github.com/urfave/negroni"
|
||||
)
|
||||
|
||||
// Post is a single post summary for the menu.
|
||||
type Post struct {
|
||||
Title string `json:"title"`
|
||||
Link string `json:"link"`
|
||||
Summary string `json:"summary,omitifempty"`
|
||||
Body string `json:"body, omitifempty"`
|
||||
Date string `json:"date"`
|
||||
}
|
||||
|
||||
// Posts implements sort.Interface for a slice of Post objects.
|
||||
type Posts []*Post
|
||||
|
||||
func (p Posts) Len() int { return len(p) }
|
||||
func (p Posts) Less(i, j int) bool {
|
||||
iDate, _ := time.Parse("2006-01-02", p[i].Date)
|
||||
jDate, _ := time.Parse("2006-01-02", p[j].Date)
|
||||
|
||||
return iDate.Unix() < jDate.Unix()
|
||||
}
|
||||
func (p Posts) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
|
||||
|
||||
var (
|
||||
posts Posts
|
||||
rbody string
|
||||
)
|
||||
|
||||
func init() {
|
||||
err := filepath.Walk("./blog/", func(path string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
fin, err := os.Open(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer fin.Close()
|
||||
|
||||
content, err := ioutil.ReadAll(fin)
|
||||
if err != nil {
|
||||
// handle error
|
||||
}
|
||||
|
||||
m := front.NewMatter()
|
||||
m.Handle("---", front.YAMLHandler)
|
||||
front, _, err := m.Parse(bytes.NewReader(content))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
sp := strings.Split(string(content), "\n")
|
||||
sp = sp[4:]
|
||||
data := strings.Join(sp, "\n")
|
||||
|
||||
p := &Post{
|
||||
Title: front["title"].(string),
|
||||
Date: front["date"].(string),
|
||||
Link: strings.Split(path, ".")[0],
|
||||
Body: data,
|
||||
}
|
||||
|
||||
posts = append(posts, p)
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
sort.Sort(sort.Reverse(posts))
|
||||
|
||||
resume, err := ioutil.ReadFile("./static/resume/resume.md")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
rbody = string(resume)
|
||||
}
|
||||
|
||||
func main() {
|
||||
mux := http.NewServeMux()
|
||||
|
||||
mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {})
|
||||
mux.HandleFunc("/api/blog/posts", writeBlogPosts)
|
||||
mux.HandleFunc("/api/blog/post", func(w http.ResponseWriter, r *http.Request) {
|
||||
q := r.URL.Query()
|
||||
name := q.Get("name")
|
||||
|
||||
if name == "" {
|
||||
goto fail
|
||||
}
|
||||
|
||||
for _, p := range posts {
|
||||
if strings.HasSuffix(p.Link, name) {
|
||||
json.NewEncoder(w).Encode(p)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
fail:
|
||||
http.Error(w, "Not Found", http.StatusNotFound)
|
||||
})
|
||||
mux.HandleFunc("/api/resume", func(w http.ResponseWriter, r *http.Request) {
|
||||
json.NewEncoder(w).Encode(struct {
|
||||
Body string `json:"body"`
|
||||
}{
|
||||
Body: rbody,
|
||||
})
|
||||
})
|
||||
|
||||
if os.Getenv("USE_ASAR") == "yes" {
|
||||
log.Println("serving site frontend from asar file")
|
||||
|
||||
do404 := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
http.Error(w, "Not found", http.StatusNotFound)
|
||||
})
|
||||
fe, err := asarfs.New("./frontend.asar", do404)
|
||||
if err != nil {
|
||||
log.Fatal("frontend: ", err)
|
||||
}
|
||||
|
||||
mux.Handle("/dist/", http.FileServer(fe))
|
||||
} else {
|
||||
log.Println("serving site frontend from filesystem")
|
||||
mux.Handle("/dist/", http.FileServer(http.Dir("./frontend/static/")))
|
||||
}
|
||||
|
||||
mux.Handle("/static/", http.FileServer(http.Dir(".")))
|
||||
mux.HandleFunc("/", writeIndexHTML)
|
||||
|
||||
port := os.Getenv("PORT")
|
||||
if port == "" {
|
||||
port = "9090"
|
||||
}
|
||||
|
||||
mux.HandleFunc("/blog.rss", createFeed)
|
||||
mux.HandleFunc("/blog.atom", createAtom)
|
||||
mux.HandleFunc("/keybase.txt", func(w http.ResponseWriter, r *http.Request) {
|
||||
http.ServeFile(w, r, "./static/keybase.txt")
|
||||
})
|
||||
|
||||
n := negroni.Classic()
|
||||
n.UseHandler(mux)
|
||||
|
||||
log.Fatal(http.ListenAndServe(":"+port, n))
|
||||
}
|
||||
|
||||
func writeBlogPosts(w http.ResponseWriter, r *http.Request) {
|
||||
p := []interface{}{}
|
||||
for _, post := range posts {
|
||||
p = append(p, struct {
|
||||
Title string `json:"title"`
|
||||
Link string `json:"link"`
|
||||
Summary string `json:"summary,omitifempty"`
|
||||
Date string `json:"date"`
|
||||
}{
|
||||
Title: post.Title,
|
||||
Link: post.Link,
|
||||
Summary: post.Summary,
|
||||
Date: post.Date,
|
||||
})
|
||||
}
|
||||
json.NewEncoder(w).Encode(p)
|
||||
}
|
||||
|
||||
func writeIndexHTML(w http.ResponseWriter, r *http.Request) {
|
||||
http.ServeFile(w, r, "./frontend/static/dist/index.html")
|
||||
}
|
|
@@ -1,67 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/Xe/ln"
|
||||
"github.com/gorilla/feeds"
|
||||
)
|
||||
|
||||
var bootTime = time.Now()
|
||||
|
||||
var feed = &feeds.Feed{
|
||||
Title: "Christine Dodrill's Blog",
|
||||
Link: &feeds.Link{Href: "https://christine.website/blog"},
|
||||
Description: "My blog posts and rants about various technology things.",
|
||||
Author: &feeds.Author{Name: "Christine Dodrill", Email: "me@christine.website"},
|
||||
Created: bootTime,
|
||||
Copyright: "This work is copyright Christine Dodrill. My viewpoints are my own and not the view of any employer past, current or future.",
|
||||
}
|
||||
|
||||
func init() {
|
||||
for _, item := range posts {
|
||||
itime, _ := time.Parse("2006-01-02", item.Date)
|
||||
feed.Items = append(feed.Items, &feeds.Item{
|
||||
Title: item.Title,
|
||||
Link: &feeds.Link{Href: "https://christine.website/" + item.Link},
|
||||
Description: item.Summary,
|
||||
Created: itime,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// IncrediblySecureSalt *******
|
||||
const IncrediblySecureSalt = "hunter2"
|
||||
|
||||
func createFeed(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/rss+xml")
|
||||
w.Header().Set("ETag", Hash(bootTime.String(), IncrediblySecureSalt))
|
||||
|
||||
err := feed.WriteRss(w)
|
||||
if err != nil {
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
ln.Error(err, ln.F{
|
||||
"remote_addr": r.RemoteAddr,
|
||||
"action": "generating_rss",
|
||||
"uri": r.RequestURI,
|
||||
"host": r.Host,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func createAtom(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/atom+xml")
|
||||
w.Header().Set("ETag", Hash(bootTime.String(), IncrediblySecureSalt))
|
||||
|
||||
err := feed.WriteAtom(w)
|
||||
if err != nil {
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
ln.Error(err, ln.F{
|
||||
"remote_addr": r.RemoteAddr,
|
||||
"action": "generating_rss",
|
||||
"uri": r.RequestURI,
|
||||
"host": r.Host,
|
||||
})
|
||||
}
|
||||
}
|
box.rb
|
@@ -20,16 +20,14 @@ def put(file)
|
|||
end
|
||||
|
||||
files = [
|
||||
"backend",
|
||||
"blog",
|
||||
"frontend.asar",
|
||||
"static",
|
||||
"build.sh",
|
||||
"run.sh",
|
||||
|
||||
# This file is packaged in the asar file, but the go app relies on being
|
||||
# able to read it so it can cache the contents in ram.
|
||||
"frontend/static/dist/index.html",
|
||||
"templates",
|
||||
"gops.go",
|
||||
"hash.go",
|
||||
"html.go",
|
||||
"main.go",
|
||||
"rice-box.go",
|
||||
"rss.go",
|
||||
]
|
||||
|
||||
files.each { |x| put x }
|
||||
|
@@ -37,18 +35,13 @@ files.each { |x| put x }
|
|||
copy "vendor/", "/root/go/src/"
|
||||
|
||||
### Build
|
||||
run "apk add --no-cache --virtual site-builddep build-base"
|
||||
run %q[ cd /site && sh ./build.sh ]
|
||||
debug! if debug?
|
||||
run "cd /site && go1.8.1 build -v"
|
||||
|
||||
### Cleanup
|
||||
run %q[ rm -rf /root/go /site/backend /root/sdk ]
|
||||
run %q[ apk del git go1.8.1 site-builddep ]
|
||||
run %q[ rm -rf /root/go /site/backend /root/sdk /site/*.go ]
|
||||
run %q[ apk del git go1.8.1 ]
|
||||
|
||||
### Runtime
|
||||
cmd "/site/run.sh"
|
||||
|
||||
env "USE_ASAR" => "yes"
|
||||
cmd "/site/site"
|
||||
|
||||
flatten
|
||||
tag "xena/christine.website"
|
||||
|
|
build.sh
|
@@ -1,11 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
set -x
|
||||
|
||||
export PATH="$PATH:/usr/local/go/bin"
|
||||
export CI="true"
|
||||
|
||||
cd /site/backend/christine.website
|
||||
go1.8.1 build -v
|
||||
mv christine.website /usr/bin
|
|
@@ -1,23 +0,0 @@
|
|||
local sh = require "sh"
|
||||
local fs = require "fs"
|
||||
|
||||
sh { abort = true }
|
||||
|
||||
local cd = function(path)
|
||||
local ok, err = fs.chdir(path)
|
||||
if err ~= nil then
|
||||
error(err)
|
||||
end
|
||||
end
|
||||
|
||||
cd "frontend"
|
||||
sh.rm("-rf", "node_modules", "bower_components"):ok()
|
||||
print "running npm install..."
|
||||
sh.npm("install"):print()
|
||||
print "running npm run build..."
|
||||
sh.npm("run", "build"):print()
|
||||
print "packing frontend..."
|
||||
sh.asar("pack", "static", "../frontend.asar"):print()
|
||||
cd ".."
|
||||
|
||||
sh.box("box.rb"):print()
|
|
@@ -1,13 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -x
|
||||
|
||||
(cd frontend \
|
||||
&& rm -rf node_modules bower_components \
|
||||
&& npm install && npm run build \
|
||||
&& asar pack static ../frontend.asar \
|
||||
&& cd .. \
|
||||
&& keybase sign -d -i ./frontend.asar -o ./frontend.asar.sig)
|
||||
|
||||
box box.rb
|
|
@@ -0,0 +1 @@
|
|||
.solarized-dark{background-color:#073642;color:#78909c}.solarized-dark h1,.solarized-dark h2,.solarized-dark h3,.solarized-dark h4,.solarized-dark h5,.solarized-dark h6{color:#1e88e5}.solarized-dark h1 a,.solarized-dark h2 a,.solarized-dark h3 a,.solarized-dark h4 a,.solarized-dark h5 a,.solarized-dark h6 a{color:#1e88e5;border-bottom-color:#1e88e5}.solarized-dark h1 a:hover,.solarized-dark h2 a:hover,.solarized-dark h3 a:hover,.solarized-dark h4 a:hover,.solarized-dark h5 a:hover,.solarized-dark h6 a:hover{background-color:#1e88e5;color:#fff}.solarized-dark pre{background-color:#073642;padding:0;border:none}.solarized-dark pre code{color:#009688}.solarized-dark h1 a,.solarized-dark h2 a,.solarized-dark h3 a,.solarized-dark h4 a,.solarized-dark h5 a{color:#78909c}.solarized-dark code,.solarized-dark strong{color:#90a4ae}.solarized-dark code{font-weight:100}.solarized-dark .progress-bar-filled{background-color:#558b2f}.solarized-dark .progress-bar-filled:after,.solarized-dark .progress-bar-filled:before{color:#90a4ae}.solarized-dark table{color:#78909c}.solarized-dark table td,.solarized-dark table th{border-color:#b0bec5}.solarized-dark table tbody td:first-child{color:#b0bec5}.solarized-dark .form-group label{color:#78909c;border-color:#90a4ae}.solarized-dark .form-group.form-textarea label:after{background-color:#073642}.solarized-dark .form-control{color:#78909c;border-color:#90a4ae}.solarized-dark .form-control:focus{border-color:#cfd8dc;color:#cfd8dc}.solarized-dark textarea.form-control{color:#78909c}.solarized-dark .card{border-color:#90a4ae}.solarized-dark .card .card-header{background-color:transparent;color:#78909c;border-bottom:1px solid #90a4ae}.solarized-dark .btn.btn-ghost.btn-default{border-color:#607d8b;color:#607d8b}.solarized-dark .btn.btn-ghost.btn-default:focus,.solarized-dark .btn.btn-ghost.btn-default:hover{border-color:#90a4ae;color:#90a4ae;z-index:1}.solarized-dark .btn.btn-ghost.btn-default:focus,.solarized-dark .btn.btn-ghost.btn-default:hover{border-color:#e0e0e0;color:#e0e0e0}.solarized-dark .btn.btn-ghost.btn-primary:focus,.solarized-dark .btn.btn-ghost.btn-primary:hover{border-color:#64b5f6;color:#64b5f6}.solarized-dark .btn.btn-ghost.btn-success:focus,.solarized-dark .btn.btn-ghost.btn-success:hover{border-color:#81c784;color:#81c784}.solarized-dark .btn.btn-ghost.btn-info:focus,.solarized-dark .btn.btn-ghost.btn-info:hover{border-color:#4dd0e1;color:#4dd0e1}.solarized-dark .btn.btn-ghost.btn-error:focus,.solarized-dark .btn.btn-ghost.btn-error:hover{border-color:#e57373;color:#e57373}.solarized-dark .btn.btn-ghost.btn-warning:focus,.solarized-dark .btn.btn-ghost.btn-warning:hover{border-color:#ffb74d;color:#ffb74d}.solarized-dark .avatarholder,.solarized-dark .placeholder{background-color:transparent;border-color:#90a4ae}.solarized-dark .menu .menu-item{color:#78909c;border-color:#90a4ae}.solarized-dark .menu .menu-item.active,.solarized-dark .menu .menu-item:hover{color:#fff;border-color:#78909c}
|
|
@@ -1,9 +0,0 @@
|
|||
node_modules/
|
||||
bower_components/
|
||||
output/
|
||||
dist/
|
||||
static/dist
|
||||
.psci_modules
|
||||
npm-debug.log
|
||||
**DS_Store
|
||||
.psc-ide-port
|
|
@@ -1,24 +0,0 @@
|
|||
Copyright (c) 2016, Alexander C. Mingoia
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of the <organization> nor the
|
||||
names of its contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@@ -1,38 +0,0 @@
|
|||
# pux-starter-app
|
||||
|
||||
Starter [Pux](https://github.com/alexmingoia/purescript-pux/) application using
|
||||
webpack with hot-reloading and time-travel debug using
|
||||
[pux-devtool](https://github.com/alexmingoia/pux-devtool).
|
||||
|
||||
See the [Guide](https://alexmingoia.github.io/purescript-pux) for help learning
|
||||
Pux.
|
||||
|
||||
![Pux starter app animation](support/pux-starter-app.gif)
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
git clone git://github.com/alexmingoia/pux-starter-app.git example
|
||||
cd example
|
||||
npm install
|
||||
npm start
|
||||
```
|
||||
|
||||
Visit `http://localhost:3000` in your browser, edit `src/purs/Layout.purs`
|
||||
and watch the magic!
|
||||
|
||||
## Available scripts
|
||||
|
||||
### watch
|
||||
|
||||
`npm start` or `npm run watch` will start a development server, which
|
||||
hot-reloads your application when sources changes.
|
||||
|
||||
### serve
|
||||
|
||||
`npm run serve` serves your application without watching for changes or
|
||||
hot-reloading.
|
||||
|
||||
### build
|
||||
|
||||
`npm run build` bundles and minifies your application to run in production mode.
|
|
@@ -1,19 +0,0 @@
|
|||
{
|
||||
"name": "pux-starter-app",
|
||||
"homepage": "https://github.com/alexmingoia/pux-starter-app",
|
||||
"authors": [
|
||||
"Alex Mingoia <talk@alexmingoia.com>"
|
||||
],
|
||||
"description": "Starter Pux application using webpack with hot-reloading.",
|
||||
"main": "support/index.js",
|
||||
"license": "BSD3",
|
||||
"dependencies": {
|
||||
"purescript-pux": "^7.0.0",
|
||||
"purescript-pux-devtool": "^4.1.0",
|
||||
"purescript-argonaut": "^2.0.0",
|
||||
"purescript-affjax": "^3.0.2"
|
||||
},
|
||||
"resolutions": {
|
||||
"purescript-dom": "^3.1.0"
|
||||
}
|
||||
}
|
|
@@ -1,53 +0,0 @@
|
|||
{
|
||||
"name": "christine-website",
|
||||
"version": "0.1.0",
|
||||
"description": "Starter Pux application using webpack with hot-reloading.",
|
||||
"main": "support/index.js",
|
||||
"keywords": [
|
||||
"pux",
|
||||
"purescript-pux",
|
||||
"boilerplate",
|
||||
"starter-app"
|
||||
],
|
||||
"scripts": {
|
||||
"postinstall": "bower cache clean && bower install",
|
||||
"clean": "rimraf static/dist && rimraf dist && rimraf output",
|
||||
"build": "webpack --config ./webpack.production.config.js --progress --profile --colors",
|
||||
"watch": "npm run clean && node ./webpack.config.js",
|
||||
"serve": "http-server static --cors -p 3000",
|
||||
"start": "npm run watch",
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/alexmingoia/pux-starter-app.git"
|
||||
},
|
||||
"author": "Alexander C. Mingoia",
|
||||
"license": "BSD-3-Clause",
|
||||
"bugs": {
|
||||
"url": "https://github.com/alexmingoia/pux-starter-app/issues"
|
||||
},
|
||||
"dependencies": {
|
||||
"bower": "^1.7.9",
|
||||
"connect-history-api-fallback": "^1.2.0",
|
||||
"express": "^4.13.4",
|
||||
"favicons-webpack-plugin": "0.0.7",
|
||||
"html-webpack-plugin": "^2.15.0",
|
||||
"http-server": "^0.9.0",
|
||||
"purescript": "^0.10.1",
|
||||
"purescript-psa": "^0.3.9",
|
||||
"purs-loader": "^2.0.0",
|
||||
"react": "^15.0.0",
|
||||
"react-document-title": "^2.0.2",
|
||||
"react-dom": "^15.0.0",
|
||||
"rimraf": "^2.5.2",
|
||||
"showdown": "^1.6.0",
|
||||
"webpack": "^2.1.0-beta.25",
|
||||
"webpack-uglify-js-plugin": "^1.1.9"
|
||||
},
|
||||
"devDependencies": {
|
||||
"source-map-loader": "^0.1.5",
|
||||
"webpack-dev-middleware": "^1.8.3",
|
||||
"webpack-hot-middleware": "^2.12.2"
|
||||
}
|
||||
}
|
|
@@ -1,72 +0,0 @@
|
|||
module App.BlogEntry where
|
||||
|
||||
import App.Utils (mdify)
|
||||
import Control.Monad.Aff (attempt)
|
||||
import DOM (DOM)
|
||||
import Data.Argonaut (class DecodeJson, decodeJson, (.?))
|
||||
import Data.Either (Either(..), either)
|
||||
import Data.Maybe (Maybe(..))
|
||||
import Network.HTTP.Affjax (AJAX, get)
|
||||
import Prelude (bind, pure, show, ($), (<>), (<<<))
|
||||
import Pux (noEffects, EffModel)
|
||||
import Pux.DocumentTitle (documentTitle)
|
||||
import Pux.Html (Html, div, h1, p, text)
|
||||
import Pux.Html.Attributes (dangerouslySetInnerHTML, className, id_, title)
|
||||
|
||||
data Action = RequestPost
|
||||
| ReceivePost (Either String Post)
|
||||
|
||||
type State =
|
||||
{ status :: String
|
||||
, id :: Maybe Int
|
||||
, post :: Post
|
||||
, name :: String }
|
||||
|
||||
data Post = Post
|
||||
{ title :: String
|
||||
, body :: String
|
||||
, date :: String }
|
||||
|
||||
instance decodeJsonPost :: DecodeJson Post where
|
||||
decodeJson json = do
|
||||
obj <- decodeJson json
|
||||
title <- obj .? "title"
|
||||
body <- obj .? "body"
|
||||
date <- obj .? "date"
|
||||
pure $ Post { title: title, body: body, date: date }
|
||||
|
||||
init :: State
|
||||
init =
|
||||
{ status: "Loading..."
|
||||
, post: Post
|
||||
{ title: ""
|
||||
, body: ""
|
||||
, date: "" }
|
||||
, name: ""
|
||||
, id: Nothing }
|
||||
|
||||
update :: Action -> State -> EffModel State Action (ajax :: AJAX, dom :: DOM)
|
||||
update (ReceivePost (Left err)) state =
|
||||
noEffects $ state { id = Nothing, status = err }
|
||||
update (ReceivePost (Right post)) state = noEffects $ state { status = "", id = Just 1, post = post }
|
||||
update RequestPost state =
|
||||
{ state: state
|
||||
, effects: [ do
|
||||
res <- attempt $ get ("/api/blog/post?name=" <> state.name)
|
||||
let decode r = decodeJson r.response :: Either String Post
|
||||
let post = either (Left <<< show) decode res
|
||||
pure $ ReceivePost post
|
||||
]
|
||||
}
|
||||
|
||||
view :: State -> Html Action
|
||||
view { id: id, status: status, post: (Post post) } =
|
||||
case id of
|
||||
Nothing -> div [] []
|
||||
(Just _) ->
|
||||
div [ className "row" ]
|
||||
[ h1 [] [ text status ]
|
||||
, documentTitle [ title $ post.title <> " - Christine Dodrill" ] []
|
||||
, div [ className "col s8 offset-s2" ]
|
||||
[ p [ id_ "blogpost", dangerouslySetInnerHTML $ mdify post.body ] [] ]
|
||||
]
|
|
@@ -1,86 +0,0 @@
|
|||
module App.BlogIndex where
|
||||
|
||||
import Control.Monad.Aff (attempt)
|
||||
import DOM (DOM)
|
||||
import Data.Argonaut (class DecodeJson, decodeJson, (.?))
|
||||
import Data.Either (Either(Left, Right), either)
|
||||
import Network.HTTP.Affjax (AJAX, get)
|
||||
import Prelude (($), bind, map, const, show, (<>), pure, (<<<))
|
||||
import Pux (EffModel, noEffects)
|
||||
import Pux.DocumentTitle (documentTitle)
|
||||
import Pux.Html (Html, br, div, h1, ol, li, button, text, span, p)
|
||||
import Pux.Html.Attributes (className, id_, key, title)
|
||||
import Pux.Html.Events (onClick)
|
||||
import Pux.Router (link)
|
||||
|
||||
data Action = RequestPosts
|
||||
| ReceivePosts (Either String Posts)
|
||||
|
||||
type State =
|
||||
{ posts :: Posts
|
||||
, status :: String }
|
||||
|
||||
data Post = Post
|
||||
{ title :: String
|
||||
, link :: String
|
||||
, summary :: String
|
||||
, date :: String }
|
||||
|
||||
type Posts = Array Post
|
||||
|
||||
instance decodeJsonPost :: DecodeJson Post where
|
||||
decodeJson json = do
|
||||
obj <- decodeJson json
|
||||
title <- obj .? "title"
|
||||
link <- obj .? "link"
|
||||
summ <- obj .? "summary"
|
||||
date <- obj .? "date"
|
||||
pure $ Post { title: title, link: link, summary: summ, date: date }
|
||||
|
||||
init :: State
|
||||
init =
|
||||
{ posts: []
|
||||
, status: "" }
|
||||
|
||||
update :: Action -> State -> EffModel State Action (ajax :: AJAX, dom :: DOM)
|
||||
update (ReceivePosts (Left err)) state =
|
||||
noEffects $ state { status = ("error: " <> err) }
|
||||
update (ReceivePosts (Right posts)) state =
|
||||
noEffects $ state { posts = posts, status = "" }
|
||||
update RequestPosts state =
|
||||
{ state: state { status = "Loading..." }
|
||||
, effects: [ do
|
||||
res <- attempt $ get "/api/blog/posts"
|
||||
let decode r = decodeJson r.response :: Either String Posts
|
||||
let posts = either (Left <<< show) decode res
|
||||
pure $ ReceivePosts posts
|
||||
]
|
||||
}
|
||||
|
||||
post :: Post -> Html Action
|
||||
post (Post state) =
|
||||
div
|
||||
[ className "col s6" ]
|
||||
[ div
|
||||
[ className "card pink lighten-5" ]
|
||||
[ div
|
||||
[ className "card-content black-text" ]
|
||||
[ span [ className "card-title" ] [ text state.title ]
|
||||
, br [] []
|
||||
, p [] [ text ("Posted on: " <> state.date) ]
|
||||
, span [] [ text state.summary ]
|
||||
]
|
||||
, div
|
||||
[ className "card-action pink lighten-5" ]
|
||||
[ link state.link [] [ text "Read More" ] ]
|
||||
]
|
||||
]
|
||||
|
||||
view :: State -> Html Action
|
||||
view state =
|
||||
div
|
||||
[]
|
||||
[ h1 [] [ text "Posts" ]
|
||||
, documentTitle [ title "Posts - Christine Dodrill" ] []
|
||||
, div [ className "row" ] $ map post state.posts
|
||||
, p [] [ text state.status ] ]
|
|
@@ -1,40 +0,0 @@
|
|||
module App.Counter where
|
||||
|
||||
import Prelude ((+), (-), const, show)
|
||||
import Pux.Html (Html, a, br, div, span, text)
|
||||
import Pux.Html.Attributes (className, href)
|
||||
import Pux.Html.Events (onClick)
|
||||
|
||||
data Action = Increment | Decrement
|
||||
|
||||
type State = Int
|
||||
|
||||
init :: State
|
||||
init = 0
|
||||
|
||||
update :: Action -> State -> State
|
||||
update Increment state = state + 1
|
||||
update Decrement state = state - 1
|
||||
|
||||
view :: State -> Html Action
|
||||
view state =
|
||||
div
|
||||
[ className "row" ]
|
||||
[ div
|
||||
[ className "col s4 offset-s4" ]
|
||||
[ div
|
||||
[ className "card blue-grey darken-1" ]
|
||||
[ div
|
||||
[ className "card-content white-text" ]
|
||||
[ span [ className "card-title" ] [ text "Counter" ]
|
||||
, br [] []
|
||||
, span [] [ text (show state) ]
|
||||
]
|
||||
, div
|
||||
[ className "card-action" ]
|
||||
[ a [ onClick (const Increment), href "#" ] [ text "Increment" ]
|
||||
, a [ onClick (const Decrement), href "#" ] [ text "Decrement" ]
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
|
@@ -1,188 +0,0 @@
|
|||
module App.Layout where
|
||||
|
||||
import App.BlogEntry as BlogEntry
|
||||
import App.BlogIndex as BlogIndex
|
||||
import App.Counter as Counter
|
||||
import App.Resume as Resume
|
||||
import Pux.Html as H
|
||||
import App.Routes (Route(..))
|
||||
import Control.Monad.RWS (state)
|
||||
import DOM (DOM)
|
||||
import Network.HTTP.Affjax (AJAX)
|
||||
import Prelude (($), (#), map, pure)
|
||||
import Pux (EffModel, noEffects, mapEffects, mapState)
|
||||
import Pux.DocumentTitle (documentTitle)
|
||||
import Pux.Html (style, Html, a, code, div, h1, h2, h3, h4, li, nav, p, pre, text, ul, img, span)
|
||||
import Pux.Html (Html, a, code, div, h1, h3, h4, li, nav, p, pre, text, ul)
|
||||
import Pux.Html.Attributes (attr, target, href, classID, className, id_, role, src, rel, title)
|
||||
import Pux.Router (link)
|
||||
|
||||
data Action
|
||||
= Child (Counter.Action)
|
||||
| BIChild (BlogIndex.Action)
|
||||
| BEChild (BlogEntry.Action)
|
||||
| REChild (Resume.Action)
|
||||
| PageView Route
|
||||
|
||||
type State =
|
||||
{ route :: Route
|
||||
, count :: Counter.State
|
||||
, bistate :: BlogIndex.State
|
||||
, bestate :: BlogEntry.State
|
||||
, restate :: Resume.State }
|
||||
|
||||
init :: State
|
||||
init =
|
||||
{ route: NotFound
|
||||
, count: Counter.init
|
||||
, bistate: BlogIndex.init
|
||||
, bestate: BlogEntry.init
|
||||
, restate: Resume.init }
|
||||
|
||||
update :: Action -> State -> EffModel State Action (ajax :: AJAX, dom :: DOM)
|
||||
update (PageView route) state = routeEffects route $ state { route = route }
|
||||
update (BIChild action) state = BlogIndex.update action state.bistate
|
||||
# mapState (state { bistate = _ })
|
||||
# mapEffects BIChild
|
||||
update (BEChild action) state = BlogEntry.update action state.bestate
|
||||
# mapState (state { bestate = _ })
|
||||
# mapEffects BEChild
|
||||
update (REChild action) state = Resume.update action state.restate
|
||||
# mapState ( state { restate = _ })
|
||||
# mapEffects REChild
|
||||
update (Child action) state = noEffects $ state { count = Counter.update action state.count }
|
||||
update _ state = noEffects $ state
|
||||
|
||||
routeEffects :: Route -> State -> EffModel State Action (dom :: DOM, ajax :: AJAX)
|
||||
routeEffects (BlogIndex) state = { state: state
|
||||
, effects: [ pure BlogIndex.RequestPosts ] } # mapEffects BIChild
|
||||
routeEffects (Resume) state = { state: state
|
||||
, effects: [ pure Resume.RequestResume ] } # mapEffects REChild
|
||||
routeEffects (BlogPost page') state = { state: state { bestate = BlogEntry.init { name = page' } }
|
||||
, effects: [ pure BlogEntry.RequestPost ] } # mapEffects BEChild
|
||||
routeEffects _ state = noEffects $ state
|
||||
|
||||
view :: State -> Html Action
|
||||
view state =
|
||||
div
|
||||
[]
|
||||
[ navbar state
|
||||
, div
|
||||
[ className "container" ]
|
||||
[ page state.route state ]
|
||||
]
|
||||
|
||||
navbar :: State -> Html Action
|
||||
navbar state =
|
||||
nav
|
||||
[ className "pink lighten-1", role "navigation" ]
|
||||
[ div
|
||||
[ className "nav-wrapper container" ]
|
||||
[ link "/" [ className "brand-logo", id_ "logo-container" ] [ text "Christine Dodrill" ]
|
||||
, H.link [ rel "stylesheet", href "/static/css/about/main.css" ] []
|
||||
, ul
|
||||
[ className "right hide-on-med-and-down" ]
|
||||
[ li [] [ link "/blog" [] [ text "Blog" ] ]
|
||||
-- , li [] [ link "/projects" [] [ text "Projects" ] ]
|
||||
, li [] [ link "/resume" [] [ text "Resume" ] ]
|
||||
, li [] [ link "/contact" [] [ text "Contact" ] ]
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
contact :: Html Action
|
||||
contact =
|
||||
div
|
||||
[ className "row" ]
|
||||
[ documentTitle [ title "Contact - Christine Dodrill" ] []
|
||||
, div
|
||||
[ className "col s6" ]
|
||||
[ h3 [] [ text "Email" ]
|
||||
, div [ className "email" ] [ text "me@christine.website" ]
|
||||
, p []
|
||||
[ text "My GPG fingerprint is "
|
||||
, code [] [ text "799F 9134 8118 1111" ]
|
||||
, text ". If you get an email that appears to be from me and the signature does not match that fingerprint, it is not from me. You may download a copy of my public key "
|
||||
, a [ href "/static/gpg.pub" ] [ text "here" ]
|
||||
, text "."
|
||||
]
|
||||
, h3 [] [ text "Social Media" ]
|
||||
, ul
|
||||
[ className "browser-default" ]
|
||||
[ li [] [ a [ href "https://github.com/Xe" ] [ text "Github" ] ]
|
||||
, li [] [ a [ href "https://twitter.com/theprincessxena"] [ text "Twitter" ] ]
|
||||
, li [] [ a [ href "https://keybase.io/xena" ] [ text "Keybase" ] ]
|
||||
, li [] [ a [ href "https://www.coinbase.com/christinedodrill" ] [ text "Coinbase" ] ]
|
||||
, li [] [ a [ href "https://www.facebook.com/chrissycade1337" ] [ text "Facebook" ] ]
|
||||
]
|
||||
]
|
||||
, div
|
||||
[ className "col s6" ]
|
||||
[ h3 [] [ text "Other Information" ]
|
||||
, p []
|
||||
[ text "To send me donations, my bitcoin address is "
|
||||
, code [] [ text "1Gi2ZF2C9CU9QooH8bQMB2GJ2iL6shVnVe" ]
|
||||
, text "."
|
||||
]
|
||||
, div []
|
||||
[ h4 [] [ text "IRC" ]
|
||||
, p [] [ text "I am on many IRC networks. On Freenode I am using the nick Xe but elsewhere I will use the nick Xena or Cadey." ]
|
||||
]
|
||||
, div []
|
||||
[ h4 [] [ text "Telegram" ]
|
||||
, a [ href "https://telegram.me/miamorecadenza" ] [ text "@miamorecadenza" ]
|
||||
]
|
||||
, div []
|
||||
[ h4 [] [ text "Discord" ]
|
||||
, pre [] [ text "Cadey~#1932" ]
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
index :: Html Action
|
||||
index =
|
||||
div
|
||||
[ className "row panel" ]
|
||||
[ documentTitle [ title "Christine Dodrill" ] []
|
||||
, div [] [ div
|
||||
[ className "col m4 bg_blur valign-wrapper center-align" ]
|
||||
[ div
|
||||
[ className "valign center-align fb_wrap" ]
|
||||
[ link "/contact"
|
||||
[ className "btn follow_btn" ]
|
||||
[ text "Contact Me" ]
|
||||
]
|
||||
]
|
||||
]
|
||||
, div
|
||||
[ className "col m8" ]
|
||||
[ div
|
||||
[ className "header" ]
|
||||
[ h1 [] [ text "Christine Dodrill" ]
|
||||
, h4 [] [ text "Rockstar Hacker, Freelance Programmer, Gopher, Cloud Architect" ]
|
||||
, span [] [ text "I am a GitHub power user. I am constantly learning new languages and tools. I strongly believe in knowing many languages and ways to do things so I can pick the right tool for the job." ]
|
||||
, h2 [] [ text "Skills" ]
|
||||
, ul
|
||||
[ className "browser-default" ]
|
||||
[ li [] [ text "Go, Moonscript, Lua, Python, C, Nim, Haskell" ]
|
||||
, li [] [ text "Docker deployments" ]
|
||||
, li [] [ text "Research, Development and Experimentation" ]
|
||||
]
|
||||
, h2 [] [ text "Side Projects" ]
|
||||
, ul
|
||||
[ className "browser-default" ]
|
||||
[ li [] [ text "Real-time globally distributed chat server maintenance" ]
|
||||
, li [] [ text "Mashups of chat, video and music" ]
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
page :: Route -> State -> Html Action
|
||||
page NotFound _ = h1 [] [ text "not found" ]
|
||||
page Home _ = index
|
||||
page Resume state = map REChild $ Resume.view state.restate
|
||||
page BlogIndex state = map BIChild $ BlogIndex.view state.bistate
|
||||
page (BlogPost _) state = map BEChild $ BlogEntry.view state.bestate
|
||||
page ContactPage _ = contact
|
||||
page _ _ = h1 [] [ text "not implemented yet" ]
|
|
@@ -1,53 +0,0 @@
|
|||
module Main where
|
||||
|
||||
import App.Layout (Action(PageView), State, view, update)
|
||||
import App.Routes (match)
|
||||
import Control.Bind ((=<<))
|
||||
import Control.Monad.Eff (Eff)
|
||||
import DOM (DOM)
|
||||
import Network.HTTP.Affjax (AJAX)
|
||||
import Prelude (bind, pure)
|
||||
import Pux (renderToDOM, renderToString, App, Config, CoreEffects, start)
|
||||
import Pux.Devtool (Action, start) as Pux.Devtool
|
||||
import Pux.Router (sampleUrl)
|
||||
import Signal ((~>))
|
||||
|
||||
type AppEffects = (dom :: DOM, ajax :: AJAX)
|
||||
|
||||
-- | App configuration
|
||||
config :: forall eff. State -> Eff (dom :: DOM | eff) (Config State Action AppEffects)
|
||||
config state = do
|
||||
-- | Create a signal of URL changes.
|
||||
urlSignal <- sampleUrl
|
||||
|
||||
-- | Map a signal of URL changes to PageView actions.
|
||||
let routeSignal = urlSignal ~> \r -> PageView (match r)
|
||||
|
||||
pure
|
||||
{ initialState: state
|
||||
, update: update
|
||||
, view: view
|
||||
, inputs: [routeSignal] }
|
||||
|
||||
-- | Entry point for the browser.
|
||||
main :: State -> Eff (CoreEffects AppEffects) (App State Action)
|
||||
main state = do
|
||||
app <- start =<< config state
|
||||
renderToDOM "#app" app.html
|
||||
-- | Used by hot-reloading code in support/index.js
|
||||
pure app
|
||||
|
||||
-- | Entry point for the browser with pux-devtool injected.
|
||||
debug :: State -> Eff (CoreEffects AppEffects) (App State (Pux.Devtool.Action Action))
|
||||
debug state = do
|
||||
app <- Pux.Devtool.start =<< config state
|
||||
renderToDOM "#app" app.html
|
||||
-- | Used by hot-reloading code in support/index.js
|
||||
pure app
|
||||
|
||||
-- | Entry point for server side rendering
|
||||
ssr :: State -> Eff (CoreEffects AppEffects) String
|
||||
ssr state = do
|
||||
app <- start =<< config state
|
||||
res <- renderToString app.html
|
||||
pure res
|
|
@@ -1,8 +0,0 @@
|
|||
module App.NotFound where
|
||||
|
||||
import Pux.Html (Html, (#), div, h2, text)
|
||||
|
||||
view :: forall state action. state -> Html action
|
||||
view state =
|
||||
div # do
|
||||
h2 # text "404 Not Found"
|
|
@@ -1,3 +0,0 @@
|
|||
var Pux = require('purescript-pux');
|
||||
|
||||
exports.documentTitle = Pux.fromReact(require('react-document-title'));
|
|
@@ -1,7 +0,0 @@
|
|||
module Pux.DocumentTitle where
|
||||
|
||||
import Pux.Html (Html, Attribute)
|
||||
|
||||
-- | Declaratively set `document.title`. See [react-document-title](https://github.com/gaearon/react-document-title)
|
||||
-- | for more information.
|
||||
foreign import documentTitle :: forall a. Array (Attribute a) -> Array (Html a) -> Html a
|
|
@@ -1,66 +0,0 @@
|
|||
module App.Resume where
|
||||
|
||||
import App.Utils (mdify)
|
||||
import Control.Monad.Aff (attempt)
|
||||
import DOM (DOM)
|
||||
import Data.Argonaut (class DecodeJson, decodeJson, (.?))
|
||||
import Data.Either (Either(..), either)
|
||||
import Data.Maybe (Maybe(..))
|
||||
import Network.HTTP.Affjax (AJAX, get)
|
||||
import Prelude (Unit, bind, pure, show, unit, ($), (<>), (<<<))
|
||||
import Pux (noEffects, EffModel)
|
||||
import Pux.DocumentTitle (documentTitle)
|
||||
import Pux.Html (Html, a, div, h1, p, text)
|
||||
import Pux.Html.Attributes (href, dangerouslySetInnerHTML, className, id_, title)
|
||||
|
||||
data Action = RequestResume
|
||||
| ReceiveResume (Either String Resume)
|
||||
|
||||
type State =
|
||||
{ status :: String
|
||||
, err :: String
|
||||
, resume :: Maybe Resume }
|
||||
|
||||
data Resume = Resume
|
||||
{ body :: String }
|
||||
|
||||
instance decodeJsonResume :: DecodeJson Resume where
|
||||
decodeJson json = do
|
||||
obj <- decodeJson json
|
||||
body <- obj .? "body"
|
||||
pure $ Resume { body: body }
|
||||
|
||||
init :: State
|
||||
init =
|
||||
{ status: "Loading..."
|
||||
, err: ""
|
||||
, resume: Nothing }
|
||||
|
||||
update :: Action -> State -> EffModel State Action (ajax :: AJAX, dom :: DOM)
|
||||
update (ReceiveResume (Left err)) state =
|
||||
noEffects $ state { resume = Nothing, status = "Error in fetching resume, please use the plain text link below.", err = err }
|
||||
update (ReceiveResume (Right body)) state =
|
||||
noEffects $ state { status = "", err = "", resume = Just body }
|
||||
where
|
||||
got' = Just unit
|
||||
update RequestResume state =
|
||||
{ state: state
|
||||
, effects: [ do
|
||||
res <- attempt $ get "/api/resume"
|
||||
let decode r = decodeJson r.response :: Either String Resume
|
||||
let resume = either (Left <<< show) decode res
|
||||
pure $ ReceiveResume resume
|
||||
]
|
||||
}
|
||||
|
||||
view :: State -> Html Action
|
||||
view { status: status, err: err, resume: resume } =
|
||||
case resume of
|
||||
Nothing -> div [] [ text status, p [] [ text err ] ]
|
||||
(Just (Resume resume')) ->
|
||||
div [ className "row" ]
|
||||
[ documentTitle [ title "Resume - Christine Dodrill" ] []
|
||||
, div [ className "col s8 offset-s2" ]
|
||||
[ p [ className "browser-default", dangerouslySetInnerHTML $ mdify resume'.body ] []
|
||||
, a [ href "/static/resume/resume.md" ] [ text "Plain-text version of this resume here" ], text "." ]
|
||||
]
|
|
@@ -1,31 +0,0 @@
|
|||
module App.Routes where
|
||||
|
||||
import App.BlogEntry as BlogEntry
|
||||
import App.BlogIndex as BlogIndex
|
||||
import App.Counter as Counter
|
||||
import Control.Alt ((<|>))
|
||||
import Control.Apply ((<*), (*>))
|
||||
import Data.Functor ((<$))
|
||||
import Data.Maybe (fromMaybe)
|
||||
import Prelude (($), (<$>))
|
||||
import Pux.Router (param, router, lit, str, end)
|
||||
|
||||
data Route = Home
|
||||
| Resume
|
||||
| ContactPage
|
||||
| StaticPage String
|
||||
| BlogIndex
|
||||
| BlogPost String
|
||||
| NotFound
|
||||
|
||||
match :: String -> Route
|
||||
match url = fromMaybe NotFound $ router url $
|
||||
Home <$ end
|
||||
<|>
|
||||
BlogIndex <$ lit "blog" <* end
|
||||
<|>
|
||||
BlogPost <$> (lit "blog" *> str) <* end
|
||||
<|>
|
||||
ContactPage <$ lit "contact" <* end
|
||||
<|>
|
||||
Resume <$ lit "resume" <* end
|
|
@@ -1,16 +0,0 @@
|
|||
// Module App.BlogEntry
|
||||
|
||||
showdown = require("showdown");
|
||||
|
||||
showdown.extension('blog', function() {
|
||||
return [{
|
||||
type: 'output',
|
||||
regex: /<ul>/g,
|
||||
replace: '<ul class="browser-default">'
|
||||
}];
|
||||
});
|
||||
|
||||
exports.mdify = function(corpus) {
|
||||
var converter = new showdown.Converter({ extensions: ['blog'] });
|
||||
return converter.makeHtml(corpus);
|
||||
};
|
|
@@ -1,3 +0,0 @@
|
|||
module App.Utils where
|
||||
|
||||
foreign import mdify :: String -> String
|
|
@@ -1,18 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta http-equiv="Content-type" content="text/html; charset=utf-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>Christine Dodrill</title>
|
||||
|
||||
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.8/css/materialize.min.css">
|
||||
<link rel="stylesheet" href="/static/css/main.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
|
||||
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.8/js/materialize.min.js"></script>
|
||||
</body>
|
||||
</html>
|
|
@@ -1,25 +0,0 @@
|
|||
var Main = require('../src/Main.purs');
|
||||
var initialState = require('../src/Layout.purs').init;
|
||||
var debug = process.env.NODE_ENV === 'development'
|
||||
|
||||
if (module.hot) {
|
||||
var app = Main[debug ? 'debug' : 'main'](window.puxLastState || initialState)();
|
||||
app.state.subscribe(function (state) {
|
||||
window.puxLastState = state;
|
||||
});
|
||||
module.hot.accept();
|
||||
} else {
|
||||
Main[debug ? 'debug' : 'main'](initialState)();
|
||||
}
|
||||
|
||||
global.main = function(args, callback) {
|
||||
var body = Main['ssr'](initialState)();
|
||||
|
||||
result = {
|
||||
"app": body,
|
||||
"uuid": args.uuid,
|
||||
"title": "Christine Dodrill"
|
||||
}
|
||||
|
||||
callback(result);
|
||||
};
|
(binary image removed from the frontend assets; original size 2.0 MiB)
|
@@ -1,102 +0,0 @@
|
|||
var path = require('path');
|
||||
var webpack = require('webpack');
|
||||
var HtmlWebpackPlugin = require('html-webpack-plugin');
|
||||
|
||||
var port = process.env.PORT || 3000;
|
||||
|
||||
var config = {
|
||||
entry: [
|
||||
'webpack-hot-middleware/client?reload=true',
|
||||
path.join(__dirname, 'support/index.js'),
|
||||
],
|
||||
devtool: 'cheap-module-eval-source-map',
|
||||
output: {
|
||||
path: path.resolve('./static/dist'),
|
||||
filename: '[name].js',
|
||||
publicPath: '/'
|
||||
},
|
||||
module: {
|
||||
loaders: [
|
||||
{ test: /\.js$/, loader: 'source-map-loader', exclude: /node_modules|bower_components/ },
|
||||
{
|
||||
test: /\.purs$/,
|
||||
loader: 'purs-loader',
|
||||
exclude: /node_modules/,
|
||||
query: {
|
||||
psc: 'psa',
|
||||
pscArgs: {
|
||||
sourceMaps: true
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
},
|
||||
plugins: [
|
||||
new webpack.DefinePlugin({
|
||||
'process.env.NODE_ENV': JSON.stringify('development')
|
||||
}),
|
||||
new webpack.optimize.OccurrenceOrderPlugin(true),
|
||||
new webpack.LoaderOptionsPlugin({
|
||||
debug: true
|
||||
}),
|
||||
new webpack.SourceMapDevToolPlugin({
|
||||
filename: '[file].map',
|
||||
moduleFilenameTemplate: '[absolute-resource-path]',
|
||||
fallbackModuleFilenameTemplate: '[absolute-resource-path]'
|
||||
}),
|
||||
new HtmlWebpackPlugin({
|
||||
template: 'support/index.html',
|
||||
inject: 'body',
|
||||
filename: 'index.html'
|
||||
}),
|
||||
new webpack.HotModuleReplacementPlugin(),
|
||||
new webpack.NoErrorsPlugin(),
|
||||
],
|
||||
resolveLoader: {
|
||||
modules: [
|
||||
path.join(__dirname, 'node_modules')
|
||||
]
|
||||
},
|
||||
resolve: {
|
||||
modules: [
|
||||
'node_modules',
|
||||
'bower_components'
|
||||
],
|
||||
extensions: ['.js', '.purs']
|
||||
},
|
||||
};
|
||||
|
||||
// If this file is directly run with node, start the development server
|
||||
// instead of exporting the webpack config.
|
||||
if (require.main === module) {
|
||||
var compiler = webpack(config);
|
||||
var express = require('express');
|
||||
var app = express();
|
||||
|
||||
// Use webpack-dev-middleware and webpack-hot-middleware instead of
|
||||
// webpack-dev-server, because webpack-hot-middleware provides more reliable
|
||||
// HMR behavior, and an in-browser overlay that displays build errors
|
||||
app
|
||||
.use(express.static('./static'))
|
||||
.use(require('connect-history-api-fallback')())
|
||||
.use(require("webpack-dev-middleware")(compiler, {
|
||||
publicPath: config.output.publicPath,
|
||||
stats: {
|
||||
hash: false,
|
||||
timings: false,
|
||||
version: false,
|
||||
assets: false,
|
||||
errors: true,
|
||||
colors: false,
|
||||
chunks: false,
|
||||
children: false,
|
||||
cached: false,
|
||||
modules: false,
|
||||
chunkModules: false,
|
||||
},
|
||||
}))
|
||||
.use(require("webpack-hot-middleware")(compiler))
|
||||
.listen(port);
|
||||
} else {
|
||||
module.exports = config;
|
||||
}
|
|
@@ -1,69 +0,0 @@
|
|||
var path = require('path');
|
||||
var webpack = require('webpack');
|
||||
var HtmlWebpackPlugin = require('html-webpack-plugin');
|
||||
var webpackUglifyJsPlugin = require('webpack-uglify-js-plugin');
|
||||
var FaviconsWebpackPlugin = require('favicons-webpack-plugin');
|
||||
|
||||
module.exports = {
|
||||
entry: [ path.join(__dirname, 'support/index.js') ],
|
||||
output: {
|
||||
path: path.resolve('./static/dist'),
|
||||
filename: '[name]-[hash].min.js',
|
||||
publicPath: '/dist/'
|
||||
},
|
||||
module: {
|
||||
loaders: [
|
||||
{
|
||||
test: /\.purs$/,
|
||||
loader: 'purs-loader',
|
||||
exclude: /node_modules/,
|
||||
query: {
|
||||
psc: 'psa',
|
||||
bundle: true,
|
||||
warnings: false
|
||||
}
|
||||
}
|
||||
],
|
||||
},
|
||||
plugins: [
|
||||
new webpack.DefinePlugin({
|
||||
'process.env.NODE_ENV': JSON.stringify('production')
|
||||
}),
|
||||
new webpack.optimize.OccurrenceOrderPlugin(true),
|
||||
new webpack.LoaderOptionsPlugin({
|
||||
minimize: true,
|
||||
debug: false
|
||||
}),
|
||||
new HtmlWebpackPlugin({
|
||||
template: 'support/index.html',
|
||||
inject: 'body',
|
||||
filename: 'index.html'
|
||||
}),
|
||||
new FaviconsWebpackPlugin('../static/img/avatar.png'),
|
||||
new webpack.optimize.DedupePlugin(),
|
||||
new webpack.optimize.UglifyJsPlugin({
|
||||
beautify: false,
|
||||
mangle: true,
|
||||
comments: false,
|
||||
compress: {
|
||||
dead_code: true,
|
||||
loops: true,
|
||||
if_return: true,
|
||||
unused: true,
|
||||
warnings: false
|
||||
}
|
||||
})
|
||||
],
|
||||
resolveLoader: {
|
||||
modules: [
|
||||
path.join(__dirname, 'node_modules')
|
||||
]
|
||||
},
|
||||
resolve: {
|
||||
modules: [
|
||||
'node_modules',
|
||||
'bower_components'
|
||||
],
|
||||
extensions: ['.js', '.purs']
|
||||
}
|
||||
};
|
|
@@ -0,0 +1,13 @@
package main

import (
	"log"

	"github.com/google/gops/agent"
)

func init() {
	if err := agent.Listen(nil); err != nil {
		log.Fatal(err)
	}
}
|
@@ -0,0 +1,72 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/Xe/ln"
|
||||
)
|
||||
|
||||
func logTemplateTime(name string, from time.Time) {
|
||||
now := time.Now()
|
||||
ln.Log(ln.F{"action": "template_rendered", "dur": now.Sub(from).String(), "name": name})
|
||||
}
|
||||
|
||||
func (s *Site) renderTemplatePage(templateFname string, data interface{}) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
defer logTemplateTime(templateFname, time.Now())
|
||||
s.tlock.RLock()
|
||||
defer s.tlock.RUnlock()
|
||||
|
||||
var t *template.Template
|
||||
var err error
|
||||
|
||||
if s.templates[templateFname] == nil {
|
||||
t, err = template.ParseFiles("templates/base.html", "templates/"+templateFname)
|
||||
if err != nil {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
ln.Error(err, ln.F{"action": "renderTemplatePage", "page": templateFname})
|
||||
fmt.Fprintf(w, "error: %v", err)
|
||||
}
|
||||
|
||||
ln.Log(ln.F{"action": "loaded_new_template", "fname": templateFname})
|
||||
|
||||
s.tlock.RUnlock()
|
||||
s.tlock.Lock()
|
||||
s.templates[templateFname] = t
|
||||
s.tlock.Unlock()
|
||||
s.tlock.RLock()
|
||||
} else {
|
||||
t = s.templates[templateFname]
|
||||
}
|
||||
|
||||
err = t.Execute(w, data)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Site) showPost(w http.ResponseWriter, r *http.Request) {
|
||||
if r.RequestURI == "/blog/" {
|
||||
http.Redirect(w, r, "/blog", http.StatusSeeOther)
|
||||
return
|
||||
}
|
||||
|
||||
var p *Post
|
||||
for _, pst := range s.Posts {
|
||||
if pst.Link == r.RequestURI[1:] {
|
||||
p = pst
|
||||
}
|
||||
}
|
||||
|
||||
if p == nil {
|
||||
w.WriteHeader(http.StatusNotFound)
|
||||
s.renderTemplatePage("error.html", "no such post found: "+r.RequestURI).ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
s.renderTemplatePage("blogpost.html", p).ServeHTTP(w, r)
|
||||
}
|
|
@@ -0,0 +1,204 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"html/template"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/GeertJohan/go.rice"
|
||||
"github.com/Xe/jsonfeed"
|
||||
"github.com/Xe/ln"
|
||||
"github.com/gorilla/feeds"
|
||||
"github.com/russross/blackfriday"
|
||||
"github.com/tj/front"
|
||||
)
|
||||
|
||||
var port = os.Getenv("PORT")
|
||||
|
||||
func main() {
|
||||
if port == "" {
|
||||
port = "29384"
|
||||
}
|
||||
|
||||
s, err := Build()
|
||||
if err != nil {
|
||||
ln.Fatal(ln.F{"err": err, "action": "Build"})
|
||||
}
|
||||
|
||||
ln.Log(ln.F{"action": "http_listening", "port": port})
|
||||
http.ListenAndServe(":"+port, s)
|
||||
}
|
||||
|
||||
// Site is the parent object for https://christine.website's backend.
|
||||
type Site struct {
|
||||
Posts Posts
|
||||
Resume template.HTML
|
||||
|
||||
rssFeed *feeds.Feed
|
||||
jsonFeed *jsonfeed.Feed
|
||||
|
||||
mux *http.ServeMux
|
||||
|
||||
templates map[string]*template.Template
|
||||
tlock sync.RWMutex
|
||||
}
|
||||
|
||||
func (s *Site) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
ln.Log(ln.F{"action": "Site.ServeHTTP", "user_ip_address": r.RemoteAddr, "path": r.RequestURI})
|
||||
s.mux.ServeHTTP(w, r)
|
||||
}
|
||||
|
||||
// Build creates a new Site instance or fails.
|
||||
func Build() (*Site, error) {
|
||||
type postFM struct {
|
||||
Title string
|
||||
Date string
|
||||
}
|
||||
|
||||
s := &Site{
|
||||
rssFeed: &feeds.Feed{
|
||||
Title: "Christine Dodrill's Blog",
|
||||
Link: &feeds.Link{Href: "https://christine.website/blog"},
|
||||
Description: "My blog posts and rants about various technology things.",
|
||||
Author: &feeds.Author{Name: "Christine Dodrill", Email: "me@christine.website"},
|
||||
Created: bootTime,
|
||||
Copyright: "This work is copyright Christine Dodrill. My viewpoints are my own and not the view of any employer past, current or future.",
|
||||
},
|
||||
jsonFeed: &jsonfeed.Feed{
|
||||
Version: jsonfeed.CurrentVersion,
|
||||
Title: "Christine Dodrill's Blog",
|
||||
HomePageURL: "https://christine.website",
|
||||
FeedURL: "https://christine.website/blog.json",
|
||||
Description: "My blog posts and rants about various technology things.",
|
||||
UserComment: "This is a JSON feed of my blogposts. For more information read: https://jsonfeed.org/version/1",
|
||||
Icon: icon,
|
||||
Favicon: icon,
|
||||
Author: jsonfeed.Author{
|
||||
Name: "Christine Dodrill",
|
||||
Avatar: icon,
|
||||
},
|
||||
},
|
||||
mux: http.NewServeMux(),
|
||||
templates: map[string]*template.Template{},
|
||||
}
|
||||
|
||||
err := filepath.Walk("./blog/", func(path string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
fin, err := os.Open(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer fin.Close()
|
||||
|
||||
content, err := ioutil.ReadAll(fin)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var fm postFM
|
||||
remaining, err := front.Unmarshal(content, &fm)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
output := blackfriday.MarkdownCommon(remaining)
|
||||
|
||||
p := &Post{
|
||||
Title: fm.Title,
|
||||
Date: fm.Date,
|
||||
Link: strings.Split(path, ".")[0],
|
||||
Body: string(remaining),
|
||||
BodyHTML: template.HTML(output),
|
||||
}
|
||||
|
||||
s.Posts = append(s.Posts, p)
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sort.Sort(sort.Reverse(s.Posts))
|
||||
|
||||
cb, err := rice.FindBox("css")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sb, err := rice.FindBox("static")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s.Resume = template.HTML(blackfriday.MarkdownCommon(sb.MustBytes("resume/resume.md")))
|
||||
|
||||
for _, item := range s.Posts {
|
||||
itime, _ := time.Parse("2006-01-02", item.Date)
|
||||
s.rssFeed.Items = append(s.rssFeed.Items, &feeds.Item{
|
||||
Title: item.Title,
|
||||
Link: &feeds.Link{Href: "https://christine.website/" + item.Link},
|
||||
Description: item.Summary,
|
||||
Created: itime,
|
||||
})
|
||||
|
||||
s.jsonFeed.Items = append(s.jsonFeed.Items, jsonfeed.Item{
|
||||
ID: "https://christine.website/" + item.Link,
|
||||
URL: "https://christine.website/" + item.Link,
|
||||
Title: item.Title,
|
||||
DatePublished: itime,
|
||||
ContentHTML: string(item.BodyHTML),
|
||||
})
|
||||
}
|
||||
|
||||
// Add HTTP routes here
|
||||
s.mux.Handle("/", s.renderTemplatePage("index.html", nil))
|
||||
s.mux.Handle("/resume", s.renderTemplatePage("resume.html", s.Resume))
|
||||
s.mux.Handle("/blog", s.renderTemplatePage("blogindex.html", s.Posts))
|
||||
s.mux.Handle("/contact", s.renderTemplatePage("contact.html", nil))
|
||||
s.mux.HandleFunc("/blog.rss", s.createFeed)
|
||||
s.mux.HandleFunc("/blog.atom", s.createAtom)
|
||||
s.mux.HandleFunc("/blog.json", s.createJsonFeed)
|
||||
s.mux.HandleFunc("/blog/", s.showPost)
|
||||
s.mux.Handle("/static/", http.StripPrefix("/static/", http.FileServer(sb.HTTPBox())))
|
||||
s.mux.Handle("/css/", http.StripPrefix("/css/", http.FileServer(cb.HTTPBox())))
|
||||
|
||||
return s, nil
|
||||
}
|
||||
|
||||
const icon = "https://christine.website/static/img/avatar.png"
|
||||
|
||||
// Post is a single blogpost.
|
||||
type Post struct {
|
||||
Title string `json:"title"`
|
||||
Link string `json:"link"`
|
||||
Summary string `json:"summary,omitifempty"`
|
||||
Body string `json:"-"`
|
||||
BodyHTML template.HTML `json:"body"`
|
||||
Date string `json:"date"`
|
||||
}
|
||||
|
||||
// Posts implements sort.Interface for a slice of Post objects.
|
||||
type Posts []*Post
|
||||
|
||||
func (p Posts) Len() int { return len(p) }
|
||||
func (p Posts) Less(i, j int) bool {
|
||||
iDate, _ := time.Parse("2006-01-02", p[i].Date)
|
||||
jDate, _ := time.Parse("2006-01-02", p[j].Date)
|
||||
|
||||
return iDate.Unix() < jDate.Unix()
|
||||
}
|
||||
func (p Posts) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
|
|
@@ -0,0 +1,64 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/Xe/ln"
|
||||
)
|
||||
|
||||
var bootTime = time.Now()
|
||||
|
||||
// IncrediblySecureSalt *******
|
||||
const IncrediblySecureSalt = "hunter2"
|
||||
|
||||
func (s *Site) createFeed(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/rss+xml")
|
||||
w.Header().Set("ETag", Hash(bootTime.String(), IncrediblySecureSalt))
|
||||
|
||||
err := s.rssFeed.WriteRss(w)
|
||||
if err != nil {
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
ln.Error(err, ln.F{
|
||||
"remote_addr": r.RemoteAddr,
|
||||
"action": "generating_rss",
|
||||
"uri": r.RequestURI,
|
||||
"host": r.Host,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Site) createAtom(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/atom+xml")
|
||||
w.Header().Set("ETag", Hash(bootTime.String(), IncrediblySecureSalt))
|
||||
|
||||
err := s.rssFeed.WriteAtom(w)
|
||||
if err != nil {
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
ln.Error(err, ln.F{
|
||||
"remote_addr": r.RemoteAddr,
|
||||
"action": "generating_atom",
|
||||
"uri": r.RequestURI,
|
||||
"host": r.Host,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Site) createJsonFeed(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Header().Set("ETag", Hash(bootTime.String(), IncrediblySecureSalt))
|
||||
|
||||
e := json.NewEncoder(w)
|
||||
e.SetIndent("", "\t")
|
||||
err := e.Encode(s.jsonFeed)
|
||||
if err != nil {
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
ln.Error(err, ln.F{
|
||||
"remote_addr": r.RemoteAddr,
|
||||
"action": "generating_jsonfeed",
|
||||
"uri": r.RequestURI,
|
||||
"host": r.Host,
|
||||
})
|
||||
}
|
||||
}
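The Hash helper used for the ETag headers above is defined elsewhere in the repository and is not shown in this diff. A minimal sketch of a compatible helper, assuming a salted SHA-256 hex digest is all that is needed to give the feeds a stable ETag for the lifetime of the process:

package main

import (
	"crypto/sha256"
	"encoding/hex"
)

// Hash returns a hex-encoded SHA-256 digest of data concatenated with salt.
// This is an illustrative sketch; the real Hash in the repository may differ.
func Hash(data, salt string) string {
	sum := sha256.Sum256([]byte(data + salt))
	return hex.EncodeToString(sum[:])
}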
|
|
@ -1,108 +0,0 @@
|
|||
img.textwrap-right {
|
||||
float: right;
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
.well {
|
||||
margin-top:-20px;
|
||||
text-align:center;
|
||||
cursor:pointer;
|
||||
font-size: 25px;
|
||||
padding: 15px;
|
||||
border-radius: 0px !important;
|
||||
}
|
||||
|
||||
.well:hover {
|
||||
margin-top:-20px;
|
||||
border:2px solid black;
|
||||
text-align:center;
|
||||
cursor:pointer;
|
||||
font-size: 25px;
|
||||
padding: 15px;
|
||||
border-radius: 0px !important;
|
||||
}
|
||||
|
||||
.bg_blur
|
||||
{
|
||||
background: url('/static/img/avatar.png');
|
||||
height: 512px;
|
||||
background-size: cover;
|
||||
overflow: visible;
|
||||
}
|
||||
|
||||
.bgblurback {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.fb_wrap {
|
||||
margin: 0 auto;
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
.follow_btn {
|
||||
text-decoration: none;
|
||||
height: 20%;
|
||||
padding: 10px;
|
||||
background-color: #E5B7CE;
|
||||
padding-top: 6px;
|
||||
color: #fff;
|
||||
text-align: center;
|
||||
font-size: 20px;
|
||||
opacity: 1.0;
|
||||
}
|
||||
|
||||
.follow_btn:hover {
|
||||
text-decoration: none;
|
||||
background-color: #FE9ACD;
|
||||
padding: 10px;
|
||||
padding-top: 6px;
|
||||
color: #fff;
|
||||
text-align: center;
|
||||
font-size: 20px;
|
||||
border: 4px solid rgba(255, 255, 255, 0.8);
|
||||
}
|
||||
|
||||
.header{
|
||||
/*color : #808080;*/
|
||||
margin-left:10%;
|
||||
margin-top:70px;
|
||||
}
|
||||
|
||||
.picture{
|
||||
height:150px;
|
||||
width:150px;
|
||||
top: 75px;
|
||||
left:-75px;
|
||||
float: left;
|
||||
}
|
||||
|
||||
.picture_mob{
|
||||
position: absolute;
|
||||
width: 35%;
|
||||
left: 35%;
|
||||
bottom: 70%;
|
||||
}
|
||||
|
||||
.btn-style{
|
||||
color: #fff;
|
||||
background-color: #007FBE;
|
||||
border-color: #adadad;
|
||||
width: 33.3%;
|
||||
}
|
||||
|
||||
.btn-style:hover {
|
||||
color: #333;
|
||||
background-color: #3D5DE0;
|
||||
border-color: #adadad;
|
||||
width: 33.3%;
|
||||
}
|
||||
|
||||
@media (max-width: 767px) {
|
||||
.header{
|
||||
text-align : center;
|
||||
}
|
||||
|
||||
.nav{
|
||||
margin-top : 30px;
|
||||
}
|
||||
}
|
|
@ -1,3 +0,0 @@
|
|||
body {
|
||||
background-color: #F2E6EB;
|
||||
}
|
(17 binary image files added; sizes 3.0 KiB to 18 KiB)
|
@ -0,0 +1,2 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<browserconfig><msapplication><tile><square70x70logo src="/ms-icon-70x70.png"/><square150x150logo src="/ms-icon-150x150.png"/><square310x310logo src="/ms-icon-310x310.png"/><TileColor>#ffffff</TileColor></tile></msapplication></browserconfig>
|
(4 binary image files added; sizes 1.1 KiB to 8.5 KiB)
|
@ -0,0 +1,41 @@
|
|||
{
|
||||
"name": "App",
|
||||
"icons": [
|
||||
{
|
||||
"src": "\/android-icon-36x36.png",
|
||||
"sizes": "36x36",
|
||||
"type": "image\/png",
|
||||
"density": "0.75"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-48x48.png",
|
||||
"sizes": "48x48",
|
||||
"type": "image\/png",
|
||||
"density": "1.0"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-72x72.png",
|
||||
"sizes": "72x72",
|
||||
"type": "image\/png",
|
||||
"density": "1.5"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-96x96.png",
|
||||
"sizes": "96x96",
|
||||
"type": "image\/png",
|
||||
"density": "2.0"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-144x144.png",
|
||||
"sizes": "144x144",
|
||||
"type": "image\/png",
|
||||
"density": "3.0"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-192x192.png",
|
||||
"sizes": "192x192",
|
||||
"type": "image\/png",
|
||||
"density": "4.0"
|
||||
}
|
||||
]
|
||||
}
|
(4 binary image files added; sizes 5.9 KiB to 40 KiB)
|
@ -2,18 +2,27 @@
|
|||
|
||||
---
|
||||
|
||||
> #### Rockstar Hacker, Cloud Architect, Gopher, Haskeller, Container Expert
|
||||
> ##### Mountain View, CA   [christine.website][homepage]   [@theprincessxena][twitter] ![twit][]
|
||||
> #### Web and Backend Services Devops Specialist
|
||||
> ##### Bellevue, WA   [christine.website][homepage]
|
||||
> `Docker`, `Git`, `Haskell`, `Nim`, `Go`, `C`, `CentOS`, `CoreOS`, `IRC`, `Matrix`
|
||||
|
||||
---
|
||||
> **"** A github power user, constantly learns new things to keep up on what's new in tech.
|
||||
|
||||
## Experience
|
||||
### Heroku - Software Engineer   <small>*2017 - current*</small>
|
||||
> [Heroku][heroku] is a cloud Platform-as-a-Service (PaaS) supporting
|
||||
> several programming languages that is used as a web application deployment model.
|
||||
> Heroku, one of the first cloud platforms, has been in development since June 2007,
|
||||
> when it supported only the Ruby programming language, but now supports Java,
|
||||
> Node.js, Scala, Clojure, Python, PHP, and Go.
|
||||
>
|
||||
> #### Highlights
|
||||
>
|
||||
> - [JVM Application Metrics](https://devcenter.heroku.com/changelog-items/1133)
|
||||
> - [Go Runtime Metrics Agent](https://github.com/heroku/x/tree/master/runtime-metrics)
|
||||
> - Other backend fixes and improvements on [Threshold Autoscaling](https://blog.heroku.com/heroku-autoscaling) and [Threshold Alerting](https://devcenter.heroku.com/articles/metrics#threshold-alerting)
|
||||
|
||||
---
|
||||
## Experience
|
||||
#### Backplane.io - Software Engineer   <small>*2016 - 2016*</small>
|
||||
`Go`, `Docker`, `docker-compose`, `devops`, `PostgreSQL`
|
||||
### Backplane.io - Software Engineer   <small>*2016 - 2016*</small>
|
||||
> [Backplane](https://backplane.io) is an innovative reverse proxy that
|
||||
> helps administrators and startups simplify their web application routing.
|
||||
>
|
||||
|
@ -24,8 +33,7 @@
|
|||
> - Learning a lot about HTTP/2 and load balancing
|
||||
|
||||
---
|
||||
#### Pure Storage - Member of Technical Staff   <small>*2016 - 2016*</small>
|
||||
`Python 2.7`, `Jenkins`, `Ubuntu`, `Puppet`, `TestRail`, `Docker`
|
||||
### Pure Storage - Member of Technical Staff   <small>*2016 - 2016*</small>
|
||||
> Pure Storage is a Mountain View, California-based enterprise data flash storage
|
||||
> company founded in 2009. It is traded on the NYSE (PSTG).
|
||||
>
|
||||
|
@ -33,8 +41,7 @@
|
|||
> - Code maintenance
|
||||
|
||||
---
|
||||
#### IMVU - Site Reliability Engineer   <small>*2015 - 2016*</small>
|
||||
`Ubuntu Server`, `CFEngine`, `Haskell`, `Go`, `Perl`, `Nginx`, `JunOS`, `Ceph`, `MySQL`, `Redis`, `Memcached`, `PHP`, `Erlang`
|
||||
### IMVU - Site Reliability Engineer   <small>*2015 - 2016*</small>
|
||||
> IMVU, inc is a company whose mission is to help people find and communicate
|
||||
> with each other. Their main product is a 3D avatar-based chat client and its
|
||||
> surrounding infrastructure allowing creators to make content for the avatars
|
||||
|
@ -48,8 +55,7 @@
|
|||
> when it is needed
|
||||
|
||||
---
|
||||
#### VTCSecure - Deis Consultant (contract)   <small>*2014 - 2015*</small>
|
||||
`Deis`, `Docker`, `CoreOS`, `Go`, `Freeswitch`
|
||||
### VTCSecure - Deis Consultant (contract)   <small>*2014 - 2015*</small>
|
||||
> VTCSecure is a company dedicated to helping with custom and standard
|
||||
> audio/video conferencing solutions. They specialize in helping the deaf and
|
||||
> blind communicate over today's infrastructure without any trouble on their end.
|
||||
|
@ -62,8 +68,7 @@
|
|||
> - Learning Freeswitch
|
||||
|
||||
---
|
||||
#### Crowdflower - Deis Consultant (Contract)   <small>*2014 - 2014*</small>
|
||||
`Ruby`, `Rails`, `Chef`, `CoreOS`, `Docker`, `Deis`
|
||||
### Crowdflower - Deis Consultant (Contract)   <small>*2014 - 2014*</small>
|
||||
> Crowdflower is a company that uses crowdsourcing to have its customers submit
|
||||
> tasks to be done, similar to Amazon's Mechanical Turk. CrowdFlower has over 50
|
||||
> labor channel partners, and its network has more than 5 million contributors
|
||||
|
@ -78,8 +83,7 @@
|
|||
> clusters of CoreOS and Fleet machines
|
||||
|
||||
---
|
||||
#### OpDemand - Software Engineering Intern   <small>*2014 - 2014*</small>
|
||||
`Deis`, `CoreOS`, `Go`, `Docker`
|
||||
### OpDemand - Software Engineering Intern   <small>*2014 - 2014*</small>
|
||||
> OpDemand is the company behind the open source project Deis, a distributed
|
||||
> platform-as-a-service (PaaS) designed from the ground up to emulate Heroku but
|
||||
> on privately owned servers.
|
||||
|
@ -91,7 +95,7 @@
|
|||
|
||||
---
|
||||
## Open Source
|
||||
#### [Elemental-IRCd](http://elemental-ircd.com)
|
||||
### [Elemental-IRCd](http://elemental-ircd.com)
|
||||
A scalable RFC compliant IRCv3 enabled IRC server for personal and professional use.
|
||||
|
||||
#### Accomplishments
|
||||
|
@ -103,7 +107,7 @@ Elemental is currently in use in production on several networks, totaling 800-10
|
|||
users per day with spikes of up to 50,000 on special events.
|
||||
|
||||
---
|
||||
#### [Tetra](https://github.com/Xe/Tetra)
|
||||
### [Tetra](https://github.com/Xe/Tetra)
|
||||
A modern IRC services platform for TS6 IRC daemons.
|
||||
|
||||
#### Accomplishments
|
||||
|
@ -120,15 +124,11 @@ Command "PING", ->
|
|||
This will create a command named "PING" that will return "PONG" to the user when it is used.
|
||||
|
||||
---
|
||||
#### [PonyAPI](https://github.com/Xe/ponyapi)
|
||||
### [PonyAPI](https://github.com/Xe/ponyapi)
|
||||
A simple API for information on episodes of My Little Pony: Friendship is Magic written in Nim to be run inside a container.
|
||||
|
||||
All data is loaded into ram and there are no usage limits as long as you agree to not take down the server it is running on.
|
||||
|
||||
---
|
||||
#### [Professional Projects](https://github.com/Xe)
|
||||
Projects here will be of a more professional nature (save a few here and there).
|
||||
|
||||
---
|
||||
## Writing
|
||||
|
||||
|
@ -139,3 +139,4 @@ Projects here will be of a more professional nature (save a few here and there).
|
|||
[homepage]: https://christine.website
|
||||
[twitter]: https://twitter.com/theprincessxena
|
||||
[twit]: http://cdn-careers.sstatic.net/careers/Img/icon-twitter.png?v=b1bd58ad2034
|
||||
[heroku]: https://www.heroku.com
|
||||
|
|
|
@ -0,0 +1,67 @@
|
|||
<html>
|
||||
<head>
|
||||
{{ template "title" . }}
|
||||
<link rel="stylesheet" href="/css/hack.css" />
|
||||
<link rel="stylesheet" href="/css/solarized-dark.css" />
|
||||
|
||||
<link rel="alternate" type="application/rss+xml" href="https://christine.website/blog.rss" />
|
||||
<link rel="alternate" type="application/atom+xml" href="https://christine.website/blog.atom" />
|
||||
<link rel="alternate" title="My Feed" type="application/json" href="https://christine.website/blog.json" />
|
||||
|
||||
<link rel="apple-touch-icon" sizes="57x57" href="/static/favicon/apple-icon-57x57.png">
|
||||
<link rel="apple-touch-icon" sizes="60x60" href="/static/favicon/apple-icon-60x60.png">
|
||||
<link rel="apple-touch-icon" sizes="72x72" href="/static/favicon/apple-icon-72x72.png">
|
||||
<link rel="apple-touch-icon" sizes="76x76" href="/static/favicon/apple-icon-76x76.png">
|
||||
<link rel="apple-touch-icon" sizes="114x114" href="/static/favicon/apple-icon-114x114.png">
|
||||
<link rel="apple-touch-icon" sizes="120x120" href="/static/favicon/apple-icon-120x120.png">
|
||||
<link rel="apple-touch-icon" sizes="144x144" href="/static/favicon/apple-icon-144x144.png">
|
||||
<link rel="apple-touch-icon" sizes="152x152" href="/static/favicon/apple-icon-152x152.png">
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="/static/favicon/apple-icon-180x180.png">
|
||||
<link rel="icon" type="image/png" sizes="192x192" href="/static/favicon/android-icon-192x192.png">
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/static/favicon/favicon-32x32.png">
|
||||
<link rel="icon" type="image/png" sizes="96x96" href="/static/favicon/favicon-96x96.png">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="/static/favicon/favicon-16x16.png">
|
||||
<link rel="manifest" href="/static/favicon/manifest.json">
|
||||
<meta name="msapplication-TileColor" content="#ffffff">
|
||||
<meta name="msapplication-TileImage" content="/static/favicon/ms-icon-144x144.png">
|
||||
<meta name="theme-color" content="#ffffff">
|
||||
<style>
|
||||
.main {
|
||||
padding: 20px 10px;
|
||||
}
|
||||
|
||||
.hack h1 {
|
||||
padding-top: 0;
|
||||
}
|
||||
|
||||
footer.footer {
|
||||
border-top: 1px solid #ccc;
|
||||
margin-top: 80px;
|
||||
margin-top: 5rem;
|
||||
padding: 48px 0;
|
||||
padding: 3rem 0;
|
||||
}
|
||||
|
||||
img {
|
||||
max-width: 100%;
|
||||
padding: 1em;
|
||||
}
|
||||
</style>
|
||||
{{ template "styles" . }}
|
||||
</head>
|
||||
<body class="hack solarized-dark">
|
||||
{{ template "scripts" . }}
|
||||
<div class="container">
|
||||
<header>
|
||||
<p><a href="/">Christine Dodrill</a> - <a href="/blog">Blog</a> - <a href="/contact">Contact</a> - <a href="/resume">Resume</a></p>
|
||||
</header>
|
||||
{{ template "content" . }}
|
||||
<footer>
|
||||
<blockquote>Copyright 2017 Christine Dodrill. Any and all opinions listed here are my own and not representative of my employer.</blockquote>
|
||||
</footer>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
{{ define "scripts" }}{{ end }}
|
||||
{{ define "styles" }}{{ end }}
|
|
@ -0,0 +1,22 @@
|
|||
{{ define "title" }}
|
||||
<title>Blog - Christine Dodrill</title>
|
||||
|
||||
<style>
|
||||
.blogpost-card {
|
||||
text-align: center;
|
||||
}
|
||||
</style>
|
||||
{{ end }}
|
||||
|
||||
{{ define "content" }}
|
||||
<div class="grid">
|
||||
{{ range . }}
|
||||
<div class="card cell -4of12 blogpost-card">
|
||||
<header class="card-header">{{ .Title }}</header>
|
||||
<div class="card-content">
|
||||
<p>Posted on {{ .Date }} <br> <a href="{{ .Link }}">Read Post</a></p>
|
||||
</div>
|
||||
</div>
|
||||
{{ end }}
|
||||
</div>
|
||||
{{ end }}
|
|
@ -0,0 +1,11 @@
|
|||
{{ define "title" }}
|
||||
<title>{{ .Title }} - Christine Dodrill</title>
|
||||
{{ end }}
|
||||
|
||||
{{ define "content" }}
|
||||
{{ .BodyHTML }}
|
||||
|
||||
<hr />
|
||||
|
||||
<i>Content posted on {{ .Date }}, opinions and preferences of the author may have changed since then.</i>
|
||||
{{ end }}
|
|
@ -0,0 +1,36 @@
|
|||
{{ define "title" }}<title>Contact - Christine Dodrill</title>{{ end }}
|
||||
|
||||
{{ define "content" }}
|
||||
<h1>Contact Information</h1>
|
||||
<div class="grid">
|
||||
<div class="cell -6of12">
|
||||
<h3>Email</h3>
|
||||
<p>me@christine.website</p>
|
||||
|
||||
<p>My GPG fingerprint is <code>799F 9134 8118 1111</code>. If you get an email that appears to be from me and the signature does not match that fingerprint, it is not from me. You may download a copy of my public key <a href="/static/gpg.pub">here</a>.</p>
|
||||
|
||||
<h3>Social Media</h3>
|
||||
<ul>
|
||||
<li><a href="https://github.com/Xe">Github</a></li>
|
||||
<li><a href="https://twitter.com/theprincessxena">Twitter</a></li>
|
||||
<li><a href="https://keybase.io/xena">Keybase</a></li>
|
||||
<li><a href="https://www.coinbase.com/christinedodrill">Coinbase</a></li>
|
||||
<li><a href="https://ko-fi.com/A265JE0">Ko-fi</a></li>
|
||||
<li><a href="https://www.facebook.com/chrissycade1337">Facebook</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="cell -6of12">
|
||||
<h3>Other Information</h3>
|
||||
<p>To send me donations, my bitcoin address is <code>1Gi2ZF2C9CU9QooH8bQMB2GJ2iL6shVnVe</code>.</p>
|
||||
|
||||
<h4>IRC</h4>
|
||||
<p>I am on many IRC networks. On Freenode I am using the nick Xe but elsewhere I will use the nick Xena or Cadey.</p>
|
||||
|
||||
<h4>Telegram</h4>
|
||||
<p><a href="https://t.me/miamorecadenza">@miamorecadenza</a></p>
|
||||
|
||||
<h4>Discord</h4>
|
||||
<p><code>Cadey~#1932</code></p>
|
||||
</div>
|
||||
</div>
|
||||
{{ end }}
|
|
@ -0,0 +1,9 @@
|
|||
{{ define "title" }}
|
||||
<title>Error - Christine Dodrill</title>
|
||||
{{ end }}
|
||||
|
||||
{{ define "content" }}
|
||||
<pre>
|
||||
{{ . }}
|
||||
</pre>
|
||||
{{ end }}
|
|
@ -0,0 +1,30 @@
|
|||
{{ define "title" }}<title>Christine Dodrill</title>{{ end }}
|
||||
|
||||
{{ define "content" }}
|
||||
<div class="grid">
|
||||
<div class="cell -3of12 content">
|
||||
<img src="/static/img/avatar.png">
|
||||
<br />
|
||||
<a href="/contact" class="justify-content-center">Contact Me</a>
|
||||
</div>
|
||||
<div class="cell -9of12 content">
|
||||
<h1>Christine Dodrill</h1>
|
||||
<h4>Web and Backend Services Devops Specialist</h4>
|
||||
<h5>Skills</h5>
|
||||
<ul>
|
||||
<li>Go, Lua, Nim, Haskell, C, Python (3.x) and other languages</li>
|
||||
<li>Docker (deployment, development & more)</li>
|
||||
<li>Mashups of data</li>
|
||||
<li>Package maintainer for Alpine Linux</li>
|
||||
</ul>
|
||||
|
||||
<h5>Highlighted Projects</h5>
|
||||
<ul>
|
||||
<li><a href="https://github.com/Xe/PonyAPI">PonyAPI</a> - My Little Pony: Friendship is Magic Episode information API</li>
|
||||
<li><a href="https://github.com/PonyvilleFM/aura">Aura</a> - PonyvilleFM live DJ recording bot</li>
|
||||
<li><a href="https://github.com/Elemental-IRCd/elemental-ircd">Elemental-IRCd</a> - IRC Server Software</li>
|
||||
<li><a href="https://github.com/Xe/site">This website</a> - The backend and templates for this website</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
{{ end }}
|
|
@ -0,0 +1,9 @@
|
|||
{{ define "title" }}<title>Resume - Christine Dodrill</title>{{ end }}
|
||||
|
||||
{{ define "content" }}
|
||||
{{ . }}
|
||||
|
||||
<hr />
|
||||
|
||||
<a href="/static/resume/resume.md">Plain-text version of this resume here</a>
|
||||
{{ end }}
|
22
vendor-log
|
@ -1,11 +1,15 @@
|
|||
94c8a5673a78ada68d7b97e1d4657cffc6ec68d7 github.com/gernest/front
|
||||
a5b47d31c556af34a302ce5d659e6fea44d90de0 gopkg.in/yaml.v2
|
||||
b68094ba95c055dfda888baa8947dfe44c20b1ac github.com/Xe/asarfs
|
||||
5e4d0891fe789f2da0c2d5afada3b6a1ede6d64c layeh.com/asar
|
||||
33a50704c528b4b00db129f75c693facf7f3838b (dirty) github.com/Xe/asarfs
|
||||
5e4d0891fe789f2da0c2d5afada3b6a1ede6d64c layeh.com/asar
|
||||
3f7ce7b928e14ff890b067e5bbbc80af73690a9c github.com/urfave/negroni
|
||||
f3687a5cd8e600f93e02174f5c0b91b56d54e8d0 github.com/Xe/gopreload
|
||||
49bd2f58881c34d534aa97bd64bdbdf37be0df91 github.com/Xe/ln
|
||||
c02ca9a983da5807ddf7d796784928f5be4afd09 github.com/GeertJohan/go.rice
|
||||
c02ca9a983da5807ddf7d796784928f5be4afd09 github.com/GeertJohan/go.rice/embedded
|
||||
a00a8beb369cafd88bb7b32f31fc4ff3219c3565 github.com/Xe/gopreload
|
||||
b685d4edebe855f8edbb4e605c0bf74e1e60b0e9 github.com/Xe/jsonfeed
|
||||
f759b797c0ff6b2c514202198fe5e8ba90094c14 github.com/Xe/ln
|
||||
a5fe2436ffcb3236e175e5149162b41cd28bd27d github.com/daaku/go.zipexe
|
||||
62f833fc9f6c4d3223bdb37bd0c2f8951bed8596 github.com/google/gops/agent
|
||||
62f833fc9f6c4d3223bdb37bd0c2f8951bed8596 github.com/google/gops/internal
|
||||
62f833fc9f6c4d3223bdb37bd0c2f8951bed8596 github.com/google/gops/signal
|
||||
441264de03a8117ed530ae8e049d8f601a33a099 github.com/gorilla/feeds
|
||||
c2c54e542fb797ad986b31721e1baedf214ca413 github.com/kardianos/osext
|
||||
ff09b135c25aae272398c51a07235b90a75aa4f0 github.com/pkg/errors
|
||||
0ba0f2b6ed7c475a92e4df8641825cb7a11d1fa3 github.com/russross/blackfriday
|
||||
739be213b0a1c496dccaf9e5df1514150c9548e4 github.com/tj/front
|
||||
9f9df34309c04878acc86042b16630b0f696e1de gopkg.in/yaml.v1
|
||||
|
|
|
@ -0,0 +1,138 @@
|
|||
package rice
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/daaku/go.zipexe"
|
||||
"github.com/kardianos/osext"
|
||||
)
|
||||
|
||||
// appendedBox defines an appended box
|
||||
type appendedBox struct {
|
||||
Name string // box name
|
||||
Files map[string]*appendedFile // appended files (*zip.File) by full path
|
||||
}
|
||||
|
||||
type appendedFile struct {
|
||||
zipFile *zip.File
|
||||
dir bool
|
||||
dirInfo *appendedDirInfo
|
||||
children []*appendedFile
|
||||
content []byte
|
||||
}
|
||||
|
||||
// appendedBoxes is a public register of appended boxes
|
||||
var appendedBoxes = make(map[string]*appendedBox)
|
||||
|
||||
func init() {
|
||||
// find if exec is appended
|
||||
thisFile, err := osext.Executable()
|
||||
if err != nil {
|
||||
return // not appended or can't find self executable
|
||||
}
|
||||
closer, rd, err := zipexe.OpenCloser(thisFile)
|
||||
if err != nil {
|
||||
return // not appended
|
||||
}
|
||||
defer closer.Close()
|
||||
|
||||
for _, f := range rd.File {
|
||||
// get box and file name from f.Name
|
||||
fileParts := strings.SplitN(strings.TrimLeft(filepath.ToSlash(f.Name), "/"), "/", 2)
|
||||
boxName := fileParts[0]
|
||||
var fileName string
|
||||
if len(fileParts) > 1 {
|
||||
fileName = fileParts[1]
|
||||
}
|
||||
|
||||
// find box or create new one if doesn't exist
|
||||
box := appendedBoxes[boxName]
|
||||
if box == nil {
|
||||
box = &appendedBox{
|
||||
Name: boxName,
|
||||
Files: make(map[string]*appendedFile),
|
||||
}
|
||||
appendedBoxes[boxName] = box
|
||||
}
|
||||
|
||||
// create and add file to box
|
||||
af := &appendedFile{
|
||||
zipFile: f,
|
||||
}
|
||||
if f.Comment == "dir" {
|
||||
af.dir = true
|
||||
af.dirInfo = &appendedDirInfo{
|
||||
name: filepath.Base(af.zipFile.Name),
|
||||
//++ TODO: use zip modtime when that is set correctly: af.zipFile.ModTime()
|
||||
time: time.Now(),
|
||||
}
|
||||
} else {
|
||||
// this is a file, we need its contents so we can create a bytes.Reader when the file is opened
|
||||
// make a new byteslice
|
||||
af.content = make([]byte, af.zipFile.FileInfo().Size())
|
||||
// ignore reading empty files from zip (empty file still is a valid file to be read though!)
|
||||
if len(af.content) > 0 {
|
||||
// open io.ReadCloser
|
||||
rc, err := af.zipFile.Open()
|
||||
if err != nil {
|
||||
af.content = nil // this will cause an error when the file is being opened or seeked (which is good)
|
||||
// TODO: it's quite blunt to just log this stuff. but this is in init, so rice.Debug can't be changed yet..
|
||||
log.Printf("error opening appended file %s: %v", af.zipFile.Name, err)
|
||||
} else {
|
||||
_, err = rc.Read(af.content)
|
||||
rc.Close()
|
||||
if err != nil {
|
||||
af.content = nil // this will cause an error when the file is being opened or seeked (which is good)
|
||||
// TODO: it's quite blunt to just log this stuff. but this is in init, so rice.Debug can't be changed yet..
|
||||
log.Printf("error reading data for appended file %s: %v", af.zipFile.Name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// add appendedFile to box file list
|
||||
box.Files[fileName] = af
|
||||
|
||||
// add to parent dir (if any)
|
||||
dirName := filepath.Dir(fileName)
|
||||
if dirName == "." {
|
||||
dirName = ""
|
||||
}
|
||||
if fileName != "" { // don't make box root dir a child of itself
|
||||
if dir := box.Files[dirName]; dir != nil {
|
||||
dir.children = append(dir.children, af)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// implements os.FileInfo.
|
||||
// used for Readdir()
|
||||
type appendedDirInfo struct {
|
||||
name string
|
||||
time time.Time
|
||||
}
|
||||
|
||||
func (adi *appendedDirInfo) Name() string {
|
||||
return adi.name
|
||||
}
|
||||
func (adi *appendedDirInfo) Size() int64 {
|
||||
return 0
|
||||
}
|
||||
func (adi *appendedDirInfo) Mode() os.FileMode {
|
||||
return os.ModeDir
|
||||
}
|
||||
func (adi *appendedDirInfo) ModTime() time.Time {
|
||||
return adi.time
|
||||
}
|
||||
func (adi *appendedDirInfo) IsDir() bool {
|
||||
return true
|
||||
}
|
||||
func (adi *appendedDirInfo) Sys() interface{} {
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,337 @@
|
|||
package rice
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/GeertJohan/go.rice/embedded"
|
||||
)
|
||||
|
||||
// Box abstracts a directory for resources/files.
|
||||
// It can either load files from disk, or from embedded code (when `rice --embed` was run).
|
||||
type Box struct {
|
||||
name string
|
||||
absolutePath string
|
||||
embed *embedded.EmbeddedBox
|
||||
appendd *appendedBox
|
||||
}
|
||||
|
||||
var defaultLocateOrder = []LocateMethod{LocateEmbedded, LocateAppended, LocateFS}
|
||||
|
||||
func findBox(name string, order []LocateMethod) (*Box, error) {
|
||||
b := &Box{name: name}
|
||||
|
||||
// no support for absolute paths since gopath can be different on different machines.
|
||||
// therefore, required box must be located relative to package requiring it.
|
||||
if filepath.IsAbs(name) {
|
||||
return nil, errors.New("given name/path is absolute")
|
||||
}
|
||||
|
||||
var err error
|
||||
for _, method := range order {
|
||||
switch method {
|
||||
case LocateEmbedded:
|
||||
if embed := embedded.EmbeddedBoxes[name]; embed != nil {
|
||||
b.embed = embed
|
||||
return b, nil
|
||||
}
|
||||
|
||||
case LocateAppended:
|
||||
appendedBoxName := strings.Replace(name, `/`, `-`, -1)
|
||||
if appendd := appendedBoxes[appendedBoxName]; appendd != nil {
|
||||
b.appendd = appendd
|
||||
return b, nil
|
||||
}
|
||||
|
||||
case LocateFS:
|
||||
// resolve absolute directory path
|
||||
err := b.resolveAbsolutePathFromCaller()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
// check if absolutePath exists on filesystem
|
||||
info, err := os.Stat(b.absolutePath)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
// check if absolutePath is actually a directory
|
||||
if !info.IsDir() {
|
||||
err = errors.New("given name/path is not a directory")
|
||||
continue
|
||||
}
|
||||
return b, nil
|
||||
case LocateWorkingDirectory:
|
||||
// resolve absolute directory path
|
||||
err := b.resolveAbsolutePathFromWorkingDirectory()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
// check if absolutePath exists on filesystem
|
||||
info, err := os.Stat(b.absolutePath)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
// check if absolutePath is actually a directory
|
||||
if !info.IsDir() {
|
||||
err = errors.New("given name/path is not a directory")
|
||||
continue
|
||||
}
|
||||
return b, nil
|
||||
}
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
err = fmt.Errorf("could not locate box %q", name)
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// FindBox returns a Box instance for given name.
|
||||
// When the given name is a relative path, its base path will be the calling pkg/cmd's source root.
|
||||
// When the given name is absolute, it's absolute. derp.
|
||||
// Make sure the path doesn't contain any sensitive information as it might be placed into generated go source (embedded).
|
||||
func FindBox(name string) (*Box, error) {
|
||||
return findBox(name, defaultLocateOrder)
|
||||
}
|
||||
|
||||
// MustFindBox returns a Box instance for given name, like FindBox does.
|
||||
// It does not return an error, instead it panics when an error occurs.
|
||||
func MustFindBox(name string) *Box {
|
||||
box, err := findBox(name, defaultLocateOrder)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return box
|
||||
}
|
||||
|
||||
// This is injected as a mutable function literal so that we can mock it out in
|
||||
// tests and return a fixed test file.
|
||||
var resolveAbsolutePathFromCaller = func(name string, nStackFrames int) (string, error) {
|
||||
_, callingGoFile, _, ok := runtime.Caller(nStackFrames)
|
||||
if !ok {
|
||||
return "", errors.New("couldn't find caller on stack")
|
||||
}
|
||||
|
||||
// resolve to proper path
|
||||
pkgDir := filepath.Dir(callingGoFile)
|
||||
// fix for go cover
|
||||
const coverPath = "_test/_obj_test"
|
||||
if !filepath.IsAbs(pkgDir) {
|
||||
if i := strings.Index(pkgDir, coverPath); i >= 0 {
|
||||
pkgDir = pkgDir[:i] + pkgDir[i+len(coverPath):] // remove coverPath
|
||||
pkgDir = filepath.Join(os.Getenv("GOPATH"), "src", pkgDir) // make absolute
|
||||
}
|
||||
}
|
||||
return filepath.Join(pkgDir, name), nil
|
||||
}
|
||||
|
||||
func (b *Box) resolveAbsolutePathFromCaller() error {
|
||||
path, err := resolveAbsolutePathFromCaller(b.name, 4)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
b.absolutePath = path
|
||||
return nil
|
||||
|
||||
}
|
||||
|
||||
func (b *Box) resolveAbsolutePathFromWorkingDirectory() error {
|
||||
path, err := os.Getwd()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
b.absolutePath = filepath.Join(path, b.name)
|
||||
return nil
|
||||
}
|
||||
|
||||
// IsEmbedded indicates whether this box was embedded into the application
|
||||
func (b *Box) IsEmbedded() bool {
|
||||
return b.embed != nil
|
||||
}
|
||||
|
||||
// IsAppended indicates whether this box was appended to the application
|
||||
func (b *Box) IsAppended() bool {
|
||||
return b.appendd != nil
|
||||
}
|
||||
|
||||
// Time reports how up-to-date the box is.
// When the box is embedded, its value is saved in the embedding code.
// When the box is live, this method returns time.Now()
|
||||
func (b *Box) Time() time.Time {
|
||||
if b.IsEmbedded() {
|
||||
return b.embed.Time
|
||||
}
|
||||
|
||||
//++ TODO: return time for appended box
|
||||
|
||||
return time.Now()
|
||||
}
|
||||
|
||||
// Open opens a File from the box
|
||||
// If there is an error, it will be of type *os.PathError.
|
||||
func (b *Box) Open(name string) (*File, error) {
|
||||
if Debug {
|
||||
fmt.Printf("Open(%s)\n", name)
|
||||
}
|
||||
|
||||
if b.IsEmbedded() {
|
||||
if Debug {
|
||||
fmt.Println("Box is embedded")
|
||||
}
|
||||
|
||||
// trim prefix (paths are relative to box)
|
||||
name = strings.TrimLeft(name, "/")
|
||||
if Debug {
|
||||
fmt.Printf("Trying %s\n", name)
|
||||
}
|
||||
|
||||
// search for file
|
||||
ef := b.embed.Files[name]
|
||||
if ef == nil {
|
||||
if Debug {
|
||||
fmt.Println("Didn't find file in embed")
|
||||
}
|
||||
// file not found, try dir
|
||||
ed := b.embed.Dirs[name]
|
||||
if ed == nil {
|
||||
if Debug {
|
||||
fmt.Println("Didn't find dir in embed")
|
||||
}
|
||||
// dir not found, error out
|
||||
return nil, &os.PathError{
|
||||
Op: "open",
|
||||
Path: name,
|
||||
Err: os.ErrNotExist,
|
||||
}
|
||||
}
|
||||
if Debug {
|
||||
fmt.Println("Found dir. Returning virtual dir")
|
||||
}
|
||||
vd := newVirtualDir(ed)
|
||||
return &File{virtualD: vd}, nil
|
||||
}
|
||||
|
||||
// box is embedded
|
||||
if Debug {
|
||||
fmt.Println("Found file. Returning virtual file")
|
||||
}
|
||||
vf := newVirtualFile(ef)
|
||||
return &File{virtualF: vf}, nil
|
||||
}
|
||||
|
||||
if b.IsAppended() {
|
||||
// trim prefix (paths are relative to box)
|
||||
name = strings.TrimLeft(name, "/")
|
||||
|
||||
// search for file
|
||||
appendedFile := b.appendd.Files[name]
|
||||
if appendedFile == nil {
|
||||
return nil, &os.PathError{
|
||||
Op: "open",
|
||||
Path: name,
|
||||
Err: os.ErrNotExist,
|
||||
}
|
||||
}
|
||||
|
||||
// create new file
|
||||
f := &File{
|
||||
appendedF: appendedFile,
|
||||
}
|
||||
|
||||
// if this file is a directory, we want to be able to read and seek
|
||||
if !appendedFile.dir {
|
||||
// looks like malformed data in zip, error now
|
||||
if appendedFile.content == nil {
|
||||
return nil, &os.PathError{
|
||||
Op: "open",
|
||||
Path: "name",
|
||||
Err: errors.New("error reading data from zip file"),
|
||||
}
|
||||
}
|
||||
// create new bytes.Reader
|
||||
f.appendedFileReader = bytes.NewReader(appendedFile.content)
|
||||
}
|
||||
|
||||
// all done
|
||||
return f, nil
|
||||
}
|
||||
|
||||
// perform os open
|
||||
if Debug {
|
||||
fmt.Printf("Using os.Open(%s)", filepath.Join(b.absolutePath, name))
|
||||
}
|
||||
file, err := os.Open(filepath.Join(b.absolutePath, name))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &File{realF: file}, nil
|
||||
}
|
||||
|
||||
// Bytes returns the content of the file with given name as []byte.
|
||||
func (b *Box) Bytes(name string) ([]byte, error) {
|
||||
file, err := b.Open(name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
content, err := ioutil.ReadAll(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return content, nil
|
||||
}
|
||||
|
||||
// MustBytes returns the content of the file with given name as []byte.
|
||||
// It panics on error.
|
||||
func (b *Box) MustBytes(name string) []byte {
|
||||
bts, err := b.Bytes(name)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return bts
|
||||
}
|
||||
|
||||
// String returns the content of the file with given name as string.
|
||||
func (b *Box) String(name string) (string, error) {
|
||||
// check if box is embedded, optimized fast path
|
||||
if b.IsEmbedded() {
|
||||
// find file in embed
|
||||
ef := b.embed.Files[name]
|
||||
if ef == nil {
|
||||
return "", os.ErrNotExist
|
||||
}
|
||||
// return as string
|
||||
return ef.Content, nil
|
||||
}
|
||||
|
||||
bts, err := b.Bytes(name)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(bts), nil
|
||||
}
|
||||
|
||||
// MustString returns the content of the file with given name as string.
|
||||
// It panics on error.
|
||||
func (b *Box) MustString(name string) string {
|
||||
str, err := b.String(name)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return str
|
||||
}
|
||||
|
||||
// Name returns the name of the box
|
||||
func (b *Box) Name() string {
|
||||
return b.name
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
package rice
|
||||
|
||||
// LocateMethod defines how a box is located.
|
||||
type LocateMethod int
|
||||
|
||||
const (
|
||||
LocateFS = LocateMethod(iota) // Locate on the filesystem according to package path.
|
||||
LocateAppended // Locate boxes appended to the executable.
|
||||
LocateEmbedded // Locate embedded boxes.
|
||||
LocateWorkingDirectory // Locate on the binary working directory
|
||||
)
|
||||
|
||||
// Config allows customizing the box lookup behavior.
|
||||
type Config struct {
|
||||
// LocateOrder defines the priority order that boxes are searched for. By
|
||||
// default, the package global FindBox searches for embedded boxes first,
|
||||
// then appended boxes, and then finally boxes on the filesystem. That
|
||||
// search order may be customized by provided the ordered list here. Leaving
|
||||
// out a particular method will omit that from the search space. For
|
||||
// example, []LocateMethod{LocateEmbedded, LocateAppended} will never search
|
||||
// the filesystem for boxes.
|
||||
LocateOrder []LocateMethod
|
||||
}
|
||||
|
||||
// FindBox searches for boxes using the LocateOrder of the config.
|
||||
func (c *Config) FindBox(boxName string) (*Box, error) {
|
||||
return findBox(boxName, c.LocateOrder)
|
||||
}
|
||||
|
||||
// MustFindBox searches for boxes using the LocateOrder of the config, like
|
||||
// FindBox does. It does not return an error, instead it panics when an error
|
||||
// occurs.
|
||||
func (c *Config) MustFindBox(boxName string) *Box {
|
||||
box, err := findBox(boxName, c.LocateOrder)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return box
|
||||
}
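A usage sketch for the Config type above (not part of the vendored file): restricting the locate order to embedded and appended boxes means the filesystem is never searched, which is usually what a deployed binary wants. The "static" box name is only an example.

package main

import rice "github.com/GeertJohan/go.rice"

// loadStaticBox looks up the "static" box, but only from embedded or
// appended data, never from the filesystem.
func loadStaticBox() (*rice.Box, error) {
	conf := rice.Config{
		LocateOrder: []rice.LocateMethod{rice.LocateEmbedded, rice.LocateAppended},
	}
	return conf.FindBox("static")
}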
|
|
@ -0,0 +1,4 @@
|
|||
package rice
|
||||
|
||||
// Debug can be set to true to enable debugging.
|
||||
var Debug = false
|
|
@ -0,0 +1,90 @@
|
|||
package rice
|
||||
|
||||
import (
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/GeertJohan/go.rice/embedded"
|
||||
)
|
||||
|
||||
// re-type to make exported methods invisible to user (godoc)
|
||||
// they're not required for the user
|
||||
// embeddedDirInfo implements os.FileInfo
|
||||
type embeddedDirInfo embedded.EmbeddedDir
|
||||
|
||||
// Name returns the base name of the directory
|
||||
// (implementing os.FileInfo)
|
||||
func (ed *embeddedDirInfo) Name() string {
|
||||
return ed.Filename
|
||||
}
|
||||
|
||||
// Size always returns 0
|
||||
// (implementing os.FileInfo)
|
||||
func (ed *embeddedDirInfo) Size() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
// Mode returns the file mode bits
|
||||
// (implementing os.FileInfo)
|
||||
func (ed *embeddedDirInfo) Mode() os.FileMode {
|
||||
return os.FileMode(0555 | os.ModeDir) // dr-xr-xr-x
|
||||
}
|
||||
|
||||
// ModTime returns the modification time
|
||||
// (implementing os.FileInfo)
|
||||
func (ed *embeddedDirInfo) ModTime() time.Time {
|
||||
return ed.DirModTime
|
||||
}
|
||||
|
||||
// IsDir returns the abbreviation for Mode().IsDir() (always true)
|
||||
// (implementing os.FileInfo)
|
||||
func (ed *embeddedDirInfo) IsDir() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// Sys returns the underlying data source (always nil)
|
||||
// (implementing os.FileInfo)
|
||||
func (ed *embeddedDirInfo) Sys() interface{} {
|
||||
return nil
|
||||
}
|
||||
|
||||
// re-type to make exported methods invisible to user (godoc)
|
||||
// they're not required for the user
|
||||
// embeddedFileInfo implements os.FileInfo
|
||||
type embeddedFileInfo embedded.EmbeddedFile
|
||||
|
||||
// Name returns the base name of the file
|
||||
// (implementing os.FileInfo)
|
||||
func (ef *embeddedFileInfo) Name() string {
|
||||
return ef.Filename
|
||||
}
|
||||
|
||||
// Size returns the length in bytes for regular files; system-dependent for others
|
||||
// (implementing os.FileInfo)
|
||||
func (ef *embeddedFileInfo) Size() int64 {
|
||||
return int64(len(ef.Content))
|
||||
}
|
||||
|
||||
// Mode returns the file mode bits
|
||||
// (implementing os.FileInfo)
|
||||
func (ef *embeddedFileInfo) Mode() os.FileMode {
|
||||
return os.FileMode(0555) // r-xr-xr-x
|
||||
}
|
||||
|
||||
// ModTime returns the modification time
|
||||
// (implementing os.FileInfo)
|
||||
func (ef *embeddedFileInfo) ModTime() time.Time {
|
||||
return ef.FileModTime
|
||||
}
|
||||
|
||||
// IsDir returns the abbreviation for Mode().IsDir() (always false)
|
||||
// (implementing os.FileInfo)
|
||||
func (ef *embeddedFileInfo) IsDir() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// Sys returns the underlying data source (always nil)
|
||||
// (implementing os.FileInfo)
|
||||
func (ef *embeddedFileInfo) Sys() interface{} {
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,80 @@
|
|||
// Package embedded defines embedded data types that are shared between the go.rice package and generated code.
|
||||
package embedded
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
EmbedTypeGo = 0
|
||||
EmbedTypeSyso = 1
|
||||
)
|
||||
|
||||
// EmbeddedBox defines an embedded box
|
||||
type EmbeddedBox struct {
|
||||
Name string // box name
|
||||
Time time.Time // embed time
|
||||
EmbedType int // kind of embedding
|
||||
Files map[string]*EmbeddedFile // ALL embedded files by full path
|
||||
Dirs map[string]*EmbeddedDir // ALL embedded dirs by full path
|
||||
}
|
||||
|
||||
// Link creates the ChildDirs and ChildFiles links in all EmbeddedDir's
|
||||
func (e *EmbeddedBox) Link() {
|
||||
for path, ed := range e.Dirs {
|
||||
fmt.Println(path)
|
||||
ed.ChildDirs = make([]*EmbeddedDir, 0)
|
||||
ed.ChildFiles = make([]*EmbeddedFile, 0)
|
||||
}
|
||||
for path, ed := range e.Dirs {
|
||||
parentDirpath, _ := filepath.Split(path)
|
||||
if strings.HasSuffix(parentDirpath, "/") {
|
||||
parentDirpath = parentDirpath[:len(parentDirpath)-1]
|
||||
}
|
||||
parentDir := e.Dirs[parentDirpath]
|
||||
if parentDir == nil {
|
||||
panic("parentDir `" + parentDirpath + "` is missing in embedded box")
|
||||
}
|
||||
parentDir.ChildDirs = append(parentDir.ChildDirs, ed)
|
||||
}
|
||||
for path, ef := range e.Files {
|
||||
dirpath, _ := filepath.Split(path)
|
||||
if strings.HasSuffix(dirpath, "/") {
|
||||
dirpath = dirpath[:len(dirpath)-1]
|
||||
}
|
||||
dir := e.Dirs[dirpath]
|
||||
if dir == nil {
|
||||
panic("dir `" + dirpath + "` is missing in embedded box")
|
||||
}
|
||||
dir.ChildFiles = append(dir.ChildFiles, ef)
|
||||
}
|
||||
}
|
||||
|
||||
// EmbeddedDir is instanced in the code generated by the rice tool and contains all necessary information about an embedded directory
|
||||
type EmbeddedDir struct {
|
||||
Filename string
|
||||
DirModTime time.Time
|
||||
ChildDirs []*EmbeddedDir // direct children, as returned by virtualDir.Readdir()
ChildFiles []*EmbeddedFile // direct children, as returned by virtualDir.Readdir()
|
||||
}
|
||||
|
||||
// EmbeddedFile is instanced in the code generated by the rice tool and contains all necessary information about an embedded file
|
||||
type EmbeddedFile struct {
|
||||
Filename string // filename
|
||||
FileModTime time.Time
|
||||
Content string
|
||||
}
|
||||
|
||||
// EmbeddedBoxes is a public register of embedded boxes
|
||||
var EmbeddedBoxes = make(map[string]*EmbeddedBox)
|
||||
|
||||
// RegisterEmbeddedBox registers an EmbeddedBox
|
||||
func RegisterEmbeddedBox(name string, box *EmbeddedBox) {
|
||||
if _, exists := EmbeddedBoxes[name]; exists {
|
||||
panic(fmt.Sprintf("EmbeddedBox with name `%s` exists already", name))
|
||||
}
|
||||
EmbeddedBoxes[name] = box
|
||||
}
|
|
@ -0,0 +1,144 @@
|
|||
package rice
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// File implements the io.Reader, io.Seeker, io.Closer and http.File interfaces
|
||||
type File struct {
|
||||
// File abstracts file methods so the user doesn't see the difference between rice.virtualFile, rice.virtualDir and os.File
|
||||
// TODO: maybe use internal File interface and four implementations: *os.File, appendedFile, virtualFile, virtualDir
|
||||
|
||||
// real file on disk
|
||||
realF *os.File
|
||||
|
||||
// when embedded (go)
|
||||
virtualF *virtualFile
|
||||
virtualD *virtualDir
|
||||
|
||||
// when appended (zip)
|
||||
appendedF *appendedFile
|
||||
appendedFileReader *bytes.Reader
|
||||
// TODO: is appendedFileReader subject of races? Might need a lock here..
|
||||
}
|
||||
|
||||
// Close is like (*os.File).Close()
|
||||
// Visit http://golang.org/pkg/os/#File.Close for more information
|
||||
func (f *File) Close() error {
|
||||
if f.appendedF != nil {
|
||||
if f.appendedFileReader == nil {
|
||||
return errors.New("already closed")
|
||||
}
|
||||
f.appendedFileReader = nil
|
||||
return nil
|
||||
}
|
||||
if f.virtualF != nil {
|
||||
return f.virtualF.close()
|
||||
}
|
||||
if f.virtualD != nil {
|
||||
return f.virtualD.close()
|
||||
}
|
||||
return f.realF.Close()
|
||||
}
|
||||
|
||||
// Stat is like (*os.File).Stat()
|
||||
// Visit http://golang.org/pkg/os/#File.Stat for more information
|
||||
func (f *File) Stat() (os.FileInfo, error) {
|
||||
if f.appendedF != nil {
|
||||
if f.appendedF.dir {
|
||||
return f.appendedF.dirInfo, nil
|
||||
}
|
||||
if f.appendedFileReader == nil {
|
||||
return nil, errors.New("file is closed")
|
||||
}
|
||||
return f.appendedF.zipFile.FileInfo(), nil
|
||||
}
|
||||
if f.virtualF != nil {
|
||||
return f.virtualF.stat()
|
||||
}
|
||||
if f.virtualD != nil {
|
||||
return f.virtualD.stat()
|
||||
}
|
||||
return f.realF.Stat()
|
||||
}
|
||||
|
||||
// Readdir is like (*os.File).Readdir()
|
||||
// Visit http://golang.org/pkg/os/#File.Readdir for more information
|
||||
func (f *File) Readdir(count int) ([]os.FileInfo, error) {
|
||||
if f.appendedF != nil {
|
||||
if f.appendedF.dir {
|
||||
fi := make([]os.FileInfo, 0, len(f.appendedF.children))
|
||||
for _, childAppendedFile := range f.appendedF.children {
|
||||
if childAppendedFile.dir {
|
||||
fi = append(fi, childAppendedFile.dirInfo)
|
||||
} else {
|
||||
fi = append(fi, childAppendedFile.zipFile.FileInfo())
|
||||
}
|
||||
}
|
||||
return fi, nil
|
||||
}
|
||||
//++ TODO: is os.ErrInvalid the correct error for Readdir on file?
|
||||
return nil, os.ErrInvalid
|
||||
}
|
||||
if f.virtualF != nil {
|
||||
return f.virtualF.readdir(count)
|
||||
}
|
||||
if f.virtualD != nil {
|
||||
return f.virtualD.readdir(count)
|
||||
}
|
||||
return f.realF.Readdir(count)
|
||||
}
|
||||
|
||||
// Read is like (*os.File).Read()
|
||||
// Visit http://golang.org/pkg/os/#File.Read for more information
|
||||
func (f *File) Read(bts []byte) (int, error) {
|
||||
if f.appendedF != nil {
|
||||
if f.appendedFileReader == nil {
|
||||
return 0, &os.PathError{
|
||||
Op: "read",
|
||||
Path: filepath.Base(f.appendedF.zipFile.Name),
|
||||
Err: errors.New("file is closed"),
|
||||
}
|
||||
}
|
||||
if f.appendedF.dir {
|
||||
return 0, &os.PathError{
|
||||
Op: "read",
|
||||
Path: filepath.Base(f.appendedF.zipFile.Name),
|
||||
Err: errors.New("is a directory"),
|
||||
}
|
||||
}
|
||||
return f.appendedFileReader.Read(bts)
|
||||
}
|
||||
if f.virtualF != nil {
|
||||
return f.virtualF.read(bts)
|
||||
}
|
||||
if f.virtualD != nil {
|
||||
return f.virtualD.read(bts)
|
||||
}
|
||||
return f.realF.Read(bts)
|
||||
}
|
||||
|
||||
// Seek is like (*os.File).Seek()
|
||||
// Visit http://golang.org/pkg/os/#File.Seek for more information
|
||||
func (f *File) Seek(offset int64, whence int) (int64, error) {
|
||||
if f.appendedF != nil {
|
||||
if f.appendedFileReader == nil {
|
||||
return 0, &os.PathError{
|
||||
Op: "seek",
|
||||
Path: filepath.Base(f.appendedF.zipFile.Name),
|
||||
Err: errors.New("file is closed"),
|
||||
}
|
||||
}
|
||||
return f.appendedFileReader.Seek(offset, whence)
|
||||
}
|
||||
if f.virtualF != nil {
|
||||
return f.virtualF.seek(offset, whence)
|
||||
}
|
||||
if f.virtualD != nil {
|
||||
return f.virtualD.seek(offset, whence)
|
||||
}
|
||||
return f.realF.Seek(offset, whence)
|
||||
}
|
|
@ -0,0 +1,21 @@
|
|||
package rice
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// HTTPBox implements http.FileSystem which allows the use of Box with a http.FileServer.
|
||||
// e.g.: http.Handle("/", http.FileServer(rice.MustFindBox("http-files").HTTPBox()))
|
||||
type HTTPBox struct {
|
||||
*Box
|
||||
}
|
||||
|
||||
// HTTPBox creates a new HTTPBox from an existing Box
|
||||
func (b *Box) HTTPBox() *HTTPBox {
|
||||
return &HTTPBox{b}
|
||||
}
|
||||
|
||||
// Open returns a File using the http.File interface
|
||||
func (hb *HTTPBox) Open(name string) (http.File, error) {
|
||||
return hb.Box.Open(name)
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
package rice
|
||||
|
||||
import "os"
|
||||
|
||||
// SortByName allows an array of os.FileInfo objects
|
||||
// to be easily sorted by filename using sort.Sort(SortByName(array))
|
||||
type SortByName []os.FileInfo
|
||||
|
||||
func (f SortByName) Len() int { return len(f) }
|
||||
func (f SortByName) Less(i, j int) bool { return f[i].Name() < f[j].Name() }
|
||||
func (f SortByName) Swap(i, j int) { f[i], f[j] = f[j], f[i] }
|
||||
|
||||
// SortByModified allows an array of os.FileInfo objects
|
||||
// to be easily sorted by modified date using sort.Sort(SortByModified(array))
|
||||
type SortByModified []os.FileInfo
|
||||
|
||||
func (f SortByModified) Len() int { return len(f) }
|
||||
func (f SortByModified) Less(i, j int) bool { return f[i].ModTime().Unix() > f[j].ModTime().Unix() }
|
||||
func (f SortByModified) Swap(i, j int) { f[i], f[j] = f[j], f[i] }
|
|
@ -0,0 +1,252 @@
|
|||
package rice
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
|
||||
"github.com/GeertJohan/go.rice/embedded"
|
||||
)
|
||||
|
||||
//++ TODO: IDEA: merge virtualFile and virtualDir, this decreases work done by rice.File
|
||||
|
||||
// Error indicating some function is not implemented yet (but available to satisfy an interface)
|
||||
var ErrNotImplemented = errors.New("not implemented yet")
|
||||
|
||||
// virtualFile is a 'stateful' virtual file.
|
||||
// virtualFile wraps an *EmbeddedFile for a call to Box.Open() and virtualizes 'read cursor' (offset) and 'closing'.
|
||||
// virtualFile is only internally visible and should be exposed through rice.File
|
||||
type virtualFile struct {
|
||||
*embedded.EmbeddedFile // the actual embedded file, embedded to obtain methods
|
||||
offset int64 // read position on the virtual file
|
||||
closed bool // closed when true
|
||||
}
|
||||
|
||||
// create a new virtualFile for given EmbeddedFile
|
||||
func newVirtualFile(ef *embedded.EmbeddedFile) *virtualFile {
|
||||
vf := &virtualFile{
|
||||
EmbeddedFile: ef,
|
||||
offset: 0,
|
||||
closed: false,
|
||||
}
|
||||
return vf
|
||||
}
|
||||
|
||||
//++ TODO check for nil pointers in all these methods. When so: return os.PathError with Err: os.ErrInvalid
|
||||
|
||||
func (vf *virtualFile) close() error {
|
||||
if vf.closed {
|
||||
return &os.PathError{
|
||||
Op: "close",
|
||||
Path: vf.EmbeddedFile.Filename,
|
||||
Err: errors.New("already closed"),
|
||||
}
|
||||
}
|
||||
vf.EmbeddedFile = nil
|
||||
vf.closed = true
|
||||
return nil
|
||||
}
|
||||
|
||||
func (vf *virtualFile) stat() (os.FileInfo, error) {
|
||||
if vf.closed {
|
||||
return nil, &os.PathError{
|
||||
Op: "stat",
|
||||
Path: vf.EmbeddedFile.Filename,
|
||||
Err: errors.New("bad file descriptor"),
|
||||
}
|
||||
}
|
||||
return (*embeddedFileInfo)(vf.EmbeddedFile), nil
|
||||
}
|
||||
|
||||
func (vf *virtualFile) readdir(count int) ([]os.FileInfo, error) {
|
||||
if vf.closed {
|
||||
return nil, &os.PathError{
|
||||
Op: "readdir",
|
||||
Path: vf.EmbeddedFile.Filename,
|
||||
Err: errors.New("bad file descriptor"),
|
||||
}
|
||||
}
|
||||
//TODO: return proper error for a readdir() call on a file
|
||||
return nil, ErrNotImplemented
|
||||
}
|
||||
|
||||
func (vf *virtualFile) read(bts []byte) (int, error) {
|
||||
if vf.closed {
|
||||
return 0, &os.PathError{
|
||||
Op: "read",
|
||||
Path: vf.EmbeddedFile.Filename,
|
||||
Err: errors.New("bad file descriptor"),
|
||||
}
|
||||
}
|
||||
|
||||
end := vf.offset + int64(len(bts))
|
||||
|
||||
if end >= int64(len(vf.Content)) {
|
||||
// end of file, so return what we have + EOF
|
||||
n := copy(bts, vf.Content[vf.offset:])
|
||||
vf.offset = 0
|
||||
return n, io.EOF
|
||||
}
|
||||
|
||||
n := copy(bts, vf.Content[vf.offset:end])
|
||||
vf.offset += int64(n)
|
||||
return n, nil
|
||||
|
||||
}
|
||||
|
||||
func (vf *virtualFile) seek(offset int64, whence int) (int64, error) {
|
||||
if vf.closed {
|
||||
return 0, &os.PathError{
|
||||
Op: "seek",
|
||||
Path: vf.EmbeddedFile.Filename,
|
||||
Err: errors.New("bad file descriptor"),
|
||||
}
|
||||
}
|
||||
var e error
|
||||
|
||||
//++ TODO: check if this is correct implementation for seek
|
||||
switch whence {
|
||||
case os.SEEK_SET:
|
||||
//++ check if new offset isn't out of bounds, set e when it is, then break out of switch
|
||||
vf.offset = offset
|
||||
case os.SEEK_CUR:
|
||||
		//++ check if new offset isn't out of bounds, set e when it is, then break out of switch
		vf.offset += offset
	case os.SEEK_END:
		//++ check if new offset isn't out of bounds, set e when it is, then break out of switch
		vf.offset = int64(len(vf.EmbeddedFile.Content)) - offset
	}

	if e != nil {
		return 0, &os.PathError{
			Op:   "seek",
			Path: vf.Filename,
			Err:  e,
		}
	}

	return vf.offset, nil
}

// virtualDir is a 'stateful' virtual directory.
// virtualDir wraps an *EmbeddedDir for a call to Box.Open() and virtualizes 'closing'.
// virtualDir is only internally visible and should be exposed through rice.File
type virtualDir struct {
	*embedded.EmbeddedDir
	offset int // readdir position on the directory
	closed bool
}

// create a new virtualDir for given EmbeddedDir
func newVirtualDir(ed *embedded.EmbeddedDir) *virtualDir {
	vd := &virtualDir{
		EmbeddedDir: ed,
		offset:      0,
		closed:      false,
	}
	return vd
}

func (vd *virtualDir) close() error {
	//++ TODO: needs sync mutex?
	if vd.closed {
		return &os.PathError{
			Op:   "close",
			Path: vd.EmbeddedDir.Filename,
			Err:  errors.New("already closed"),
		}
	}
	vd.closed = true
	return nil
}

func (vd *virtualDir) stat() (os.FileInfo, error) {
	if vd.closed {
		return nil, &os.PathError{
			Op:   "stat",
			Path: vd.EmbeddedDir.Filename,
			Err:  errors.New("bad file descriptor"),
		}
	}
	return (*embeddedDirInfo)(vd.EmbeddedDir), nil
}

func (vd *virtualDir) readdir(n int) (fi []os.FileInfo, err error) {

	if vd.closed {
		return nil, &os.PathError{
			Op:   "readdir",
			Path: vd.EmbeddedDir.Filename,
			Err:  errors.New("bad file descriptor"),
		}
	}

	// Build up the array of our contents
	var files []os.FileInfo

	// Add the child directories
	for _, child := range vd.ChildDirs {
		child.Filename = filepath.Base(child.Filename)
		files = append(files, (*embeddedDirInfo)(child))
	}

	// Add the child files
	for _, child := range vd.ChildFiles {
		child.Filename = filepath.Base(child.Filename)
		files = append(files, (*embeddedFileInfo)(child))
	}

	// Sort it by filename (lexical order)
	sort.Sort(SortByName(files))

	// Return all contents if that's what is requested
	if n <= 0 {
		vd.offset = 0
		return files, nil
	}

	// If user has requested past the end of our list
	// return what we can and send an EOF
	if vd.offset+n >= len(files) {
		offset := vd.offset
		vd.offset = 0
		return files[offset:], io.EOF
	}

	offset := vd.offset
	vd.offset += n
	return files[offset : offset+n], nil

}

func (vd *virtualDir) read(bts []byte) (int, error) {
	if vd.closed {
		return 0, &os.PathError{
			Op:   "read",
			Path: vd.EmbeddedDir.Filename,
			Err:  errors.New("bad file descriptor"),
		}
	}
	return 0, &os.PathError{
		Op:   "read",
		Path: vd.EmbeddedDir.Filename,
		Err:  errors.New("is a directory"),
	}
}

func (vd *virtualDir) seek(offset int64, whence int) (int64, error) {
	if vd.closed {
		return 0, &os.PathError{
			Op:   "seek",
			Path: vd.EmbeddedDir.Filename,
			Err:  errors.New("bad file descriptor"),
		}
	}
	return 0, &os.PathError{
		Op:   "seek",
		Path: vd.Filename,
		Err:  errors.New("is a directory"),
	}
}
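The virtualFile/virtualDir plumbing above is internal to go.rice; application code only sees it behind rice.File and http.FileSystem. For context, serving a box over HTTP needs nothing more than the sketch below; the box name, route, and port are assumptions for illustration, not taken from this diff:

package main

import (
    "net/http"

    rice "github.com/GeertJohan/go.rice"
)

func main() {
    // MustFindBox panics if the box cannot be located; HTTPBox adapts the
    // box (embedded, appended, or live on disk) to http.FileSystem.
    box := rice.MustFindBox("frontend/static")
    http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(box.HTTPBox())))
    http.ListenAndServe(":5000", nil)
}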
@ -0,0 +1,122 @@
package rice

import (
    "os"
    "path/filepath"
    "sort"
    "strings"
)

// Walk is like filepath.Walk()
// Visit http://golang.org/pkg/path/filepath/#Walk for more information
func (b *Box) Walk(path string, walkFn filepath.WalkFunc) error {

    pathFile, err := b.Open(path)
    if err != nil {
        return err
    }
    defer pathFile.Close()

    pathInfo, err := pathFile.Stat()
    if err != nil {
        return err
    }

    if b.IsAppended() || b.IsEmbedded() {
        return b.walk(path, pathInfo, walkFn)
    }

    // We don't have any embedded or appended box so use live filesystem mode
    return filepath.Walk(b.absolutePath+string(os.PathSeparator)+path, func(path string, info os.FileInfo, err error) error {

        // Strip out the box name from the returned paths
        path = strings.TrimPrefix(path, b.absolutePath+string(os.PathSeparator))
        return walkFn(path, info, err)

    })

}

// walk recursively descends path.
// See walk() in $GOROOT/src/pkg/path/filepath/path.go
func (b *Box) walk(path string, info os.FileInfo, walkFn filepath.WalkFunc) error {

    err := walkFn(path, info, nil)
    if err != nil {
        if info.IsDir() && err == filepath.SkipDir {
            return nil
        }
        return err
    }

    if !info.IsDir() {
        return nil
    }

    names, err := b.readDirNames(path)
    if err != nil {
        return walkFn(path, info, err)
    }

    for _, name := range names {

        filename := filepath.Join(path, name)
        fileObject, err := b.Open(filename)
        if err != nil {
            return err
        }
        defer fileObject.Close()

        fileInfo, err := fileObject.Stat()
        if err != nil {
            if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir {
                return err
            }
        } else {
            err = b.walk(filename, fileInfo, walkFn)
            if err != nil {
                if !fileInfo.IsDir() || err != filepath.SkipDir {
                    return err
                }
            }
        }
    }

    return nil

}

// readDirNames reads the directory named by path and returns a sorted list of directory entries.
// See readDirNames() in $GOROOT/pkg/path/filepath/path.go
func (b *Box) readDirNames(path string) ([]string, error) {

    f, err := b.Open(path)
    if err != nil {
        return nil, err
    }
    defer f.Close()

    stat, err := f.Stat()
    if err != nil {
        return nil, err
    }

    if !stat.IsDir() {
        return nil, nil
    }

    infos, err := f.Readdir(0)
    if err != nil {
        return nil, err
    }

    var names []string

    for _, info := range infos {
        names = append(names, info.Name())
    }

    sort.Strings(names)
    return names, nil

}
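Box.Walk mirrors filepath.Walk over a box's contents whether the box is embedded, appended, or read live from disk. A small sketch of walking a box and printing its regular files; the box name "blog" and the empty root path are assumptions here:

package main

import (
    "fmt"
    "log"
    "os"

    rice "github.com/GeertJohan/go.rice"
)

func main() {
    box := rice.MustFindBox("blog")
    // Visit every entry under the box root and print the non-directories.
    err := box.Walk("", func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return err
        }
        if !info.IsDir() {
            fmt.Println(path)
        }
        return nil
    })
    if err != nil {
        log.Fatal(err)
    }
}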
@ -1,117 +0,0 @@
package asarfs

import (
    "io"
    "mime"
    "net/http"
    "os"
    "path/filepath"
    "strings"

    "layeh.com/asar"
)

// ASARfs serves the contents of an asar archive as an HTTP handler.
type ASARfs struct {
    fin      *os.File
    ar       *asar.Entry
    notFound http.Handler
}

// Close closes the underlying file used for the asar archive.
func (a *ASARfs) Close() error {
    return a.fin.Close()
}

// Open satisfies the http.FileSystem interface for ASARfs.
func (a *ASARfs) Open(name string) (http.File, error) {
    if name == "/" {
        name = "/index.html"
    }

    e := a.ar.Find(strings.Split(name, "/")[1:]...)
    if e == nil {
        return nil, os.ErrNotExist
    }

    f := &file{
        Entry: e,
        r:     e.Open(),
    }

    return f, nil
}

// ServeHTTP satisfies the http.Handler interface for ASARfs.
func (a *ASARfs) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    if r.RequestURI == "/" {
        r.RequestURI = "/index.html"
    }

    f := a.ar.Find(strings.Split(r.RequestURI, "/")[1:]...)
    if f == nil {
        a.notFound.ServeHTTP(w, r)
        return
    }

    ext := filepath.Ext(f.Name)
    mimeType := mime.TypeByExtension(ext)

    w.Header().Add("Content-Type", mimeType)
    f.WriteTo(w)
}

// New creates a new ASARfs pointer based on the filepath to the archive and
// a HTTP handler to hit when a file is not found.
func New(archivePath string, notFound http.Handler) (*ASARfs, error) {
    fin, err := os.Open(archivePath)
    if err != nil {
        return nil, err
    }

    root, err := asar.Decode(fin)
    if err != nil {
        return nil, err
    }

    a := &ASARfs{
        fin:      fin,
        ar:       root,
        notFound: notFound,
    }

    return a, nil
}

// file is an internal shim that mimics http.File for an asar entry.
type file struct {
    *asar.Entry
    r io.ReadSeeker
}

func (f *file) Close() error {
    f.r = nil
    return nil
}

func (f *file) Read(buf []byte) (n int, err error) {
    return f.r.Read(buf)
}

func (f *file) Seek(offset int64, whence int) (int64, error) {
    return f.r.Seek(offset, whence)
}

func (f *file) Readdir(count int) ([]os.FileInfo, error) {
    result := []os.FileInfo{}

    for _, e := range f.Entry.Children {
        result = append(result, e.FileInfo())
    }

    return result, nil
}

func (f *file) Stat() (os.FileInfo, error) {
    return f.Entry.FileInfo(), nil
}
@ -1,156 +0,0 @@
// +build go1.8

package asarfs

import (
    "fmt"
    "io"
    "io/ioutil"
    "math/rand"
    "net"
    "net/http"
    "os"
    "testing"
)

func BenchmarkHTTPFileSystem(b *testing.B) {
    fs := http.FileServer(http.Dir("."))

    l, s, err := setupHandler(fs)
    if err != nil {
        b.Fatal(err)
    }
    defer l.Close()
    defer s.Close()

    url := fmt.Sprintf("http://%s", l.Addr())

    for n := 0; n < b.N; n++ {
        testHandler(url)
    }
}

func BenchmarkASARfs(b *testing.B) {
    fs, err := New("./static.asar", http.HandlerFunc(do404))
    if err != nil {
        b.Fatal(err)
    }

    l, s, err := setupHandler(fs)
    if err != nil {
        b.Fatal(err)
    }
    defer l.Close()
    defer s.Close()

    url := fmt.Sprintf("http://%s", l.Addr())

    for n := 0; n < b.N; n++ {
        testHandler(url)
    }
}

func BenchmarkPreloadedASARfs(b *testing.B) {
    for n := 0; n < b.N; n++ {
        testHandler(asarfsurl)
    }
}

func BenchmarkASARfsHTTPFilesystem(b *testing.B) {
    fs, err := New("./static.asar", http.HandlerFunc(do404))
    if err != nil {
        b.Fatal(err)
    }

    l, s, err := setupHandler(http.FileServer(fs))
    if err != nil {
        b.Fatal(err)
    }
    defer l.Close()
    defer s.Close()

    url := fmt.Sprintf("http://%s", l.Addr())

    for n := 0; n < b.N; n++ {
        testHandler(url)
    }
}

func BenchmarkPreloadedASARfsHTTPFilesystem(b *testing.B) {
    for n := 0; n < b.N; n++ {
        testHandler(asarfshttpfsurl)
    }
}

func do404(w http.ResponseWriter, r *http.Request) {
    http.Error(w, "Not found", http.StatusNotFound)
}

func setupHandler(h http.Handler) (net.Listener, *http.Server, error) {
    l, err := net.Listen("tcp", ":0")
    if err != nil {
        panic(err)
    }
    defer l.Close()

    s := &http.Server{
        Handler: h,
    }
    go s.ListenAndServe()

    return l, s, nil
}

func testHandler(u string) error {
    num := rand.Intn(9)
    num++
    sub := rand.Intn(99)

    fname := fmt.Sprintf("/static/%d/%d%d.json", num, num, sub)

    resp, err := http.Get(u + fname)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    _, err = io.Copy(ioutil.Discard, resp.Body)
    if err != nil {
        panic(err)
    }

    return nil
}

var (
    asarfsurl       string
    asarfshttpfsurl string
)

func TestMain(m *testing.M) {
    go func() {
        fs, err := New("./static.asar", http.HandlerFunc(do404))
        if err != nil {
        }

        l, _, err := setupHandler(fs)
        if err != nil {
        }

        asarfsurl = fmt.Sprintf("http://%s", l.Addr().String())
    }()

    go func() {
        fs, err := New("./static.asar", http.HandlerFunc(do404))
        if err != nil {
        }

        l, _, err := setupHandler(http.FileServer(fs))
        if err != nil {
        }

        asarfshttpfsurl = fmt.Sprintf("http://%s", l.Addr().String())
    }()

    os.Exit(m.Run())
}
@ -1,24 +0,0 @@
// +build ignore

package main

import (
    "log"
    "net/http"
    "os"

    "github.com/Xe/asarfs"
)

func do404(w http.ResponseWriter, r *http.Request) {
    http.Error(w, "Not found", http.StatusNotFound)
}

func main() {
    fs, err := asarfs.New("./static.asar", http.HandlerFunc(do404))
    if err != nil {
        log.Fatal(err)
    }

    http.ListenAndServe(":"+os.Getenv("PORT"), fs)
}
@ -0,0 +1,242 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/

/*
Package jsonfeed is a set of types and convenience functions for reading and
parsing JSON Feed version 1 as defined here: https://jsonfeed.org/version/1
*/
package jsonfeed

import (
    "encoding/json"
    "io"
    "time"
)

// CurrentVersion will point to the current specification of JSON feed
// that this package implements.
const CurrentVersion = "https://jsonfeed.org/version/1"

// Item is a single article or link in a JSON Feed.
type Item struct {
    // ID is unique for that item for that feed over time. If an item
    // is ever updated, the id should be unchanged. New items should
    // never use a previously-used id. If an id is presented as a number
    // or other type, a JSON Feed reader must coerce it to a string.
    // Ideally, the id is the full URL of the resource described by the
    // item, since URLs make great unique identifiers.
    ID string `json:"id"`

    // URL is the URL of the resource described by the item. It’s the
    // permalink. This may be the same as the id — but should be present
    // regardless.
    URL string `json:"url,omitempty"`

    // ExternalURL is the URL of a page elsewhere. This is especially
    // useful for linkblogs. If url links to where you’re talking about
    // a thing, then this links to the thing you’re talking about.
    ExternalURL string `json:"external_url,omitempty"`

    // Title (optional, string) is plain text. Microblog items in
    // particular may omit titles.
    Title string `json:"title,omitempty"`

    // ContentHTML and ContentText are each optional strings — but one
    // or both must be present. This is the HTML or plain text of the
    // item. Important: the only place HTML is allowed in this format
    // is in content_html. A Twitter-like service might use content_text,
    // while a blog might use content_html. Use whichever makes sense
    // for your resource. (It doesn’t even have to be the same for each
    // item in a feed.)
    ContentHTML string `json:"content_html,omitempty"`
    ContentText string `json:"content_text,omitempty"`

    // Summary is a plain text sentence or two describing the item.
    // This might be presented in a timeline, for instance, where a
    // detail view would display all of ContentHTML or ContentText.
    Summary string `json:"summary,omitempty"`

    // Image is the URL of the main image for the item. This image
    // may also appear in the content_html — if so, it’s a hint to
    // the feed reader that this is the main, featured image. Feed
    // readers may use the image as a preview (probably resized as
    // a thumbnail and placed in a timeline).
    Image string `json:"image,omitempty"`

    // BannerImage is the URL of an image to use as a banner. Some
    // blogging systems (such as Medium) display a different banner
    // image chosen to go with each post, but that image wouldn’t
    // otherwise appear in the content_html. A feed reader with a
    // detail view may choose to show this banner image at the top
    // of the detail view, possibly with the title overlaid.
    BannerImage string `json:"banner_image,omitempty"`

    // DatePublished specifies the date of this Item's publication.
    DatePublished time.Time `json:"date_published,omitempty"`

    // DateModified specifies the date of this Item's last modification
    // (if applicable)
    DateModified time.Time `json:"date_modified,omitempty"`

    // Author has the same structure as the top-level author. If not
    // specified in an item, then the top-level author, if present,
    // is the author of the item.
    Author *Author `json:"author,omitempty"`

    // Tags can have any plain text values you want. Tags tend to be
    // just one word, but they may be anything. Note: they are not
    // the equivalent of Twitter hashtags. Some blogging systems and
    // other feed formats call these categories.
    Tags []string `json:"tags,omitempty"`

    // Attachments (optional, array) lists related resources. Podcasts,
    // for instance, would include an attachment that’s an audio or
    // video file.
    Attachments []Attachment `json:"attachments,omitempty"`
}

// Author specifies the feed author. The author object has several members.
// These are all optional, but if you provide an author object, then at
// least one is required.
type Author struct {
    // Name is the author's name.
    Name string `json:"name,omitempty"`

    // URL is the URL of a site owned by the author. It could be a
    // blog, micro-blog, Twitter account, and so on. Ideally the linked-to
    // page provides a way to contact the author, but that’s not
    // required. The URL could be a mailto: link, though we suspect
    // that will be rare.
    URL string `json:"url,omitempty"`

    // Avatar is the URL for an image for the author. As with icon,
    // it should be square and relatively large — such as 512 x 512 —
    // and should use transparency where appropriate, since it may
    // be rendered on a non-white background.
    Avatar string `json:"avatar,omitempty"`
}

// Hub describes endpoints that can be used to subscribe to real-time
// notifications from the publisher of this feed. Each object has a type
// and url, both of which are required.
type Hub struct {
    Type string `json:"type"`
    URL  string `json:"url"`
}

// Attachment is a related resource to an Item. If the Feed describes a
// podcast, this would refer to the episodes of said podcast.
type Attachment struct {
    // URL specifies the location of the attachment.
    URL string `json:"url"`

    // MIMEType specifies the type of the attachment, such as "audio/mpeg".
    MIMEType string `json:"mime_type"`

    // Title is a name for the attachment. Important: if there are multiple
    // attachments, and two or more have the exact same title (when title
    // is present), then they are considered as alternate representations
    // of the same thing. In this way a podcaster, for instance, might
    // provide an audio recording in different formats.
    Title string `json:"title,omitempty"`

    // SizeInBytes specifies the attachment filesize in bytes.
    SizeInBytes int64 `json:"size_in_bytes,omitempty"`

    // DurationInSeconds specifies how long the attachment takes to listen
    // to or watch.
    DurationInSeconds int64 `json:"duration_in_seconds,omitempty"`
}

// Feed is a list that may change over time, and the individual items in the
// list may change.
//
// Think of a blog or microblog, Twitter or Facebook timeline, set of commits
// to a repository, or even a server log. These are all lists, and each could
// be described by a Feed.
//
// A JSON Feed starts with some info at the top: it says where the Feed comes
// from, and may say who created it and so on.
type Feed struct {
    // Version is the URL of the version of the format the Feed uses.
    Version string `json:"version"`

    // Title is the name of the Feed, which will often correspond to the
    // name of the website (blog, for instance), though not necessarily.
    Title string `json:"title"`

    // HomePageURL is the URL of the resource that the Feed describes.
    // This resource may or may not actually be a “home” page, but it
    // should be an HTML page. If a Feed is published on the public web,
    // this should be considered as required. But it may not make sense
    // in the case of a file created on a desktop computer, when that
    // file is not shared or is shared only privately.
    //
    // This field is strongly recommended, but not required.
    HomePageURL string `json:"home_page_url,omitempty"`

    // FeedURL is the URL of the Feed, and serves as the unique identifier
    // for the Feed. As with home_page_url, this should be considered
    // required for Feeds on the public web.
    //
    // This field is strongly recommended, but not required.
    FeedURL string `json:"feed_url,omitempty"`

    // Description provides more detail, beyond the title, on what the Feed
    // is about. A Feed reader may display this text.
    Description string `json:"description,omitempty"`

    // UserComment is a description of the purpose of the Feed. This is for
    // the use of people looking at the raw JSON, and should be ignored by
    // Feed readers.
    UserComment string `json:"user_comment,omitempty"`

    // NextURL is the URL of a Feed that provides the next n items, where
    // n is determined by the publisher. This allows for pagination, but
    // with the expectation that reader software is not required to use it
    // and probably won’t use it very often. next_url must not be the same
    // as feed_url, and it must not be the same as a previous next_url
    // (to avoid infinite loops).
    NextURL string `json:"next_url,omitempty"`

    // Icon is the URL of an image for the Feed suitable to be used in a
    // timeline, much the way an avatar might be used. It should be square
    // and relatively large — such as 512 x 512 — so that it can be scaled-down
    // and so that it can look good on retina displays. It should use transparency
    // where appropriate, since it may be rendered on a non-white background.
    Icon string `json:"icon,omitempty"`

    // Favicon is the URL of an image for the Feed suitable to be used in a
    // source list. It should be square and relatively small, but not smaller
    // than 64 x 64 (so that it can look good on retina displays). As with icon,
    // this image should use transparency where appropriate, since it may be
    // rendered on a non-white background.
    Favicon string `json:"favicon,omitempty"`

    // Author specifies the Feed author.
    Author Author `json:"author,omitempty"`

    // Expired specifies if the Feed will never update again. A Feed for a
    // temporary event, such as an instance of the Olympics, could expire.
    // If the value is true, then it’s expired. Any other value, or the
    // absence of expired, means the Feed may continue to update.
    Expired bool `json:"expired,omitempty"`

    // Hubs describes endpoints that can be used to subscribe to real-time
    // notifications from the publisher of this Feed.
    Hubs []Hub `json:"hubs,omitempty"`

    // Items is the list of Items in this Feed.
    Items []Item `json:"items"`
}

// Parse reads a JSON feed object out of a reader.
func Parse(r io.Reader) (Feed, error) {
    var feed Feed
    decoder := json.NewDecoder(r)
    if err := decoder.Decode(&feed); err != nil {
        return Feed{}, err
    }
    return feed, nil
}
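Since these are plain structs with json tags, emitting a feed is a single json.Marshal or Encoder call. A minimal sketch follows; the import path and every field value are invented for illustration:

package main

import (
    "encoding/json"
    "log"
    "os"
    "time"

    "github.com/Xe/site/jsonfeed" // import path assumed for this sketch
)

func main() {
    feed := jsonfeed.Feed{
        Version:     jsonfeed.CurrentVersion,
        Title:       "Example Blog",
        HomePageURL: "https://example.com",
        FeedURL:     "https://example.com/blog.json",
        Items: []jsonfeed.Item{
            {
                ID:            "https://example.com/blog/hello-world",
                URL:           "https://example.com/blog/hello-world",
                Title:         "Hello, world",
                ContentText:   "First post.",
                DatePublished: time.Now(),
            },
        },
    }

    // Serialize as JSON Feed v1; an HTTP handler would write this to the response.
    if err := json.NewEncoder(os.Stdout).Encode(feed); err != nil {
        log.Fatal(err)
    }
}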
@ -43,7 +43,17 @@ func (t *TextFormatter) Format(e Event) ([]byte, error) {
    writer.WriteString(e.Time.Format(t.TimeFormat))
    writer.WriteString("\"")

    for k, v := range e.Data {
    keys := make([]string, len(e.Data))
    i := 0

    for k := range e.Data {
        keys[i] = k
        i++
    }

    for _, k := range keys {
        v := e.Data[k]

        writer.WriteByte(' ')
        if shouldQuote(k) {
            writer.WriteString(fmt.Sprintf("%q", k))
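The added/removed markers were lost in this view: the lone `for k, v := range e.Data {` line is the one being dropped, and the new code snapshots the map keys into a slice before emitting fields. Presumably the keys get sorted further down the hunk (the rest is cut off here) so that log fields come out in a deterministic order, since Go randomizes map iteration. A standalone sketch of that pattern, with invented field data:

package main

import (
    "fmt"
    "sort"
)

func main() {
    data := map[string]interface{}{"msg": "hi", "status": 200, "path": "/blog"}

    // Collect the map keys, sort them, then emit fields in that order so
    // repeated runs print the same line.
    keys := make([]string, 0, len(data))
    for k := range data {
        keys = append(keys, k)
    }
    sort.Strings(keys)

    for _, k := range keys {
        fmt.Printf(" %s=%v", k, data[k])
    }
    fmt.Println()
}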
@ -0,0 +1,142 @@
// Package zipexe attempts to open an executable binary file as a zip file.
package zipexe

import (
    "archive/zip"
    "debug/elf"
    "debug/macho"
    "debug/pe"
    "errors"
    "io"
    "os"
)

// Open opens a zip file by path.
func Open(path string) (*zip.Reader, error) {
    _, rd, err := OpenCloser(path)
    return rd, err
}

// OpenCloser is like Open but returns an additional Closer to avoid leaking open files.
func OpenCloser(path string) (io.Closer, *zip.Reader, error) {
    file, err := os.Open(path)
    if err != nil {
        return nil, nil, err
    }
    finfo, err := file.Stat()
    if err != nil {
        return nil, nil, err
    }
    zr, err := NewReader(file, finfo.Size())
    if err != nil {
        return nil, nil, err
    }
    return file, zr, nil
}

// NewReader opens a zip file, specially handling various binaries that may
// have been augmented with zip data.
func NewReader(rda io.ReaderAt, size int64) (*zip.Reader, error) {
    handlers := []func(io.ReaderAt, int64) (*zip.Reader, error){
        zip.NewReader,
        zipExeReaderMacho,
        zipExeReaderElf,
        zipExeReaderPe,
    }

    for _, handler := range handlers {
        zfile, err := handler(rda, size)
        if err == nil {
            return zfile, nil
        }
    }
    return nil, errors.New("Couldn't Open As Executable")
}

// zipExeReaderMacho treats the file as a Mach-O binary
// (Mac OS X / Darwin executable) and attempts to find a zip archive.
func zipExeReaderMacho(rda io.ReaderAt, size int64) (*zip.Reader, error) {
    file, err := macho.NewFile(rda)
    if err != nil {
        return nil, err
    }

    var max int64
    for _, load := range file.Loads {
        seg, ok := load.(*macho.Segment)
        if ok {
            // Check if the segment contains a zip file
            if zfile, err := zip.NewReader(seg, int64(seg.Filesz)); err == nil {
                return zfile, nil
            }

            // Otherwise move end of file pointer
            end := int64(seg.Offset + seg.Filesz)
            if end > max {
                max = end
            }
        }
    }

    // No zip file within binary, try appended to end
    section := io.NewSectionReader(rda, max, size-max)
    return zip.NewReader(section, section.Size())
}

// zipExeReaderPe treats the file as a Portable Executable binary
// (Windows executable) and attempts to find a zip archive.
func zipExeReaderPe(rda io.ReaderAt, size int64) (*zip.Reader, error) {
    file, err := pe.NewFile(rda)
    if err != nil {
        return nil, err
    }

    var max int64
    for _, sec := range file.Sections {
        // Check if this section has a zip file
        if zfile, err := zip.NewReader(sec, int64(sec.Size)); err == nil {
            return zfile, nil
        }

        // Otherwise move end of file pointer
        end := int64(sec.Offset + sec.Size)
        if end > max {
            max = end
        }
    }

    // No zip file within binary, try appended to end
    section := io.NewSectionReader(rda, max, size-max)
    return zip.NewReader(section, section.Size())
}

// zipExeReaderElf treats the file as an ELF binary
// (linux/BSD/etc... executable) and attempts to find a zip archive.
func zipExeReaderElf(rda io.ReaderAt, size int64) (*zip.Reader, error) {
    file, err := elf.NewFile(rda)
    if err != nil {
        return nil, err
    }

    var max int64
    for _, sect := range file.Sections {
        if sect.Type == elf.SHT_NOBITS {
            continue
        }

        // Check if this section has a zip file
        if zfile, err := zip.NewReader(sect, int64(sect.Size)); err == nil {
            return zfile, nil
        }

        // Otherwise move end of file pointer
        end := int64(sect.Offset + sect.Size)
        if end > max {
            max = end
        }
    }

    // No zip file within binary, try appended to end
    section := io.NewSectionReader(rda, max, size-max)
    return zip.NewReader(section, section.Size())
}
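go.rice relies on this package to find a zip archive appended to (or embedded inside) the serving binary. A minimal sketch of opening the running executable and listing whatever archive is found in it; the upstream import path is an assumption here:

package main

import (
    "fmt"
    "log"
    "os"

    zipexe "github.com/daaku/go.zipexe" // upstream import path assumed; vendored under go.rice
)

func main() {
    // Open the running binary and probe it for zip data (Mach-O, ELF, PE,
    // or plain zip), then list the archive contents.
    closer, zr, err := zipexe.OpenCloser(os.Args[0])
    if err != nil {
        log.Fatal(err)
    }
    defer closer.Close()

    for _, f := range zr.File {
        fmt.Println(f.Name)
    }
}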
@ -1,144 +0,0 @@
// Package front is a frontmatter extraction library.
package front

import (
    "bufio"
    "bytes"
    "encoding/json"
    "errors"
    "io"
    "strings"

    "gopkg.in/yaml.v2"
)

var (
    //ErrIsEmpty is an error indicating no front matter was found
    ErrIsEmpty = errors.New("front: an empty file")

    //ErrUnknownDelim is returned when the delimiters are not known by the
    //FrontMatter implementation.
    ErrUnknownDelim = errors.New("front: unknown delim")
)

type (
    //HandlerFunc is an interface for a function that processes front matter text.
    HandlerFunc func(string) (map[string]interface{}, error)
)

//Matter is all that matters here.
type Matter struct {
    handlers map[string]HandlerFunc
}

//NewMatter creates a new Matter instance
func NewMatter() *Matter {
    return &Matter{handlers: make(map[string]HandlerFunc)}
}

//Handle registers a handler for the given frontmatter delimiter
func (m *Matter) Handle(delim string, fn HandlerFunc) {
    m.handlers[delim] = fn
}

// Parse parses the input and extracts the frontmatter
func (m *Matter) Parse(input io.Reader) (front map[string]interface{}, body string, err error) {
    return m.parse(input)
}

func (m *Matter) parse(input io.Reader) (front map[string]interface{}, body string, err error) {
    var getFront = func(f string) string {
        return strings.TrimSpace(f[3:])
    }
    f, body, err := m.splitFront(input)
    if err != nil {
        return nil, "", err
    }
    h := m.handlers[f[:3]]
    front, err = h(getFront(f))
    if err != nil {
        return nil, "", err
    }
    return front, body, nil

}

func sniffDelim(input []byte) (string, error) {
    if len(input) < 4 {
        return "", ErrIsEmpty
    }
    return string(input[:3]), nil
}

func (m *Matter) splitFront(input io.Reader) (front, body string, err error) {
    bufsize := 1024 * 1024
    buf := make([]byte, bufsize)

    s := bufio.NewScanner(input)
    // Necessary so we can handle larger than default 4096b buffer
    s.Buffer(buf, bufsize)

    rst := make([]string, 2)
    s.Split(m.split)
    n := 0
    for s.Scan() {
        if n == 0 {
            rst[0] = s.Text()
        } else if n == 1 {
            rst[1] = s.Text()
        }
        n++
    }
    if err = s.Err(); err != nil {
        return
    }
    return rst[0], rst[1], nil
}

//split implements bufio.SplitFunc for splitting front matter from the body text.
func (m *Matter) split(data []byte, atEOF bool) (advance int, token []byte, err error) {
    if atEOF && len(data) == 0 {
        return 0, nil, nil
    }
    delim, err := sniffDelim(data)
    if err != nil {
        return 0, nil, err
    }
    if _, ok := m.handlers[delim]; !ok {
        return 0, nil, ErrUnknownDelim
    }
    if x := bytes.Index(data, []byte(delim)); x >= 0 {
        // check the next delim index
        if next := bytes.Index(data[x+len(delim):], []byte(delim)); next > 0 {
            return next + len(delim), dropSpace(data[:next+len(delim)]), nil
        }
        return len(data), dropSpace(data[x+len(delim):]), nil
    }
    if atEOF {
        return len(data), data, nil
    }
    return 0, nil, nil
}

func dropSpace(d []byte) []byte {
    return bytes.TrimSpace(d)
}

//JSONHandler implements HandlerFunc interface. It extracts front matter data from the given
// string argument by interpreting it as a json string.
func JSONHandler(front string) (map[string]interface{}, error) {
    var rst interface{}
    err := json.Unmarshal([]byte(front), &rst)
    if err != nil {
        return nil, err
    }
    return rst.(map[string]interface{}), nil
}

//YAMLHandler decodes a yaml string into a go map[string]interface{}
func YAMLHandler(front string) (map[string]interface{}, error) {
    out := make(map[string]interface{})
    err := yaml.Unmarshal([]byte(front), out)
    if err != nil {
        return nil, err
    }
    return out, nil
}
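This is a vendored copy of the frontmatter library being removed; the API boils down to registering a handler per delimiter and calling Parse. A short sketch with a made-up post, assuming the upstream github.com/gernest/front import:

package main

import (
    "fmt"
    "log"
    "strings"

    "github.com/gernest/front"
)

func main() {
    m := front.NewMatter()
    m.Handle("---", front.YAMLHandler)

    post := "---\ntitle: Hello\ndate: 2017-05-01\n---\nBody text goes here.\n"

    // Parse returns the decoded front matter map and the remaining body.
    fm, body, err := m.Parse(strings.NewReader(post))
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(fm["title"], "/", strings.TrimSpace(body))
}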
@ -0,0 +1,237 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package agent provides hooks programs can register to retrieve
// diagnostics data by using gops.
package agent

import (
    "fmt"
    "io"
    "io/ioutil"
    "net"
    "os"
    gosignal "os/signal"
    "runtime"
    "runtime/pprof"
    "runtime/trace"
    "strconv"
    "sync"
    "time"

    "bufio"

    "github.com/google/gops/internal"
    "github.com/google/gops/signal"
    "github.com/kardianos/osext"
)

const defaultAddr = "127.0.0.1:0"

var (
    mu       sync.Mutex
    portfile string
    listener net.Listener

    units = []string{" bytes", "KB", "MB", "GB", "TB", "PB"}
)

// Options allows configuring the started agent.
type Options struct {
    // Addr is the host:port the agent will be listening at.
    // Optional.
    Addr string

    // NoShutdownCleanup tells the agent not to automatically cleanup
    // resources if the running process receives an interrupt.
    // Optional.
    NoShutdownCleanup bool
}

// Listen starts the gops agent on a host process. Once agent started, users
// can use the advanced gops features. The agent will listen to Interrupt
// signals and exit the process, if you need to perform further work on the
// Interrupt signal use the options parameter to configure the agent
// accordingly.
//
// Note: The agent exposes an endpoint via a TCP connection that can be used by
// any program on the system. Review your security requirements before starting
// the agent.
func Listen(opts *Options) error {
    mu.Lock()
    defer mu.Unlock()

    if opts == nil {
        opts = &Options{}
    }
    if portfile != "" {
        return fmt.Errorf("gops: agent already listening at: %v", listener.Addr())
    }

    gopsdir, err := internal.ConfigDir()
    if err != nil {
        return err
    }
    err = os.MkdirAll(gopsdir, os.ModePerm)
    if err != nil {
        return err
    }
    if !opts.NoShutdownCleanup {
        gracefulShutdown()
    }

    addr := opts.Addr
    if addr == "" {
        addr = defaultAddr
    }
    ln, err := net.Listen("tcp", addr)
    if err != nil {
        return err
    }
    listener = ln
    port := listener.Addr().(*net.TCPAddr).Port
    portfile = fmt.Sprintf("%s/%d", gopsdir, os.Getpid())
    err = ioutil.WriteFile(portfile, []byte(strconv.Itoa(port)), os.ModePerm)
    if err != nil {
        return err
    }

    go listen()
    return nil
}

func listen() {
    buf := make([]byte, 1)
    for {
        fd, err := listener.Accept()
        if err != nil {
            fmt.Fprintf(os.Stderr, "gops: %v", err)
            if netErr, ok := err.(net.Error); ok && !netErr.Temporary() {
                break
            }
            continue
        }
        if _, err := fd.Read(buf); err != nil {
            fmt.Fprintf(os.Stderr, "gops: %v", err)
            continue
        }
        if err := handle(fd, buf); err != nil {
            fmt.Fprintf(os.Stderr, "gops: %v", err)
            continue
        }
        fd.Close()
    }
}

func gracefulShutdown() {
    c := make(chan os.Signal, 1)
    gosignal.Notify(c, os.Interrupt)
    go func() {
        // cleanup the socket on shutdown.
        <-c
        Close()
        os.Exit(1)
    }()
}

// Close closes the agent, removing temporary files and closing the TCP listener.
// If no agent is listening, Close does nothing.
func Close() {
    mu.Lock()
    defer mu.Unlock()

    if portfile != "" {
        os.Remove(portfile)
        portfile = ""
    }
    if listener != nil {
        listener.Close()
    }
}

func formatBytes(val uint64) string {
    var i int
    var target uint64
    for i = range units {
        target = 1 << uint(10*(i+1))
        if val < target {
            break
        }
    }
    if i > 0 {
        return fmt.Sprintf("%0.2f%s (%d bytes)", float64(val)/(float64(target)/1024), units[i], val)
    }
    return fmt.Sprintf("%d bytes", val)
}

func handle(conn io.Writer, msg []byte) error {
    switch msg[0] {
    case signal.StackTrace:
        return pprof.Lookup("goroutine").WriteTo(conn, 2)
    case signal.GC:
        runtime.GC()
        _, err := conn.Write([]byte("ok"))
        return err
    case signal.MemStats:
        var s runtime.MemStats
        runtime.ReadMemStats(&s)
        fmt.Fprintf(conn, "alloc: %v\n", formatBytes(s.Alloc))
        fmt.Fprintf(conn, "total-alloc: %v\n", formatBytes(s.TotalAlloc))
        fmt.Fprintf(conn, "sys: %v\n", formatBytes(s.Sys))
        fmt.Fprintf(conn, "lookups: %v\n", s.Lookups)
        fmt.Fprintf(conn, "mallocs: %v\n", s.Mallocs)
        fmt.Fprintf(conn, "frees: %v\n", s.Frees)
        fmt.Fprintf(conn, "heap-alloc: %v\n", formatBytes(s.HeapAlloc))
        fmt.Fprintf(conn, "heap-sys: %v\n", formatBytes(s.HeapSys))
        fmt.Fprintf(conn, "heap-idle: %v\n", formatBytes(s.HeapIdle))
        fmt.Fprintf(conn, "heap-in-use: %v\n", formatBytes(s.HeapInuse))
        fmt.Fprintf(conn, "heap-released: %v\n", formatBytes(s.HeapReleased))
        fmt.Fprintf(conn, "heap-objects: %v\n", s.HeapObjects)
        fmt.Fprintf(conn, "stack-in-use: %v\n", formatBytes(s.StackInuse))
        fmt.Fprintf(conn, "stack-sys: %v\n", formatBytes(s.StackSys))
        fmt.Fprintf(conn, "next-gc: when heap-alloc >= %v\n", formatBytes(s.NextGC))
        lastGC := "-"
        if s.LastGC != 0 {
            lastGC = fmt.Sprint(time.Unix(0, int64(s.LastGC)))
        }
        fmt.Fprintf(conn, "last-gc: %v\n", lastGC)
        fmt.Fprintf(conn, "gc-pause: %v\n", time.Duration(s.PauseTotalNs))
        fmt.Fprintf(conn, "num-gc: %v\n", s.NumGC)
        fmt.Fprintf(conn, "enable-gc: %v\n", s.EnableGC)
        fmt.Fprintf(conn, "debug-gc: %v\n", s.DebugGC)
    case signal.Version:
        fmt.Fprintf(conn, "%v\n", runtime.Version())
    case signal.HeapProfile:
        pprof.WriteHeapProfile(conn)
    case signal.CPUProfile:
        if err := pprof.StartCPUProfile(conn); err != nil {
            return err
        }
        time.Sleep(30 * time.Second)
        pprof.StopCPUProfile()
    case signal.Stats:
        fmt.Fprintf(conn, "goroutines: %v\n", runtime.NumGoroutine())
        fmt.Fprintf(conn, "OS threads: %v\n", pprof.Lookup("threadcreate").Count())
        fmt.Fprintf(conn, "GOMAXPROCS: %v\n", runtime.GOMAXPROCS(0))
        fmt.Fprintf(conn, "num CPU: %v\n", runtime.NumCPU())
    case signal.BinaryDump:
        path, err := osext.Executable()
        if err != nil {
            return err
        }
        f, err := os.Open(path)
        if err != nil {
            return err
        }
        defer f.Close()

        _, err = bufio.NewReader(f).WriteTo(conn)
        return err
    case signal.Trace:
        trace.Start(conn)
        time.Sleep(5 * time.Second)
        trace.Stop()
    }
    return nil
}
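Wiring the agent into a program is a single call at startup. A minimal sketch, using only the Listen entry point shown above; note the agent endpoint is plain TCP on localhost, so weigh that before enabling it in production:

package main

import (
    "log"

    "github.com/google/gops/agent"
)

func main() {
    // Start the diagnostics agent; it listens on a random localhost port
    // and records that port in the gops config dir so the gops CLI can
    // find this process.
    if err := agent.Listen(nil); err != nil {
        log.Fatal(err)
    }

    // ... the rest of the program runs as usual; block here for the demo.
    select {}
}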
@ -0,0 +1,52 @@
package internal

import (
    "errors"
    "fmt"
    "io/ioutil"
    "os"
    "os/user"
    "path/filepath"
    "runtime"
    "strings"
)

func ConfigDir() (string, error) {
    if runtime.GOOS == "windows" {
        return filepath.Join(os.Getenv("APPDATA"), "gops"), nil
    }
    homeDir := guessUnixHomeDir()
    if homeDir == "" {
        return "", errors.New("unable to get current user home directory: os/user lookup failed; $HOME is empty")
    }
    return filepath.Join(homeDir, ".config", "gops"), nil
}

func guessUnixHomeDir() string {
    usr, err := user.Current()
    if err == nil {
        return usr.HomeDir
    }
    return os.Getenv("HOME")
}

func PIDFile(pid int) (string, error) {
    gopsdir, err := ConfigDir()
    if err != nil {
        return "", err
    }
    return fmt.Sprintf("%s/%d", gopsdir, pid), nil
}

func GetPort(pid int) (string, error) {
    portfile, err := PIDFile(pid)
    if err != nil {
        return "", err
    }
    b, err := ioutil.ReadFile(portfile)
    if err != nil {
        return "", err
    }
    port := strings.TrimSpace(string(b))
    return port, nil
}
@ -0,0 +1,35 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package signal contains signals used to communicate to the gops agents.
package signal

const (
    // StackTrace represents a command to print stack trace.
    StackTrace = byte(0x1)

    // GC runs the garbage collector.
    GC = byte(0x2)

    // MemStats reports memory stats.
    MemStats = byte(0x3)

    // Version prints the Go version.
    Version = byte(0x4)

    // HeapProfile starts `go tool pprof` with the current memory profile.
    HeapProfile = byte(0x5)

    // CPUProfile starts `go tool pprof` with the current CPU profile
    CPUProfile = byte(0x6)

    // Stats returns Go runtime statistics such as number of goroutines, GOMAXPROCS, and NumCPU.
    Stats = byte(0x7)

    // Trace starts the Go execution tracer, waits 5 seconds and launches the trace tool.
    Trace = byte(0x8)

    // BinaryDump returns running binary file.
    BinaryDump = byte(0x9)
)
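The agent shown earlier reads exactly one of these bytes per connection and streams its answer back, so a client is little more than a dial, a one-byte write, and a copy. A sketch under those assumptions; the port is taken from the command line here rather than from the agent's port file, and error handling is kept minimal:

package main

import (
    "io"
    "log"
    "net"
    "os"

    "github.com/google/gops/signal"
)

func main() {
    // The agent writes its port to <config dir>/<pid>; pass that port as
    // the first argument to keep this sketch self-contained.
    conn, err := net.Dial("tcp", "127.0.0.1:"+os.Args[1])
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    // One signal byte per connection; the agent streams its reply back.
    if _, err := conn.Write([]byte{signal.StackTrace}); err != nil {
        log.Fatal(err)
    }
    io.Copy(os.Stdout, conn)
}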