backend: serve files from a .asar

This commit is contained in:
Cadey Ratio 2017-01-09 13:41:39 -08:00
parent 49bbd5d052
commit b288844193
12 changed files with 840 additions and 14 deletions

View File

@ -12,6 +12,7 @@ import (
"strings"
"time"
"github.com/Xe/asarfs"
"github.com/gernest/front"
)
@ -126,8 +127,23 @@ func main() {
Body: rbody,
})
})
http.Handle("/dist/", http.FileServer(http.Dir("./frontend/static/")))
http.Handle("/static/", http.FileServer(http.Dir(".")))
if os.Getenv("USE_ASAR") == "yes" {
fe, err := asarfs.New("./frontend.asar", http.HandlerFunc(writeIndexHTML))
if err != nil {
log.Fatal(err)
}
st, err := asarfs.New("./static.asar", http.HandlerFunc(writeIndexHTML))
if err != nil {
log.Fatal(err)
}
http.Handle("/dist/", fe)
http.Handle("/static", st)
} else {
http.Handle("/dist/", http.FileServer(http.Dir("./frontend/static/")))
http.Handle("/static/", http.FileServer(http.Dir(".")))
}
http.HandleFunc("/", writeIndexHTML)
port := os.Getenv("PORT")

View File

@ -1,2 +1,4 @@
94c8a5673a78ada68d7b97e1d4657cffc6ec68d7 github.com/gernest/front
a5b47d31c556af34a302ce5d659e6fea44d90de0 gopkg.in/yaml.v2
b68094ba95c055dfda888baa8947dfe44c20b1ac github.com/Xe/asarfs
5e4d0891fe789f2da0c2d5afada3b6a1ede6d64c layeh.com/asar

View File

@ -0,0 +1,59 @@
package asarfs
import (
"mime"
"net/http"
"os"
"path/filepath"
"strings"
"layeh.com/asar"
)
// ASARfs serves the contents of an ASAR archive over HTTP.
type ASARfs struct {
	fin      *os.File     // open handle to the archive; kept open for the life of the server
	ar       *asar.Entry  // root entry of the decoded archive
	notFound http.Handler // fallback handler for paths not present in the archive
}
// Close releases the underlying archive file handle. Requests served after
// Close will fail, since file contents are read from this handle.
func (a *ASARfs) Close() error {
	return a.fin.Close()
}
// ServeHTTP looks the request path up inside the archive and serves the
// matching file, delegating to the notFound handler when it is absent.
func (a *ASARfs) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	// Use the parsed path, not RequestURI: RequestURI still carries the raw
	// query string ("/a.css?v=2"), which would break the archive lookup.
	// This also avoids mutating the caller's *http.Request.
	p := r.URL.Path
	if p == "/" {
		p = "/index.html"
	}

	f := a.ar.Find(strings.Split(p, "/")[1:]...)
	if f == nil {
		a.notFound.ServeHTTP(w, r)
		return
	}

	// Only set Content-Type when the extension is recognized; an empty
	// value would suppress net/http's content sniffing.
	if mimeType := mime.TypeByExtension(filepath.Ext(f.Name)); mimeType != "" {
		w.Header().Set("Content-Type", mimeType)
	}
	f.WriteTo(w) // best-effort: the client may disconnect mid-transfer
}
// New opens the ASAR archive at archivePath and returns an ASARfs serving
// its contents. notFound handles requests for paths missing from the
// archive. The caller should Close the returned ASARfs when done with it.
func New(archivePath string, notFound http.Handler) (*ASARfs, error) {
	fin, err := os.Open(archivePath)
	if err != nil {
		return nil, err
	}
	root, err := asar.Decode(fin)
	if err != nil {
		fin.Close() // don't leak the file handle on a malformed archive
		return nil, err
	}
	return &ASARfs{
		fin:      fin,
		ar:       root,
		notFound: notFound,
	}, nil
}

View File

@ -0,0 +1,24 @@
// +build ignore
package main
import (
"log"
"net/http"
"os"
"github.com/Xe/asarfs"
)
func do404(w http.ResponseWriter, r *http.Request) {
http.Error(w, "Not found", http.StatusNotFound)
}
// main serves the contents of ./static.asar on $PORT, answering 404 for
// anything not in the archive.
func main() {
	fs, err := asarfs.New("./static.asar", http.HandlerFunc(do404))
	if err != nil {
		log.Fatal(err)
	}
	// ListenAndServe only returns on failure; report that failure instead
	// of silently exiting with status 0.
	log.Fatal(http.ListenAndServe(":"+os.Getenv("PORT"), fs))
}

View File

@ -0,0 +1,80 @@
package asar // import "layeh.com/asar"
import (
"io"
"strings"
)
// Builder helps construct an Entry.
//
// A builder keeps track of the root Entry and the active Entry. When entries
// are added using the Add* methods, they are added as children to the active
// Entry.
type Builder struct {
	root, current *Entry // tree root, and the directory new entries are appended under
}
// Root returns the root Entry, or nil if nothing has been added yet.
func (b *Builder) Root() *Entry {
	return b.root
}
// init lazily creates the root directory entry on first use and makes it
// the active entry.
func (b *Builder) init() {
	if b.root != nil {
		return
	}
	root := &Entry{Flags: FlagDir}
	b.root = root
	b.current = root
}
// Parent sets the active entry to the parent of the active Entry (i.e. moves
// up a level).
//
// The function panics if called while the root Entry is active.
func (b *Builder) Parent() *Builder {
	cur := b.current
	if cur == b.root {
		panic("root has no parent")
	}
	b.current = cur.Parent
	return b
}
// AddString adds a new file Entry whose contents are the given string.
// It is a convenience wrapper around Add.
func (b *Builder) AddString(name, contents string, flags Flag) *Builder {
	return b.Add(name, strings.NewReader(contents), int64(len(contents)), flags)
}
// Add appends a new file Entry, size bytes long and backed by ra, as a
// child of the active Entry.
func (b *Builder) Add(name string, ra io.ReaderAt, size int64, flags Flag) *Builder {
	b.init()
	parent := b.current
	parent.Children = append(parent.Children, &Entry{
		Name:   name,
		Size:   size,
		Flags:  flags,
		Parent: parent,
		r:      ra,
	})
	return b
}
// AddDir adds a new directory Entry under the active Entry, then makes the
// newly added directory the active Entry.
func (b *Builder) AddDir(name string, flags Flag) *Builder {
	b.init()
	dir := &Entry{
		Name:   name,
		Flags:  FlagDir | flags,
		Parent: b.current,
	}
	b.current.Children = append(b.current.Children, dir)
	b.current = dir
	return b
}

View File

@ -0,0 +1,64 @@
package asar // import "layeh.com/asar"
import (
"encoding/binary"
"errors"
"io"
)
var (
errMalformed = errors.New("asar: malformed archive")
)
// Decode decodes the ASAR archive in ra.
//
// Returns the root element and nil on success. nil and an error is returned on
// failure.
//
// On-disk layout (all little-endian uint32, Chromium "pickle" framing):
//   [4]                  size of the next field, always 4
//   [headerSize]         byte length of the pickle object holding the header
//   [headerSize-4]       pickle payload size
//   [headerStringSize]   length of the JSON header string
//   [JSON header]        directory tree; file offsets are relative to baseOffset
func Decode(ra io.ReaderAt) (*Entry, error) {
	headerSize := uint32(0)
	headerStringSize := uint32(0)

	// [pickle object header (4 bytes) == 4]
	// [pickle uint32 = $header_object_size]
	{
		var buff [8]byte
		if n, _ := ra.ReadAt(buff[:], 0); n != 8 {
			return nil, errMalformed
		}

		dataSize := binary.LittleEndian.Uint32(buff[:4])
		if dataSize != 4 {
			return nil, errMalformed
		}

		headerSize = binary.LittleEndian.Uint32(buff[4:8])
	}

	// [pickle object header (4 bytes)]
	// [pickle data header (4 bytes) == $string_size]
	// [pickle string ($string_size bytes)]
	{
		var buff [8]byte
		if n, _ := ra.ReadAt(buff[:], 8); n != 8 {
			return nil, errMalformed
		}

		headerObjectSize := binary.LittleEndian.Uint32(buff[:4])
		if headerObjectSize != headerSize-4 {
			return nil, errMalformed
		}

		headerStringSize = binary.LittleEndian.Uint32(buff[4:8])
	}

	// read header string; file data begins right after it, padded to a
	// uint32 boundary because pickle objects are uint32 aligned.
	headerSection := io.NewSectionReader(ra, 8+8, int64(headerStringSize))
	baseOffset := 8 + 8 + int64(headerStringSize)
	// NOTE(review): `x += x % 4` only lands on a 4-byte boundary when x%4 is
	// 0 or 2; true round-up padding would add (4 - x%4) % 4. EncodeTo in this
	// package pads the same way, so the pair is self-consistent — confirm
	// against archives produced by Electron's asar tool before changing.
	baseOffset += baseOffset % 4

	root, err := decodeHeader(ra, headerSection, baseOffset)
	if err != nil {
		return nil, err
	}
	return root, nil
}

View File

@ -0,0 +1,2 @@
// Package asar reads and writes ASAR (Atom-Shell Archive) archives.
package asar // import "layeh.com/asar"

View File

@ -0,0 +1,122 @@
package asar // import "layeh.com/asar"
import (
"bytes"
"encoding/binary"
"encoding/json"
"io"
"strconv"
)
// entryEncoder accumulates the hand-built JSON header and the ordered list
// of file-content readers while an Entry tree is serialized by EncodeTo.
type entryEncoder struct {
	Contents      []io.Reader   // file bodies, in the order they follow the header
	CurrentOffset int64         // running offset of the next file within the data section
	Header        bytes.Buffer  // JSON header under construction
	Encoder       *json.Encoder // emits individual JSON values into Header
}
// Write JSON-encodes v onto the header buffer. The Encode error is ignored:
// encoding strings, numbers, and bools into a bytes.Buffer cannot fail.
func (enc *entryEncoder) Write(v interface{}) {
	enc.Encoder.Encode(v)
	enc.Header.Truncate(enc.Header.Len() - 1) // cut off trailing new line
}
// WriteField emits a `"key":value` pair onto the header buffer. The caller
// supplies any surrounding braces and separating commas.
func (enc *entryEncoder) WriteField(key string, v interface{}) {
	enc.Write(key)
	enc.Header.WriteByte(':')
	enc.Write(v)
}
// Encode writes e's metadata into the header as one JSON object, recursing
// into children for directories and queueing packed file contents onto
// enc.Contents. It panics with errHeader on invalid child names; the panic
// is recovered in EncodeTo.
func (enc *entryEncoder) Encode(e *Entry) error {
	enc.Header.WriteByte('{')
	if e.Flags&FlagDir != 0 {
		// directory: {"files":{"name":{...},...}}
		enc.Write("files")
		enc.Header.WriteString(":{")
		for i, child := range e.Children {
			if i > 0 {
				enc.Header.WriteByte(',')
			}
			if !validFilename(child.Name) {
				panic(errHeader)
			}
			enc.Write(child.Name)
			enc.Header.WriteByte(':')
			if err := enc.Encode(child); err != nil {
				return err
			}
		}
		enc.Header.WriteByte('}')
	} else {
		// file: {"size":n,["executable":true,]"offset":"n"|"unpacked":true}
		enc.Write("size")
		enc.Header.WriteByte(':')
		enc.Write(e.Size)

		if e.Flags&FlagExecutable != 0 {
			enc.Header.WriteByte(',')
			enc.WriteField("executable", true)
		}

		enc.Header.WriteByte(',')
		if e.Flags&FlagUnpacked == 0 {
			// offsets are emitted as JSON strings, per the asar format
			enc.WriteField("offset", strconv.FormatInt(enc.CurrentOffset, 10))
			enc.CurrentOffset += e.Size
			enc.Contents = append(enc.Contents, io.NewSectionReader(e.r, e.baseOffset, e.Size))
		} else {
			// unpacked entries live outside the archive; no contents queued
			enc.WriteField("unpacked", true)
		}
	}
	enc.Header.WriteByte('}')
	return nil
}
// EncodeTo writes an ASAR archive containing Entry's descendants. This function
// is usually called on the root entry. It returns the number of bytes written
// and the first error encountered, if any.
func (e *Entry) EncodeTo(w io.Writer) (n int64, err error) {
	defer func() {
		if r := recover(); r != nil {
			// The encoder signals header problems by panicking with an
			// error; turn that back into a return value. The two-result
			// assertion is required here: a bare r.(error) would itself
			// panic when r is not an error, masking the original value.
			if e, ok := r.(error); ok {
				err = e
			} else {
				panic(r)
			}
		}
	}()

	encoder := entryEncoder{}
	{
		// reserve room for the four pickle framing words, filled in below
		var reserve [16]byte
		encoder.Header.Write(reserve[:])
	}
	encoder.Encoder = json.NewEncoder(&encoder.Header)
	if err = encoder.Encode(e); err != nil {
		return
	}
	{
		// pad the header; mirrors the alignment math used by Decode
		var padding [3]byte
		encoder.Header.Write(padding[:encoder.Header.Len()%4])
	}

	header := encoder.Header.Bytes()
	binary.LittleEndian.PutUint32(header[:4], 4)
	binary.LittleEndian.PutUint32(header[4:8], 8+uint32(encoder.Header.Len()))
	binary.LittleEndian.PutUint32(header[8:12], 4+uint32(encoder.Header.Len()))
	binary.LittleEndian.PutUint32(header[12:16], uint32(encoder.Header.Len()))

	n, err = encoder.Header.WriteTo(w)
	if err != nil {
		return
	}

	// header first, then each packed file's contents in offset order
	for _, chunk := range encoder.Contents {
		var written int64
		written, err = io.Copy(w, chunk)
		n += written
		if err != nil {
			return
		}
	}
	return
}

View File

@ -0,0 +1,227 @@
package asar // import "layeh.com/asar"
import (
"errors"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
// Flag is a bit field of Entry flags.
type Flag uint32

const (
	// FlagNone denotes an entry with no flags.
	FlagNone Flag = 0
	// FlagDir denotes a directory entry.
	// (iota is 1 in this spec, so the bit values are 2, 4, 8.)
	FlagDir Flag = 1 << iota
	// FlagExecutable denotes a file with the executable bit set.
	FlagExecutable
	// FlagUnpacked denotes that the entry's contents are not included in
	// the archive.
	FlagUnpacked
)
// Entry is a file or a folder in an ASAR archive.
type Entry struct {
	Name     string
	Size     int64 // content length in bytes
	Offset   int64 // content position relative to baseOffset
	Flags    Flag
	Parent   *Entry   // nil for the root entry
	Children []*Entry // populated only for directory entries

	r          io.ReaderAt // source the contents are read from
	baseOffset int64       // start of the archive's data section within r
}
// New creates a new Entry describing a file of the given size at the given
// offset, read from ra.
func New(name string, ra io.ReaderAt, size, offset int64, flags Flag) *Entry {
	e := Entry{
		Name:   name,
		Flags:  flags,
		Size:   size,
		Offset: offset,
		r:      ra,
	}
	return &e
}
// FileInfo returns the os.FileInfo information about the entry.
func (e *Entry) FileInfo() os.FileInfo {
	return fileInfo{e}
}

// fileInfo adapts an *Entry to the os.FileInfo interface.
type fileInfo struct {
	e *Entry
}

func (f fileInfo) Name() string { return f.e.Name }

func (f fileInfo) Size() int64 { return f.e.Size }

// Mode synthesizes permissions: directories and executables are read/execute
// for everyone, ordinary files read-only.
func (f fileInfo) Mode() os.FileMode {
	switch {
	case f.e.Flags&FlagDir != 0:
		return os.ModeDir | 0555
	case f.e.Flags&FlagExecutable != 0:
		return 0555
	default:
		return 0444
	}
}

// ModTime returns the zero time; the archive stores no timestamps.
func (f fileInfo) ModTime() time.Time { return time.Time{} }

func (f fileInfo) IsDir() bool { return f.e.Flags&FlagDir != 0 }

// Sys exposes the underlying *Entry.
func (f fileInfo) Sys() interface{} { return f.e }
// Path returns the file path to the entry, relative to the archive root.
//
// For example, given the following tree structure:
//  root
//  - sub1
//  - sub2
//    - file2.jpg
//
// file2.jpg's path would be:
//  sub2/file2.jpg
func (e *Entry) Path() string {
	if e.Parent == nil {
		return ""
	}

	// collect names leaf-first while walking up to (but excluding) the root...
	var parts []string
	for cur := e; cur != nil && cur.Parent != nil; cur = cur.Parent {
		parts = append(parts, cur.Name)
	}

	// ...then reverse so the joined result reads root-first.
	for i, j := 0, len(parts)-1; i < j; i, j = i+1, j-1 {
		parts[i], parts[j] = parts[j], parts[i]
	}
	return strings.Join(parts, "/")
}
// Open returns an *io.SectionReader of the entry's contents. nil is returned if
// the entry cannot be opened (e.g. because it is a directory, or its contents
// are stored unpacked outside the archive).
func (e *Entry) Open() *io.SectionReader {
	if e.Flags&FlagDir != 0 || e.Flags&FlagUnpacked != 0 {
		return nil
	}
	return io.NewSectionReader(e.r, e.baseOffset+e.Offset, e.Size)
}
// WriteTo writes the entry's contents to the given writer. An error is
// returned if the entry cannot be opened (e.g. if the entry is a directory).
func (e *Entry) WriteTo(w io.Writer) (n int64, err error) {
	r := e.Open()
	if r == nil {
		return 0, errors.New("asar: entry cannot be opened")
	}
	return io.Copy(w, r)
}
// Bytes returns the entry's contents as a byte slice. nil is returned if the
// entry cannot be read (it is a directory, unpacked, or the read fails).
func (e *Entry) Bytes() []byte {
	body := e.Open()
	if body == nil {
		return nil
	}
	b, err := ioutil.ReadAll(body)
	if err != nil {
		// read errors are deliberately collapsed to nil, matching Open
		return nil
	}
	return b
}
// String returns the entry's contents as a string. An empty string is
// returned if the entry cannot be read.
func (e *Entry) String() string {
	body := e.Bytes()
	if body == nil {
		return ""
	}
	return string(body)
}
// Find searches for a sub-entry of the current entry. nil is returned if the
// requested sub-entry cannot be found.
//
// For example, given the following tree structure:
//  root
//  - sub1
//  - sub2
//    - sub2.1
//      - file2.jpg
//
// The following expression would return the .jpg *Entry:
//  root.Find("sub2", "sub2.1", "file2.jpg")
func (e *Entry) Find(path ...string) *Entry {
	current := e
	for _, name := range path {
		// descend to the first child whose name matches this path element
		var next *Entry
		for _, child := range current.Children {
			if child.Name == name {
				next = child
				break
			}
		}
		if next == nil {
			return nil
		}
		current = next
	}
	return current
}
// Walk recursively walks over the entry's children. See filepath.Walk and
// filepath.WalkFunc for more information. Paths passed to walkFn are
// slash-separated and relative to e.
func (e *Entry) Walk(walkFn filepath.WalkFunc) error {
	return walk(e, "", walkFn)
}
// walk implements Walk, visiting e's children depth-first. parentPath is the
// slash-terminated prefix for this level ("" at the root).
func walk(e *Entry, parentPath string, walkFn filepath.WalkFunc) error {
	for i := 0; i < len(e.Children); i++ {
		child := e.Children[i]
		childPath := parentPath + child.Name

		// the tree is fully in memory, so walkFn never receives a read error
		err := walkFn(childPath, child.FileInfo(), nil)
		if err == filepath.SkipDir {
			// NOTE(review): filepath.Walk treats SkipDir returned for a
			// non-directory as "skip the rest of the containing directory";
			// here it only skips the single entry — confirm intended.
			continue
		}
		if err != nil {
			return err
		}

		// only directories recurse
		if child.Flags&FlagDir == 0 {
			continue
		}

		if err := walk(child, childPath+"/", walkFn); err != nil {
			return err
		}
	}
	return nil
}
// validFilename reports whether filename is usable as a single path element
// in an archive: not "." or "..", and free of NUL, backslash, and slash.
func validFilename(filename string) bool {
	switch filename {
	case ".", "..":
		return false
	}
	return !strings.ContainsAny(filename, "\x00\\/")
}

View File

@ -0,0 +1,212 @@
package asar // import "layeh.com/asar"
import (
"encoding/json"
"errors"
"io"
)
var (
errHeader = errors.New("asar: invalid file header")
)
// jsonReader is a panic-based helper for pulling expected tokens out of the
// archive's JSON header: the Expect* methods panic with errHeader on any
// mismatch, and decodeHeader recovers the panic into a returned error.
type jsonReader struct {
	ASAR       io.ReaderAt   // the archive, recorded on file entries for later reads
	BaseOffset int64         // start of the data section, recorded on file entries
	D          *json.Decoder // token stream over the JSON header
	Token      json.Token    // one-token lookahead buffer, nil when empty
}
// Peek returns the next token without consuming it, caching it in j.Token.
// nil is returned at end of input; any other decode error panics.
func (j *jsonReader) Peek() json.Token {
	if j.Token != nil {
		return j.Token
	}
	tkn, err := j.D.Token()
	if err != nil {
		if err == io.EOF {
			return nil
		}
		panic(err)
	}
	j.Token = tkn
	return tkn
}
// HasDelimRune reports whether the next token is the delimiter r (one of
// '{', '}', '[', ']'), without consuming it.
func (j *jsonReader) HasDelimRune(r rune) bool {
	d, ok := j.Peek().(json.Delim)
	return ok && rune(d) == r
}
// Next consumes and returns the next token, draining the lookahead buffer
// first. nil is returned at end of input; any other decode error panics.
func (j *jsonReader) Next() json.Token {
	if j.Token != nil {
		t := j.Token
		j.Token = nil
		return t
	}
	tkn, err := j.D.Token()
	if err != nil {
		if err == io.EOF {
			return nil
		}
		panic(err)
	}
	return tkn
}
// NextDelimRune consumes the next token, which must be a JSON delimiter,
// and returns it as a rune. Panics with errHeader otherwise.
func (j *jsonReader) NextDelimRune() rune {
	d, ok := j.Next().(json.Delim)
	if !ok {
		panic(errHeader)
	}
	return rune(d)
}
// ExpectDelim consumes the next token and panics with errHeader unless it
// is the delimiter r.
func (j *jsonReader) ExpectDelim(r rune) {
	next := j.NextDelimRune()
	if next != r {
		panic(errHeader)
	}
}
// ExpectBool consumes the next token, which must be a boolean, and returns
// it. Panics with errHeader otherwise.
func (j *jsonReader) ExpectBool() bool {
	val, ok := j.Next().(bool)
	if !ok {
		panic(errHeader)
	}
	return val
}
// ExpectString consumes the next token, which must be a string, and returns
// it. Panics with errHeader otherwise.
func (j *jsonReader) ExpectString() string {
	s, ok := j.Next().(string)
	if !ok {
		panic(errHeader)
	}
	return s
}
// ExpectStringVal consumes the next token and panics with errHeader unless
// it is exactly the string val.
func (j *jsonReader) ExpectStringVal(val string) {
	str := j.ExpectString()
	if str != val {
		panic(errHeader)
	}
}
// ExpectInt64 consumes the next token and returns it as an int64. Both JSON
// numbers and numeric strings are accepted (asar stores file offsets as
// strings). Panics with errHeader on any other token or on a non-integer.
func (j *jsonReader) ExpectInt64() int64 {
	var number json.Number
	switch j.Peek().(type) {
	case string:
		number = json.Number(j.ExpectString())
	case json.Number:
		number = j.Next().(json.Number)
	default:
		panic(errHeader)
	}
	val, err := number.Int64()
	if err != nil {
		panic(errHeader)
	}
	return val
}
// parseRoot parses the top-level header object, {"files":{...}}, and
// returns the resulting root directory Entry. Panics with errHeader on
// malformed input, including trailing tokens after the closing brace.
func parseRoot(r *jsonReader) *Entry {
	entry := &Entry{
		Flags: FlagDir,
	}
	r.ExpectDelim('{')
	r.ExpectStringVal("files")
	parseFiles(r, entry)
	r.ExpectDelim('}')
	if r.Next() != nil {
		panic(errHeader)
	}
	return entry
}
// parseFiles parses a {"name":{...},...} object, adding each parsed entry
// as a child of parent.
func parseFiles(r *jsonReader, parent *Entry) {
	r.ExpectDelim('{')
	for !r.HasDelimRune('}') {
		parseEntry(r, parent)
	}
	r.ExpectDelim('}')
}
// parseEntry parses one "name":{...} pair and appends the resulting Entry
// to parent.Children. Recognized keys: "files" (directory contents),
// "size", "offset", "unpacked", "executable". Any unknown key, empty name,
// or invalid filename panics with errHeader.
func parseEntry(r *jsonReader, parent *Entry) {
	name := r.ExpectString()
	if name == "" {
		panic(errHeader)
	}
	if !validFilename(name) {
		panic(errHeader)
	}

	r.ExpectDelim('{')

	child := &Entry{
		Name:   name,
		Parent: parent,
	}

	for !r.HasDelimRune('}') {
		switch r.ExpectString() {
		case "files":
			child.Flags |= FlagDir
			parseFiles(r, child)
		case "size":
			child.Size = r.ExpectInt64()
		case "offset":
			child.Offset = r.ExpectInt64()
		case "unpacked":
			if r.ExpectBool() {
				child.Flags |= FlagUnpacked
			}
		case "executable":
			if r.ExpectBool() {
				child.Flags |= FlagExecutable
			}
		default:
			panic(errHeader)
		}
	}

	// only files read contents from the archive; directories hold children
	if child.Flags&FlagDir == 0 {
		child.r = r.ASAR
		child.baseOffset = r.BaseOffset
	}

	parent.Children = append(parent.Children, child)
	r.ExpectDelim('}')
}
// decodeHeader parses the JSON header read from header and returns the root
// Entry. offset is the absolute start of the archive's data section; it is
// recorded on every file entry so contents can be located later. Panics
// raised by the jsonReader helpers are converted back into a returned error.
func decodeHeader(asar io.ReaderAt, header *io.SectionReader, offset int64) (entry *Entry, err error) {
	decoder := json.NewDecoder(header)
	decoder.UseNumber()
	reader := jsonReader{
		ASAR:       asar,
		BaseOffset: offset,
		D:          decoder,
	}
	defer func() {
		if r := recover(); r != nil {
			// Use the two-result assertion: a bare r.(error) would panic
			// again when r is not an error, hiding the original value.
			if e, ok := r.(error); ok {
				err = e
			} else {
				panic(r)
			}
		}
	}()
	entry = parseRoot(&reader)
	return
}

34
box.rb
View File

@ -20,27 +20,37 @@ run %q[ cd /usr/local && curl -o node.tar.xz https://nodejs.org/dist/v6.9.2/node
### Copy files
run "mkdir -p /site"
copy "./backend", "/site/backend"
copy "./blog", "/site/blog"
copy "./frontend/package.json", "/site/frontend/package.json"
copy "./frontend/bower.json", "/site/frontend/bower.json"
copy "./frontend/webpack.production.config.js", "/site/frontend/webpack.production.config.js"
copy "./frontend/src", "/site/frontend/src"
copy "./frontend/support", "/site/frontend/support"
copy "./static", "/site/static"
copy "./build.sh", "/site/build.sh"
copy "./run.sh", "/site/run.sh"
# put copies ./<file> from the build context into /site/<file> in the image.
def put(file)
  copy "./#{file}", "/site/#{file}"
end
files = [
"backend",
"blog",
"frontend/package.json",
"frontend/bower.json",
"frontend/webpack.production.config.js",
"frontend/src",
"static",
"build.sh",
"run.sh",
]
files.each { |x| put(x) }
### Build
run %q[ cd /site && bash ./build.sh ]
### Cleanup
run %q[ rm -rf /usr/local/go /usr/local/node /site/frontend/node_modules /site/frontend/bower_components /go /site/backend ]
run %q[ apt-get remove -y xz-utils bzip2 git-core ]
run %q[ rm -rf /usr/local/go /usr/local/node /site/frontend/node_modules /site/frontend/bower_components /go /site/backend /tmp/phantomjs /site/frontend /site/static ]
run %q[ apt-get remove -y xz-utils bzip2 git-core && apt-get -y autoremove && apt-get clean ]
### Runtime
entrypoint "/sbin/my_init"
cmd "/site/run.sh"
env "USE_ASAR" => "yes"
flatten
tag "xena/christine.website"

View File

@ -5,15 +5,23 @@ set -x
export PATH="$PATH:/usr/local/go/bin:/usr/local/node/bin"
export CI="true"
npm install -g asar
(cd /site/frontend
yes | npm install
npm install -g bower
yes 2 | bower install --allow-root
npm run build
asar pack static ../frontend.asar
rm -rf bower_components node_modules) &
(cd /site/backend/christine.website
go build
mv christine.website /usr/bin) &
(cd /site
asar pack static ./static.asar) &
wait
rm -rf /usr/lib/node_modules