move some tools into a new contrib repo

This commit is contained in:
Cadey Ratio 2018-01-27 08:31:14 -08:00
parent 5e4f1618a4
commit 12e7f0c3cc
1919 changed files with 6 additions and 464447 deletions

View File

@ -1,4 +1,4 @@
FROM xena/go-mini:1.9.2
FROM xena/go-mini:1.9.3
ENV CGO_ENABLED=0
ENV PATH=$PATH:/root/go/bin

526
Gopkg.lock generated
View File

@ -1,24 +1,6 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
branch = "master"
name = "github.com/ThomasRooney/gexpect"
packages = ["."]
revision = "5482f03509440585d13d8f648989e05903001842"
[[projects]]
branch = "master"
name = "github.com/Xe/eclier"
packages = ["."]
revision = "3cde6c5f47044f4875c4b7fe6b12e4e6000608ea"
[[projects]]
branch = "master"
name = "github.com/Xe/gluanetrc"
packages = ["."]
revision = "af26c7928995089c19896bcc5d0f8ba48a7930a9"
[[projects]]
branch = "master"
name = "github.com/Xe/gopreload"
@ -40,15 +22,6 @@
packages = ["."]
revision = "62b230097e9c9534ca2074782b25d738c4b68964"
[[projects]]
branch = "master"
name = "github.com/Xe/x"
packages = [
"tools/glue/libs/gluaexpect",
"tools/glue/libs/gluasimplebox"
]
revision = "860ea0dedb8beb93b60717510eabca2ef5ffe150"
[[projects]]
branch = "master"
name = "github.com/aclements/go-moremath"
@ -64,18 +37,6 @@
]
revision = "8d6ce0550041f9d97e7f15ec27ed489f8bbbb0fb"
[[projects]]
name = "github.com/agext/levenshtein"
packages = ["."]
revision = "5f10fee965225ac1eecdc234c09daf5cd9e7f7b6"
version = "v1.2.1"
[[projects]]
branch = "master"
name = "github.com/ailncode/gluaxmlpath"
packages = ["."]
revision = "6ce478ecb4a60c4fc8929838e0b21b7fb7ca7440"
[[projects]]
branch = "master"
name = "github.com/alecthomas/template"
@ -91,24 +52,6 @@
packages = ["."]
revision = "2efee857e7cfd4f3d0138cc3cbb1b4966962b93a"
[[projects]]
branch = "master"
name = "github.com/apparentlymart/go-cidr"
packages = ["cidr"]
revision = "2bd8b58cf4275aeb086ade613de226773e29e853"
[[projects]]
branch = "master"
name = "github.com/apparentlymart/go-textseg"
packages = ["textseg"]
revision = "b836f5c4d331d1945a2fead7188db25432d73b69"
[[projects]]
branch = "master"
name = "github.com/armon/go-radix"
packages = ["."]
revision = "1fca145dffbcaa8fe914309b1ec0cfc67500fe61"
[[projects]]
name = "github.com/asdine/storm"
packages = [
@ -122,50 +65,6 @@
revision = "68fc73b635f890fe7ba2f3b15ce80c85b28a744f"
version = "v2.0.2"
[[projects]]
name = "github.com/aws/aws-sdk-go"
packages = [
"aws",
"aws/awserr",
"aws/awsutil",
"aws/client",
"aws/client/metadata",
"aws/corehandlers",
"aws/credentials",
"aws/credentials/ec2rolecreds",
"aws/credentials/endpointcreds",
"aws/credentials/stscreds",
"aws/defaults",
"aws/ec2metadata",
"aws/endpoints",
"aws/request",
"aws/session",
"aws/signer/v4",
"internal/shareddefaults",
"private/protocol",
"private/protocol/query",
"private/protocol/query/queryutil",
"private/protocol/rest",
"private/protocol/restxml",
"private/protocol/xml/xmlutil",
"service/s3",
"service/sts"
]
revision = "9ed0c8de252f04ac45a65358377103d5a1aa2d92"
version = "v1.12.66"
[[projects]]
branch = "master"
name = "github.com/bgentry/go-netrc"
packages = ["netrc"]
revision = "9fd32a8b3d3d3f9d43c341bfe098430e07609480"
[[projects]]
name = "github.com/bgentry/speakeasy"
packages = ["."]
revision = "4aabc24848ce5fd31929f7d1e4ea74d3709c14cd"
version = "v0.1.0"
[[projects]]
branch = "master"
name = "github.com/bifurcation/mint"
@ -175,12 +74,6 @@
]
revision = "350f685c15fb6b89af795dafe64fad68950948e0"
[[projects]]
name = "github.com/blang/semver"
packages = ["."]
revision = "2ee87856327ba09384cabd113bc6b5d174e9ec0f"
version = "v3.5.1"
[[projects]]
branch = "master"
name = "github.com/brandur/simplebox"
@ -193,18 +86,6 @@
revision = "7cd7992b3bc86f920394f8de92c13900da1a46b7"
version = "v3.2.0"
[[projects]]
branch = "master"
name = "github.com/cjoudrey/gluahttp"
packages = ["."]
revision = "b4bfe0c50fea948dcbf3966e120996d6607bbd89"
[[projects]]
branch = "master"
name = "github.com/cjoudrey/gluaurl"
packages = ["."]
revision = "31cbb9bef199454415879f2e6d609d1136d60cad"
[[projects]]
name = "github.com/coreos/bbolt"
packages = ["."]
@ -241,18 +122,6 @@
revision = "5487b6a5fc12870425fc14d9e05a3fabddd91d7e"
version = "v0.1.0"
[[projects]]
name = "github.com/go-ini/ini"
packages = ["."]
revision = "32e4c1e6bc4e7d0d8451aa6b75200d19e37a536a"
version = "v1.32.0"
[[projects]]
branch = "master"
name = "github.com/go-serve/bindatafs"
packages = ["."]
revision = "1f30d36183f010db5e83986b3554c1a1d9f32d47"
[[projects]]
branch = "master"
name = "github.com/golang/protobuf"
@ -277,57 +146,6 @@
revision = "57e77c5c37da1f4e1af49f9d1fe760f146c1579e"
version = "v0.3.2"
[[projects]]
branch = "master"
name = "github.com/hashicorp/errwrap"
packages = ["."]
revision = "7554cd9344cec97297fa6649b055a8c98c2a1e55"
[[projects]]
branch = "master"
name = "github.com/hashicorp/go-cleanhttp"
packages = ["."]
revision = "d5fe4b57a186c716b0e00b8c301cbd9b4182694d"
[[projects]]
branch = "master"
name = "github.com/hashicorp/go-getter"
packages = [
".",
"helper/url"
]
revision = "961f56d2e93379b7d9c578e998d09257509a6f97"
[[projects]]
branch = "master"
name = "github.com/hashicorp/go-hclog"
packages = ["."]
revision = "ca137eb4b4389c9bc6f1a6d887f056bf16c00510"
[[projects]]
branch = "master"
name = "github.com/hashicorp/go-multierror"
packages = ["."]
revision = "b7773ae218740a7be65057fc60b366a49b538a44"
[[projects]]
branch = "master"
name = "github.com/hashicorp/go-plugin"
packages = ["."]
revision = "1fc09c47b843b73705f51ffb0520e3ac1bfecf99"
[[projects]]
branch = "master"
name = "github.com/hashicorp/go-uuid"
packages = ["."]
revision = "64130c7a86d732268a38cb04cfbaf0cc987fda98"
[[projects]]
branch = "master"
name = "github.com/hashicorp/go-version"
packages = ["."]
revision = "4fe82ae3040f80a03d04d2cccb5606a626b8e1ee"
[[projects]]
branch = "master"
name = "github.com/hashicorp/golang-lru"
@ -337,91 +155,6 @@
]
revision = "0a025b7e63adc15a622f29b0b2c4c3848243bbf6"
[[projects]]
branch = "master"
name = "github.com/hashicorp/hcl"
packages = [
".",
"hcl/ast",
"hcl/parser",
"hcl/scanner",
"hcl/strconv",
"hcl/token",
"json/parser",
"json/scanner",
"json/token"
]
revision = "23c074d0eceb2b8a5bfdbb271ab780cde70f05a8"
[[projects]]
branch = "master"
name = "github.com/hashicorp/hcl2"
packages = [
"gohcl",
"hcl",
"hcl/hclsyntax",
"hcl/json",
"hcldec",
"hclparse"
]
revision = "613331e829930a2321c1de7227c9483d9c76c3f4"
[[projects]]
branch = "master"
name = "github.com/hashicorp/hil"
packages = [
".",
"ast",
"parser",
"scanner"
]
revision = "fa9f258a92500514cc8e9c67020487709df92432"
[[projects]]
name = "github.com/hashicorp/terraform"
packages = [
"config",
"config/configschema",
"config/hcl2shim",
"config/module",
"dag",
"flatmap",
"helper/hashcode",
"helper/hilmapstructure",
"helper/schema",
"moduledeps",
"plugin",
"plugin/discovery",
"registry",
"registry/regsrc",
"registry/response",
"svchost",
"svchost/auth",
"svchost/disco",
"terraform",
"tfdiags",
"version"
]
revision = "a6008b8a48a749c7c167453b9cf55ffd572b9a5d"
version = "v0.11.2"
[[projects]]
branch = "master"
name = "github.com/hashicorp/yamux"
packages = ["."]
revision = "683f49123a33db61abfb241b7ac5e4af4dc54d55"
[[projects]]
branch = "master"
name = "github.com/howeyc/gopass"
packages = ["."]
revision = "bf9dde6d0d2c004a008c27aaee91170c786f6db8"
[[projects]]
name = "github.com/jmespath/go-jmespath"
packages = ["."]
revision = "0b12d6b5"
[[projects]]
name = "github.com/joho/godotenv"
packages = [
@ -443,12 +176,6 @@
packages = ["."]
revision = "ae77be60afb1dcacde03767a8c37337fad28ac14"
[[projects]]
branch = "master"
name = "github.com/kballard/go-shellquote"
packages = ["."]
revision = "cd60e84ee657ff3dc51de0b4f55dd299a3e136f2"
[[projects]]
name = "github.com/klauspost/cpuid"
packages = ["."]
@ -461,42 +188,6 @@
revision = "6bb6130ff6a76a904c1841707d65603aec9cc288"
version = "v1.6"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluaenv"
packages = ["."]
revision = "2888db6bbe38923d59c42e443895875cc8ce0820"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluafs"
packages = ["."]
revision = "01391ed2d7ab89dc80157605b073403f960aa223"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluaquestion"
packages = ["."]
revision = "311437c29ba54d027ad2af383661725ae2bfdcdc"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluassh"
packages = ["."]
revision = "2a7bd48d7568de8230c87ac1ef4a4c481e45814d"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluatemplate"
packages = ["."]
revision = "d9e2c9d6b00f069a9da377a9ac529c827c1c7d71"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluayaml"
packages = ["."]
revision = "6fe413d49d73d785510ecf1529991ab0573e96c7"
[[projects]]
branch = "master"
name = "github.com/kr/fs"
@ -509,12 +200,6 @@
packages = ["."]
revision = "cfb55aafdaf3ec08f0db22699ab822c50091b1c4"
[[projects]]
name = "github.com/kr/pty"
packages = ["."]
revision = "282ce0e5322c82529687d609ee670fac7c7d917c"
version = "v1.1.1"
[[projects]]
branch = "master"
name = "github.com/kr/text"
@ -592,78 +277,18 @@
revision = "035c07716cd373d88456ec4d701402df52584cb4"
version = "v3.0.1"
[[projects]]
name = "github.com/mattn/go-isatty"
packages = ["."]
revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39"
version = "v0.0.3"
[[projects]]
name = "github.com/mattn/go-runewidth"
packages = ["."]
revision = "9e777a8366cce605130a531d2cd6363d07ad7317"
version = "v0.0.2"
[[projects]]
branch = "master"
name = "github.com/mitchellh/cli"
packages = ["."]
revision = "518dc677a1e1222682f4e7db06721942cb8e9e4c"
[[projects]]
branch = "master"
name = "github.com/mitchellh/copystructure"
packages = ["."]
revision = "d23ffcb85de31694d6ccaa23ccb4a03e55c1303f"
[[projects]]
branch = "master"
name = "github.com/mitchellh/go-homedir"
packages = ["."]
revision = "b8bc1bf767474819792c23f32d8286a45736f1c6"
[[projects]]
branch = "master"
name = "github.com/mitchellh/go-testing-interface"
packages = ["."]
revision = "a61a99592b77c9ba629d254a693acffaeb4b7e28"
[[projects]]
branch = "master"
name = "github.com/mitchellh/go-wordwrap"
packages = ["."]
revision = "ad45545899c7b13c020ea92b2072220eefad42b8"
[[projects]]
branch = "master"
name = "github.com/mitchellh/hashstructure"
packages = ["."]
revision = "2bca23e0e452137f789efbc8610126fd8b94f73b"
[[projects]]
branch = "master"
name = "github.com/mitchellh/mapstructure"
packages = ["."]
revision = "b4575eea38cca1123ec2dc90c26529b5c5acfcff"
[[projects]]
branch = "master"
name = "github.com/mitchellh/reflectwalk"
packages = ["."]
revision = "63d60e9d0dbc60cf9164e6510889b0db6683d98c"
[[projects]]
branch = "master"
name = "github.com/mtneug/pkg"
packages = ["ulid"]
revision = "b270c2c35fc775243f87c58cf3f6969c5d9369d6"
[[projects]]
name = "github.com/oklog/run"
packages = ["."]
revision = "4dadeb3030eda0273a12382bb2348ffc7c9d1a39"
version = "v1.0.0"
[[projects]]
name = "github.com/oklog/ulid"
packages = ["."]
@ -676,47 +301,12 @@
packages = ["."]
revision = "96aac992fc8b1a4c83841a6c3e7178d20d989625"
[[projects]]
branch = "master"
name = "github.com/otm/gluaflag"
packages = ["."]
revision = "078088de689148194436293886e8e39809167332"
[[projects]]
branch = "master"
name = "github.com/otm/gluash"
packages = ["."]
revision = "e145c563986f0b91f740a758a84bca46c163aec7"
[[projects]]
name = "github.com/pkg/errors"
packages = ["."]
revision = "645ef00459ed84a119197bfb8d8205042c6df63d"
version = "v0.8.0"
[[projects]]
name = "github.com/pkg/sftp"
packages = ["."]
revision = "f6a9258a0f570c3a76681b897b6ded57cb0dfa88"
version = "1.2.0"
[[projects]]
name = "github.com/posener/complete"
packages = [
".",
"cmd",
"cmd/install",
"match"
]
revision = "dc2bc5a81accba8782bebea28628224643a8286a"
version = "v1.1"
[[projects]]
name = "github.com/satori/go.uuid"
packages = ["."]
revision = "f58768cc1a7a7e77a3bd49e98cdd21419399b6a3"
version = "v1.2.0"
[[projects]]
branch = "master"
name = "github.com/streamrail/concurrent-map"
@ -751,17 +341,6 @@
revision = "db96cdf354e8dc053e5ee5fe890bb0a7f18123ab"
version = "v5.0.0"
[[projects]]
name = "github.com/ulikunitz/xz"
packages = [
".",
"internal/hash",
"internal/xlog",
"lzma"
]
revision = "0c6b41e72360850ca4f98dc341fd999726ea007f"
version = "v0.5.4"
[[projects]]
name = "github.com/xtaci/kcp-go"
packages = ["."]
@ -774,24 +353,6 @@
revision = "ebec7ef2574b42a7088cd7751176483e0a27d458"
version = "v1.0.6"
[[projects]]
branch = "master"
name = "github.com/yookoala/realpath"
packages = ["."]
revision = "d19ef9c409d9817c1e685775e53d361b03eabbc8"
[[projects]]
branch = "master"
name = "github.com/yuin/gluamapper"
packages = ["."]
revision = "d836955830e75240d46ce9f0e6d148d94f2e1d3a"
[[projects]]
branch = "master"
name = "github.com/yuin/gluare"
packages = ["."]
revision = "d7c94f1a80ede93a621ed100866e6d4745ca8c22"
[[projects]]
branch = "master"
name = "github.com/yuin/gopher-lua"
@ -803,20 +364,6 @@
]
revision = "7d7bc8747e3f614c5c587729a341fe7d8903cdb8"
[[projects]]
branch = "master"
name = "github.com/zclconf/go-cty"
packages = [
"cty",
"cty/convert",
"cty/function",
"cty/function/stdlib",
"cty/gocty",
"cty/json",
"cty/set"
]
revision = "709e4033eeb037dc543dbc2048065dfb814ce316"
[[projects]]
name = "go.uber.org/atomic"
packages = ["."]
@ -829,28 +376,15 @@
packages = [
"acme",
"acme/autocert",
"bcrypt",
"blowfish",
"cast5",
"curve25519",
"ed25519",
"ed25519/internal/edwards25519",
"hkdf",
"internal/chacha20",
"nacl/secretbox",
"openpgp",
"openpgp/armor",
"openpgp/elgamal",
"openpgp/errors",
"openpgp/packet",
"openpgp/s2k",
"pbkdf2",
"poly1305",
"salsa20",
"salsa20/salsa",
"ssh",
"ssh/agent",
"ssh/terminal",
"tea",
"twofish",
"xtea"
@ -863,8 +397,6 @@
packages = [
"bpf",
"context",
"html",
"html/atom",
"http2",
"http2/hpack",
"idna",
@ -880,10 +412,7 @@
[[projects]]
branch = "master"
name = "golang.org/x/sys"
packages = [
"unix",
"windows"
]
packages = ["unix"]
revision = "2c42eef0765b9837fbdab12011af7830f55f88f0"
[[projects]]
@ -907,15 +436,6 @@
]
revision = "e19ae1496984b1c655b8044a65c0300a3c878dd3"
[[projects]]
branch = "master"
name = "golang.org/x/tools"
packages = [
"godoc/vfs",
"godoc/vfs/httpfs"
]
revision = "99037e3760ed7d9c772c980caee42b17779b80ce"
[[projects]]
branch = "master"
name = "google.golang.org/genproto"
@ -925,30 +445,8 @@
[[projects]]
name = "google.golang.org/grpc"
packages = [
".",
"balancer",
"balancer/base",
"balancer/roundrobin",
"codes",
"connectivity",
"credentials",
"encoding",
"grpclb/grpc_lb_v1/messages",
"grpclog",
"health",
"health/grpc_health_v1",
"internal",
"keepalive",
"metadata",
"naming",
"peer",
"resolver",
"resolver/dns",
"resolver/passthrough",
"stats",
"status",
"tap",
"transport"
"status"
]
revision = "6b51017f791ae1cfbec89c52efdf444b13b550ef"
version = "v1.9.2"
@ -959,30 +457,12 @@
revision = "947dcec5ba9c011838740e680966fd7087a71d0d"
version = "v2.2.6"
[[projects]]
branch = "v2"
name = "gopkg.in/xmlpath.v2"
packages = ["."]
revision = "860cbeca3ebcc600db0b213c0e83ad6ce91f5739"
[[projects]]
branch = "v2"
name = "gopkg.in/yaml.v2"
packages = ["."]
revision = "d670f9405373e636a5a2765eea47fac0c9bc91a4"
[[projects]]
branch = "master"
name = "layeh.com/asar"
packages = ["."]
revision = "0ec214a4ae0d21fa761591e8cb8f0bbf162ef3e5"
[[projects]]
branch = "master"
name = "layeh.com/gopher-json"
packages = ["."]
revision = "1aab82196e3b418b56866938f28b6a693f2c6b18"
[[projects]]
name = "layeh.com/gopher-luar"
packages = ["."]
@ -992,6 +472,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "d000c14171581755a2ee37649cb969d019d9c197928028a87e83e3ec729421aa"
inputs-digest = "00989279709617d81211e843f6f2803e3af15e1a363c2976d11a5f9d78e0e040"
solver-name = "gps-cdcl"
solver-version = 1

View File

@ -1,69 +0,0 @@
package main
import (
"context"
"flag"
"fmt"
"log"
"net/http"
"os"
"runtime"
"github.com/kr/pretty"
"go.uber.org/atomic"
)
// hits counts requests served by the demo server across the whole
// process; every handler invocation increments it.
var (
	hits *atomic.Int64
)

// init seeds the counter so handlers can call hits.Inc() without a nil
// check.
func init() {
	hits = atomic.NewInt64(0)
}
// demoServerHandler builds the demo page handler. Each response carries
// a banner line, a pretty-printed dump of the request headers, the
// configured msg, the serving hostname and OS, and the global hit
// counter. Requests that arrive with an X-Remote-Ip header are logged.
func demoServerHandler(msg string) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "Route is go!")
		fmt.Fprintf(w, "%s\n", pretty.Sprintf("%s", r.Header))

		hostname, _ := os.Hostname() // best-effort; empty string on error
		fmt.Fprintf(w, "message: %s\n", msg)
		fmt.Fprintf(w, "Served by %s running %s\n", hostname, runtime.GOOS)
		fmt.Fprintf(w, "Hit count: %d", hits.Inc())

		if remoteIP := r.Header.Get("X-Remote-Ip"); remoteIP != "" {
			log.Printf("Hit from %s: %s", remoteIP, r.RequestURI)
		}
	})
}
// demoServer runs a small HTTP server for testing until ctx is
// canceled.
//
// args is parsed as a "server" flag set:
//   -addr  host:port to listen on (default ":9090")
//   -msg   custom message added to each page render
//
// It returns a flag-parsing error, or the error (if any) from shutting
// the server down after ctx is done.
func demoServer(ctx context.Context, args []string) error {
	fs := flag.NewFlagSet("server", flag.ContinueOnError)
	addr := fs.String("addr", ":9090", "http address to listen on")
	msg := fs.String("msg", "now here's a little lesson in trickery...", "custom message to add to each page render")

	if err := fs.Parse(args); err != nil {
		return err
	}

	hs := &http.Server{
		Addr:    *addr,
		Handler: demoServerHandler(*msg),
	}

	go func() {
		// ListenAndServe always returns a non-nil error; ErrServerClosed
		// is the expected result after Shutdown, so only surface others.
		// The original discarded this error entirely.
		if err := hs.ListenAndServe(); err != nil && err != http.ErrServerClosed {
			log.Printf("http server error: %v", err)
		}
	}()
	log.Printf("listening on %s", *addr)

	// The original spun a for/select with a single case; a plain receive
	// is equivalent. Propagate the Shutdown error instead of dropping it.
	<-ctx.Done()
	return hs.Shutdown(context.Background())
}

View File

@ -1,124 +0,0 @@
package main
import (
"context"
"flag"
"log"
"net/http"
"os"
"path/filepath"
"git.xeserv.us/xena/route/internal/gluaroute"
edata "git.xeserv.us/xena/route/proto/eclier"
"github.com/Xe/eclier"
"github.com/Xe/gluanetrc"
"github.com/Xe/x/tools/glue/libs/gluaexpect"
"github.com/Xe/x/tools/glue/libs/gluasimplebox"
"github.com/ailncode/gluaxmlpath"
"github.com/cjoudrey/gluahttp"
"github.com/cjoudrey/gluaurl"
"github.com/go-serve/bindatafs"
"github.com/kohkimakimoto/gluaenv"
"github.com/kohkimakimoto/gluafs"
"github.com/kohkimakimoto/gluaquestion"
"github.com/kohkimakimoto/gluassh"
"github.com/kohkimakimoto/gluatemplate"
"github.com/kohkimakimoto/gluayaml"
homedir "github.com/mitchellh/go-homedir"
"github.com/otm/gluaflag"
"github.com/otm/gluash"
"github.com/yuin/gluare"
lua "github.com/yuin/gopher-lua"
"golang.org/x/tools/godoc/vfs/httpfs"
json "layeh.com/gopher-json"
)
var hDir string
var cfgHome *string
var netrcFile *string
var defaultServer *string
// init resolves the user's home directory and registers the CLI flags
// that control where construct keeps its configuration and credentials.
func init() {
	home, err := homedir.Dir()
	if err != nil {
		log.Fatal(err)
	}
	hDir = home

	cfgHome = flag.String("home", filepath.Join(hDir, ".construct"), "construct's home directory")
	netrcFile = flag.String("netrc", filepath.Join(hDir, ".netrc"), "location of netrc file to use for authentication")
	defaultServer = flag.String("default-server", "https://api.route.xeserv.us:7268", "api server to connect to")

	log.SetFlags(log.LstdFlags | log.Llongfile)
}
// main wires up the eclier router for the construct CLI: it prepares
// the plugin and script directories, ensures a netrc file exists,
// mounts the embedded core scripts, registers plugin script homes and
// the built-in demo server command, then dispatches the CLI arguments.
func main() {
	flag.Parse()

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	pluginLoc := filepath.Join(*cfgHome, "plugins")
	scriptsLoc := filepath.Join(*cfgHome, "local", "scripts")

	os.MkdirAll(pluginLoc, 0755)
	os.MkdirAll(scriptsLoc, 0755)

	// Make sure a netrc file exists so authentication code has
	// something to read.
	if _, err := os.Stat(*netrcFile); err != nil {
		log.Printf("creating netrc file...")

		fout, err := os.Create(*netrcFile)
		if err != nil {
			log.Fatal(err)
		}
		fout.Close()
	}

	efs := bindatafs.New("core://", edata.Asset, edata.AssetDir, edata.AssetInfo)

	opts := []eclier.RouterOption{
		eclier.WithGluaCreationHook(preload),
		eclier.WithScriptHome(scriptsLoc),
		eclier.WithFilesystem("/bindata:core/", httpfs.New(efs)),
	}

	err := filepath.Walk(pluginLoc, func(path string, info os.FileInfo, err error) error {
		// Propagate walk errors; the original ignored err and would
		// have dereferenced a nil info on any walk failure.
		if err != nil {
			return err
		}
		if info.IsDir() {
			// NOTE(review): this registers the bare directory name, not
			// the full path — confirm relative script homes are intended.
			opts = append(opts, eclier.WithScriptHome(info.Name()))
		}
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}

	r, err := eclier.NewRouter(opts...)
	if err != nil {
		log.Fatal(err)
	}

	r.AddCommand(eclier.NewBuiltinCommand("server", "spawns a http server for testing", "[-addr host:port|-msg \"some message\"]", demoServer))

	r.Run(ctx, flag.Args())
}
// preload registers every Lua module the construct CLI exposes on the
// given interpreter state. It is passed to eclier as the glua creation
// hook, so each new lua.LState gets the same module environment.
func preload(L *lua.LState) {
	// Modules scripts pull in explicitly with require("name").
	L.PreloadModule("re", gluare.Loader)
	L.PreloadModule("sh", gluash.Loader)
	L.PreloadModule("fs", gluafs.Loader)
	L.PreloadModule("env", gluaenv.Loader)
	L.PreloadModule("yaml", gluayaml.Loader)
	L.PreloadModule("question", gluaquestion.Loader)
	L.PreloadModule("ssh", gluassh.Loader)
	// The http module shares one default-configured client per state.
	L.PreloadModule("http", gluahttp.NewHttpModule(&http.Client{}).Loader)
	L.PreloadModule("flag", gluaflag.Loader)
	L.PreloadModule("template", gluatemplate.Loader)
	L.PreloadModule("url", gluaurl.Loader)
	// Packages that register themselves under their own module names.
	gluaexpect.Preload(L)
	gluasimplebox.Preload(L)
	gluaxmlpath.Preload(L)
	json.Preload(L)
	gluanetrc.Preload(L)
	gluaroute.Preload(L)
}

View File

@ -1,54 +0,0 @@
package main
import (
"log"
"net/http"
"git.xeserv.us/xena/route/proto/route"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/plugin"
"github.com/hashicorp/terraform/terraform"
)
// main serves the route Terraform provider over the plugin protocol.
func main() {
	opts := plugin.ServeOpts{ProviderFunc: provider}
	plugin.Serve(&opts)
}
// provider returns a terraform.ResourceProvider for the route service.
//
// Provider configuration:
//   token - API token; falls back to the ROUTE_TOKEN environment variable.
//   host  - API host; falls back to the ROUTE_HOST environment variable.
func provider() terraform.ResourceProvider {
	return &schema.Provider{
		Schema: map[string]*schema.Schema{
			"token": {
				Type:        schema.TypeString,
				Optional:    true,
				DefaultFunc: schema.EnvDefaultFunc("ROUTE_TOKEN", nil),
			},
			// NOTE(review): the original declared host Required with a
			// DefaultFunc; helper/schema's InternalValidate rejects that
			// combination, so host is Optional with the env fallback —
			// confirm against the provider's intended contract.
			"host": {
				Type:        schema.TypeString,
				Optional:    true,
				DefaultFunc: schema.EnvDefaultFunc("ROUTE_HOST", nil),
			},
		},
		ResourcesMap: map[string]*schema.Resource{
			"route_route": routeResource(),
			"route_token": tokenResource(),
		},
		ConfigureFunc: providerConfigure,
	}
}
// providerConfigure builds the route API client from the provider's
// token and host configuration. The returned client is handed to every
// resource CRUD function as meta.
func providerConfigure(d *schema.ResourceData) (interface{}, error) {
	var (
		token = d.Get("token").(string)
		host  = d.Get("host").(string)
	)

	log.Printf("[INFO] Initializing route client connecting to %s", host)

	return route.New(host, token, &http.Client{}), nil
}

View File

@ -1,91 +0,0 @@
package main
import (
"context"
"log"
proto "git.xeserv.us/xena/route/proto"
"git.xeserv.us/xena/route/proto/route"
"github.com/hashicorp/terraform/helper/schema"
)
// routeResource describes the route_route Terraform resource: a single
// required, force-new "host" attribute with full CRUD plus import
// support.
func routeResource() *schema.Resource {
	hostSchema := &schema.Schema{
		Type:     schema.TypeString,
		Required: true,
		ForceNew: true,
	}

	return &schema.Resource{
		Create:   resourceRouteCreate,
		Read:     resourceRouteRead,
		Delete:   resourceRouteDelete,
		Exists:   resourceRouteExists,
		Importer: &schema.ResourceImporter{State: schema.ImportStatePassthrough},
		Schema:   map[string]*schema.Schema{"host": hostSchema},
	}
}
// resourceRouteCreate creates a route for the configured host and
// records the server-assigned ID in Terraform state.
func resourceRouteCreate(d *schema.ResourceData, meta interface{}) error {
	cli := meta.(*route.Client)

	rt, err := cli.Routes.Put(context.Background(), &proto.Route{
		Host: d.Get("host").(string),
	})
	if err != nil {
		return err
	}

	// Without SetId Terraform treats the resource as uncreated and will
	// recreate it on every apply; the original omitted this call.
	d.SetId(rt.Id)

	log.Printf("[INFO] created route for host %s with ID %s", rt.Host, rt.Id)
	return nil
}
// resourceRouteDelete fetches the route named by the state ID and asks
// the API to delete it.
func resourceRouteDelete(d *schema.ResourceData, meta interface{}) error {
	cli := meta.(*route.Client)
	ctx := context.Background()

	rt, err := cli.Routes.Get(ctx, &proto.GetRouteRequest{Id: d.Id()})
	if err != nil {
		return err
	}

	if _, err := cli.Routes.Delete(ctx, rt); err != nil {
		return err
	}

	log.Printf("[INFO] deleted route for host %s with ID %s", rt.Host, rt.Id)
	return nil
}
// resourceRouteExists reports whether the route in state still exists
// upstream.
//
// NOTE(review): every API error — including "not found" — is returned
// as an error rather than (false, nil); confirm whether the client can
// distinguish missing routes if drift detection matters.
func resourceRouteExists(d *schema.ResourceData, meta interface{}) (bool, error) {
	cli := meta.(*route.Client)

	if _, err := cli.Routes.Get(context.Background(), &proto.GetRouteRequest{Id: d.Id()}); err != nil {
		return false, err
	}

	return true, nil
}
// resourceRouteRead refreshes Terraform state from the upstream route
// record identified by the state ID.
func resourceRouteRead(d *schema.ResourceData, meta interface{}) error {
	cli := meta.(*route.Client)

	got, err := cli.Routes.Get(context.Background(), &proto.GetRouteRequest{Id: d.Id()})
	if err != nil {
		return err
	}

	d.SetId(got.Id)
	d.Set("host", got.Host)
	d.Set("creator", got.Creator)

	return nil
}

View File

@ -1,99 +0,0 @@
package main
import (
"context"
"log"
proto "git.xeserv.us/xena/route/proto"
"git.xeserv.us/xena/route/proto/route"
"github.com/hashicorp/terraform/helper/schema"
)
// tokenResource describes the route_token Terraform resource: a
// required, force-new list of scope strings with full CRUD plus import
// support.
func tokenResource() *schema.Resource {
	return &schema.Resource{
		Create: resourceTokenCreate,
		Read:   resourceTokenRead,
		Delete: resourceTokenDelete,
		Exists: resourceTokenExists,
		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},
		Schema: map[string]*schema.Schema{
			"scopes": {
				Type:     schema.TypeList,
				Required: true,
				ForceNew: true,
				// TypeList requires an Elem describing its members;
				// helper/schema's InternalValidate rejects the schema
				// without it. resourceTokenCreate expects strings.
				Elem: &schema.Schema{Type: schema.TypeString},
			},
		},
	}
}
// resourceTokenCreate creates a token with the configured scopes and
// records the server-assigned ID in Terraform state. Non-string scope
// values are logged and skipped.
func resourceTokenCreate(d *schema.ResourceData, meta interface{}) error {
	cli := meta.(*route.Client)

	var scps []string
	for _, val := range d.Get("scopes").([]interface{}) {
		sc, ok := val.(string)
		if !ok {
			// Log the offending value; the original printed sc, which is
			// the zero value after the failed type assertion.
			log.Printf("[INFO] can't decode %#v", val)
			continue
		}
		scps = append(scps, sc)
	}

	tok, err := cli.Tokens.Put(context.Background(), &proto.Token{Scopes: scps})
	if err != nil {
		return err
	}

	// Without SetId Terraform never tracks the new token; the original
	// omitted this call.
	d.SetId(tok.Id)

	log.Printf("[INFO] created token with scopes %v and ID %s", tok.Scopes, tok.Id)
	return nil
}
// resourceTokenDelete fetches the token named by the state ID and asks
// the API to deactivate it.
func resourceTokenDelete(d *schema.ResourceData, meta interface{}) error {
	cli := meta.(*route.Client)
	ctx := context.Background()

	tok, err := cli.Tokens.Get(ctx, &proto.GetTokenRequest{Id: d.Id()})
	if err != nil {
		return err
	}

	_, err = cli.Tokens.Deactivate(ctx, tok)
	return err
}
// resourceTokenExists reports whether the token in state still exists
// upstream.
//
// NOTE(review): any API error — including "not found" — is surfaced as
// an error rather than (false, nil); confirm the client distinguishes
// missing tokens if drift detection matters.
func resourceTokenExists(d *schema.ResourceData, meta interface{}) (bool, error) {
	cli := meta.(*route.Client)

	if _, err := cli.Tokens.Get(context.Background(), &proto.GetTokenRequest{Id: d.Id()}); err != nil {
		return false, err
	}

	return true, nil
}
// resourceTokenRead refreshes Terraform state from the upstream token
// record identified by the state ID.
func resourceTokenRead(d *schema.ResourceData, meta interface{}) error {
	cli := meta.(*route.Client)

	tok, err := cli.Tokens.Get(context.Background(), &proto.GetTokenRequest{Id: d.Id()})
	if err != nil {
		return err
	}

	d.SetId(tok.Id)
	d.Set("body", tok.Body)
	d.Set("active", tok.Active)
	d.Set("scopes", tok.Scopes)

	return nil
}

View File

@ -27,13 +27,11 @@ func init() {
wd = lwd
arches = []string{"amd64", "ppc64", "386", "arm", "arm64"}
bins = []string{"route-httpagent", "route-cli", "routed", "terraform-provider-route", "construct"}
bins = []string{"route-httpagent", "routectl", "routed"}
tools = []string{
"github.com/golang/dep/cmd/dep",
"github.com/golang/protobuf/protoc-gen-go",
"github.com/twitchtv/twirp/protoc-gen-twirp",
"github.com/Xe/twirp-codegens/cmd/protoc-gen-twirp_eclier",
"github.com/jteeuwen/go-bindata/go-bindata",
}
}
@ -178,7 +176,6 @@ func Generate(ctx context.Context) {
dir := filepath.Join(wd, "proto")
shouldWork(ctx, nil, dir, "sh", "./regen.sh")
shouldWork(ctx, nil, filepath.Join(dir, "eclier"), "go-bindata", "-pkg", "edata", "-ignore", "bindata.go", ".")
}
// Vars shows the various variables that this magefile uses.
@ -191,6 +188,7 @@ func Vars() {
table.Append([]string{"bins", fmt.Sprint(bins)})
table.Append([]string{"goarch", runtime.GOARCH})
table.Append([]string{"goos", runtime.GOOS})
table.Append([]string{"gover", runtime.Version()})
table.Append([]string{"tools", fmt.Sprint(tools)})
table.Append([]string{"wd", wd})

View File

@ -1,465 +0,0 @@
// Code generated by go-bindata.
// sources:
// route_twirp_eclier_backends_kill.lua
// route_twirp_eclier_backends_list.lua
// route_twirp_eclier_routes_delete.lua
// route_twirp_eclier_routes_get.lua
// route_twirp_eclier_routes_get_all.lua
// route_twirp_eclier_routes_put.lua
// route_twirp_eclier_tokens_deactivate.lua
// route_twirp_eclier_tokens_delete.lua
// route_twirp_eclier_tokens_get.lua
// route_twirp_eclier_tokens_get_all.lua
// route_twirp_eclier_tokens_put.lua
// DO NOT EDIT!
package edata
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, err
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info os.FileInfo
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _route_twirp_eclier_backends_killLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x5c\x52\x4d\x6b\xdc\x30\x10\xbd\xeb\x57\x3c\x74\xb2\x21\x36\xdb\x43\x2f\x0b\xbe\xb4\xc9\x21\x50\x1a\x28\xe9\xa9\x84\xa2\x95\xc7\xb6\x58\xaf\xe4\xce\x48\x4e\xfa\xef\x8b\xe4\xec\x47\x73\x30\x58\x7a\xef\xcd\x7b\x33\xa3\xa6\xc1\xd7\xd0\x13\x46\xf2\xc4\x26\x52\x8f\xc3\x5f\x2c\x1c\x62\xb0\xcd\x48\xbe\x89\xaf\x8e\x97\xdf\x64\x67\x47\x8c\xf5\x73\xbb\x6b\x77\xf7\x4f\xf8\xfe\xf4\x8c\x87\xfb\xc7\x67\xd5\x34\x90\x90\xd8\xd2\x1e\x1c\x52\xa4\xb6\x48\x95\x12\xcb\x6e\x89\xed\x4a\x7c\x40\x07\x7d\x30\xf6\x48\xbe\x97\xfd\xd1\xcd\xb3\x3e\xa3\x13\xcd\x4b\x46\x1f\xde\xc8\xa6\x48\x82\x13\xc5\x29\xf4\xc8\x24\x04\x0f\x21\x5e\x9d\x25\x9c\xd5\x18\x02\xa3\x04\xc2\x62\xec\xd1\x8c\x84\x37\xca\xa4\x36\x49\xbb\xd9\x9f\xa9\x17\x0f\x93\xe2\x14\x38\xbb\x9c\x8c\x9d\x9c\xa7\xe6\xd2\xa9\xbe\x49\x29\x2e\xf8\x4c\xda\x3a\xbc\x20\x49\xb2\x49\x07\xad\x95\x9a\x83\x35\x33\x86\xd9\x8c\xe8\xc0\xf4\x27\x39\x26\xe8\x7c\xd6\xef\x98\xac\xf6\x16\x92\xd5\x5e\x65\x82\xae\x68\x5b\x4f\xaf\x55\xad\xf2\xe0\xf2\x71\xeb\xe9\xcb\x96\xfa\xb1\x57\x83\xec\x25\xb2\xf3\x63\xa5\x5d\xaf\xef\xa0\xf3\xb7\x9a\x39\x51\x21\x9e\x48\x4a\x22\xc3\x23\x5c\xaf\x6b\xf5\x31\xe8\x20\xfb\xf2\x9b\x2d\x86\xe4\x6d\xcc\x7d\x71\xf2\x95\xe1\xb1\x56\x80\x1b\xb2\xf6\xd7\xa7\x17\x74\x1d\x74\x93\x37\xa0\x11\xf8\xbf\xcb\xf7\xdb\x38\x91\x57\x00\xb0\xb0\xf3\xb1\xba\x56\xae\xcb\x2d\x53\x4c\x9c\x09\xe4\x7b\xa5\x50\x2a\xec\x5e\xd0\xe1\x66\xf5\x0a\xb8\x4e\x4d\xb6\x78\x8b\x61\xa1\x2d\xce\x05\x66\x92\xfc\x10\x64\xb5\x65\x3c\x66\x71\x3f\x7f\x7c\xbb\x43\x0c\x47\xf2\xf5\xfe\xbc\xd3\xaa\x2e\xcf\xa7\x2a\xc5\x6a\xa5\xb2\xf1\xbf\x00\x00\x00\xff\xff\x9b\x1b\x96\x37\xbf\x02\x00\x00")
// route_twirp_eclier_backends_killLuaBytes gunzips the embedded
// route_twirp_eclier_backends_kill.lua source and returns it.
// Generated by go-bindata; do not edit by hand.
func route_twirp_eclier_backends_killLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_backends_killLua,
		"route_twirp_eclier_backends_kill.lua",
	)
}

// route_twirp_eclier_backends_killLua wraps the decompressed script in
// an asset carrying the file metadata recorded at generation time.
// Generated by go-bindata; do not edit by hand.
func route_twirp_eclier_backends_killLua() (*asset, error) {
	bytes, err := route_twirp_eclier_backends_killLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_backends_kill.lua", size: 703, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
var _route_twirp_eclier_backends_listLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x7c\x52\xc1\x6e\xdb\x3a\x10\xbc\xf3\x2b\x06\x3c\x59\x40\x24\xe4\x1d\xde\x45\x80\x2e\x6d\x72\x28\x50\x34\x40\x9b\x9e\x9a\xa0\xa0\xa9\x95\x4c\x44\x26\xd5\x5d\x52\x49\xff\xbe\x20\x65\xd9\x6e\x81\xf6\x60\xc0\xdc\x99\xd9\x9d\x5d\x4d\x5d\xe3\x7d\xe8\x09\x23\x79\x62\x13\xa9\xc7\xfe\x27\x66\x0e\x31\xd8\x7a\x24\x5f\xc7\x57\xc7\xf3\x77\xb2\x93\x23\xc6\xf2\x7f\x73\xdb\xdc\xde\x3d\xe0\xd3\xc3\x23\xee\xef\x3e\x3c\xaa\xba\x86\x84\xc4\x96\x5a\x70\x48\x91\x9a\x22\x55\x4a\x2c\xbb\x39\x36\x0b\xf1\x1e\x1d\xf4\xde\xd8\x17\xf2\xbd\xb4\x93\x93\xa8\x37\xf4\x40\xd3\x9c\xd1\xfb\x37\xb2\x29\x92\xe0\x48\xf1\x10\x7a\x64\x12\x82\x87\x10\x2f\xce\x12\x36\x35\x86\xc0\x28\x86\x30\x1b\xfb\x62\x46\xc2\x1b\x65\x52\x93\xa4\x59\xc7\x6f\xd4\xf3\x0c\x93\xe2\x21\x70\x9e\x72\x34\xf6\xe0\x3c\xd5\xe7\x4d\xf5\x95\x4b\x71\xc1\x67\xd2\xba\xe1\x19\x49\x92\x87\x74\xd0\x5a\xa9\x29\x58\x33\x61\x98\xcc\x88\x0e\x4c\x3f\x92\x63\x82\xce\x6f\x7d\xc2\x64\xb1\xd7\x90\x2c\xf6\x22\x13\x74\x45\xdb\x78\x7a\xdd\x55\x2a\x1f\x2e\x3f\xd7\x9d\xde\xad\xae\xbf\xd0\x44\x36\x06\x56\x83\xb4\x12\xd9\xf9\x71\xa7\xfb\x70\x34\xce\xeb\x1b\xe8\xfc\x5b\xcc\x94\xa8\x48\x8e\x24\xc5\x9b\xe1\x11\x27\x4e\x75\xad\x4b\x42\xfc\x6f\x55\x61\x54\xea\xcf\x55\x07\x69\xcb\xdf\x6c\x72\x48\xde\xc6\x7c\x19\x4e\x7e\x67\x78\xac\x14\xe0\x86\xac\xfe\xf6\xdf\x33\xba\x0e\xba\xce\xdf\x50\x23\xf0\x6f\xc5\x53\x35\x1e\xc8\x2b\x00\x98\xd9\xf9\xb8\xbb\x74\xae\x4a\x95\x29\x26\xce\x04\xf2\xbd\x52\x28\x1d\x6e\x9f\xd1\xe1\x2a\x3c\x0a\xb8\xdc\x5d\x56\x7b\xb3\x61\xa1\xd5\xce\x19\x66\x92\x1c\x25\x59\x6c\x39\xb0\x99\xdd\xd7\xcf\x1f\x6f\x10\xc3\x0b\xf9\xaa\xdd\x52\xb1\xab\x4a\x00\x77\xa5\x59\x51\xaf\xce\xf4\x5e\xda\xa7\xf8\x14\x35\x9a\x06\x31\x9c\x6e\x98\x7b\x36\x7b\x29\x6e\x37\xde\x96\xe3\xbf\xb1\x4f\x78\x55\xa9\xbc\xd4\xaf\x00\x00\x00\xff\xff\x5b\x4a\x7f\xf6\x5d\x03\x00\x00")
// route_twirp_eclier_backends_listLuaBytes gunzips the embedded
// route_twirp_eclier_backends_list.lua source and returns it.
// Generated by go-bindata; do not edit by hand.
func route_twirp_eclier_backends_listLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_backends_listLua,
		"route_twirp_eclier_backends_list.lua",
	)
}

// route_twirp_eclier_backends_listLua wraps the decompressed script in
// an asset carrying the file metadata recorded at generation time.
// Generated by go-bindata; do not edit by hand.
func route_twirp_eclier_backends_listLua() (*asset, error) {
	bytes, err := route_twirp_eclier_backends_listLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_backends_list.lua", size: 861, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// _route_twirp_eclier_routes_deleteLua is the embedded, gzip-compressed
// contents of route_twirp_eclier_routes_delete.lua.
var _route_twirp_eclier_routes_deleteLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x7c\x52\x4f\x6b\xdc\x3e\x10\xbd\xeb\x53\x3c\x74\xb2\x21\x36\xfb\x3b\xfc\x2e\x06\x9f\x9a\x1c\x0a\xa5\x81\x90\x9e\x4a\x28\x8a\x3c\xb6\x45\xbc\x92\x3b\x23\x39\xe9\xb7\x2f\x92\x37\xbb\x9b\x1e\x72\x30\x58\xf3\xde\x9b\x79\xf3\xa7\x69\xf0\x25\x0c\x84\x89\x3c\xb1\x89\x34\xe0\xf9\x0f\x56\x0e\x31\xd8\x66\x22\xdf\xc4\x57\xc7\xeb\x2f\xb2\x8b\x23\xc6\xf6\x7f\x7b\x68\x0f\xb7\xf7\xf8\x7e\xff\x88\xbb\xdb\xaf\x8f\xaa\x69\x20\x21\xb1\xa5\x0e\x1c\x52\xa4\xb6\x48\x95\x12\xcb\x6e\x8d\xed\x46\xfc\x8c\x1e\xba\x60\xd2\x0d\xb4\x50\x24\xfd\x8e\xce\xb4\xac\x19\xbd\x7b\x23\x9b\x71\x1c\x29\xce\x61\xc0\x4e\x43\xf0\x10\xe2\xcd\x59\xda\x73\x0b\xc6\xc0\x28\x86\xb0\x1a\xfb\x62\x26\xc2\x1b\x65\x4a\x9b\xa4\xdd\xcb\xef\xc4\x73\x05\x93\xe2\x1c\x38\xd7\x38\x1a\x3b\x3b\x4f\xcd\xb9\x4f\x7d\xe5\x51\x5c\xf0\x99\xb4\xf7\x77\x46\x92\xe4\x12\x3d\xb4\x56\x6a\x09\xd6\x2c\x18\x17\x33\xa1\x07\xd3\xef\xe4\x98\xa0\xf3\x5b\x9f\x30\xd9\xec\x35\x24\x9b\xbd\xc8\x04\x7d\xd1\xb6\x9e\x5e\xab\x5a\xe5\xb1\xe5\xe7\xde\xd1\x43\xf6\xac\x46\xe9\x24\xb2\xf3\x53\xa5\xdd\xa0\x6f\xa0\xf3\xb7\x99\x25\x51\x21\x1d\x49\x8a\x1b\xc3\x13\xdc\xa0\xeb\x6b\xbe\x65\x32\x31\xf0\xe7\xa2\x77\xd2\x07\xe5\x1c\x24\x7e\x2e\x2b\x8c\x5a\xfd\x3b\x92\x51\xba\xf2\x9b\x9b\x19\x93\xb7\x31\x4f\x90\x93\xaf\x0c\x4f\xb5\x02\xdc\x98\xd5\x3f\xff\x7b\x42\xdf\x43\x37\x79\xd3\x1a\x81\x3f\x04\x4f\xd1\x38\x93\x57\x00\xb0\xb2\xf3\xb1\xba\x64\xae\x4b\x94\x29\x26\xce\x04\xf2\x83\x52\x28\x19\x0e\x4f\xe8\x71\x75\x62\x0a\xb8\xec\x47\x76\x7b\xab\x61\xa1\xdd\xce\x19\x66\x92\x7c\x70\xb2\xd9\xb2\x08\xb3\xba\x1f\x0f\xdf\x6e\x10\xc3\x0b\xf9\xba\xdb\x6f\xa7\xaa\x4f\x67\x5a\x95\x64\xb5\x52\xb9\xf0\xdf\x00\x00\x00\xff\xff\x75\x1a\x14\x61\x27\x03\x00\x00")

// route_twirp_eclier_routes_deleteLuaBytes returns the bytes of the embedded
// route_twirp_eclier_routes_delete.lua asset via bindataRead.
func route_twirp_eclier_routes_deleteLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_routes_deleteLua,
		"route_twirp_eclier_routes_delete.lua",
	)
}

// route_twirp_eclier_routes_deleteLua pairs the asset bytes with synthetic
// file metadata (mode 420 = 0644 octal, fixed modification time).
func route_twirp_eclier_routes_deleteLua() (*asset, error) {
	bytes, err := route_twirp_eclier_routes_deleteLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_routes_delete.lua", size: 807, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// _route_twirp_eclier_routes_getLua is the embedded, gzip-compressed
// contents of route_twirp_eclier_routes_get.lua.
var _route_twirp_eclier_routes_getLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x84\x52\x4d\x6b\xdc\x30\x10\xbd\xeb\x57\x3c\x74\x5a\x43\x6c\xd2\x43\x2f\x06\x9f\x9a\x50\x0a\xa5\x81\x90\x9e\x9a\x50\x14\x79\x6c\x8b\x38\x92\x33\x92\x9c\xf4\xdf\x97\x91\xdd\xdd\x6d\x21\xf4\xb0\xb0\x9a\xf7\x31\x6f\xc6\x53\xd7\xf8\x14\x7a\xc2\x48\x9e\xd8\x24\xea\xf1\xf8\x0b\x0b\x87\x14\x6c\x3d\x92\xaf\xd3\xab\xe3\xe5\x27\xd9\xd9\x11\x63\xfd\xd8\x5c\x36\x97\x57\x37\xf8\x76\x73\x87\xeb\xab\x2f\x77\xaa\xae\x11\x43\x66\x4b\x2d\x38\xe4\x44\x4d\x91\x2a\x15\x2d\xbb\x25\x35\x2b\xf1\x23\x3a\xe8\x82\xc5\x76\xa4\xa4\xff\x40\x13\xcd\x8b\x40\xd7\x6f\x64\x05\xc4\x33\xa5\x29\xf4\x18\x29\x21\x78\x44\xe2\xd5\x59\xda\x5c\x23\x86\xc0\x28\x51\xb0\x18\xfb\x64\x46\xc2\x1b\x09\xa5\xc9\xb1\xd9\x1a\x6f\xc4\xa3\xbd\xc9\x69\x0a\x2c\x0d\x9e\x8d\x9d\x9c\xa7\xfa\x38\xa1\x3e\x4b\x17\x5d\xf0\x42\xda\x26\x3b\x22\x39\x4a\x8b\x0e\x5a\x2b\x35\x07\x6b\x66\x0c\xb3\x19\xd1\x81\xe9\x25\x3b\x26\x68\x79\xeb\x1d\x8b\xab\x3d\x87\xe2\x6a\x4f\xb2\x88\xae\x68\x1b\x4f\xaf\x87\x4a\xc9\xc2\xe4\xb9\x4d\xf4\x99\xd2\xad\xc4\xbe\xa5\x97\x4c\x31\xa9\x21\xb6\x31\xb1\xf3\xe3\x41\x67\x9f\x23\xf5\xfa\x02\x5a\x7e\xab\x99\x33\x15\xc9\x33\xc5\x92\xcd\xf0\x88\x9d\x53\x9d\xeb\xdc\x7f\x34\x4e\xf8\xff\x8e\x39\xc4\xb6\xfc\x95\x80\x43\xf6\x36\xc9\x56\x38\xfb\x83\xe1\xb1\x52\x80\x1b\x44\xfb\xe3\xc3\x03\xba\x0e\xba\x96\x4f\xa7\x11\xf8\xaf\xe2\x5e\x4d\x13\x79\x05\x00\x0b\x3b\x9f\x0e\x27\xe7\xaa\x54\x99\x52\x66\x21\x90\xef\x95\x42\x71\xb8\x7c\x40\x87\xb3\x83\x51\xc0\x69\xe7\x71\x8b\xb7\x18\x8e\xb4\xc5\x39\xc2\x4c\x51\x2e\x28\xae\xb6\x2c\xd7\x2c\xee\xfb\xed\xd7\x0b\xa4\xf0\x44\xbe\x6a\xb7\x7b\x38\x54\x72\x74\x87\xe2\x54\xa4\x5b\x2c\xed\xfa\xf6\x3e\xdd\x27\x8d\xa6\x41\x0a\xfb\xf2\xc4\xb0\x71\x7d\x89\xba\xf3\x2c\x93\x49\x81\xdf\x23\xef\xf0\xb9\x62\x0a\x31\xbd\x47\x17\xac\xaa\x94\xcc\xfe\x3b\x00\x00\xff\xff\x07\xd9\x95\x3a\x78\x03\x00\x00")

// route_twirp_eclier_routes_getLuaBytes returns the bytes of the embedded
// route_twirp_eclier_routes_get.lua asset via bindataRead.
func route_twirp_eclier_routes_getLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_routes_getLua,
		"route_twirp_eclier_routes_get.lua",
	)
}

// route_twirp_eclier_routes_getLua pairs the asset bytes with synthetic
// file metadata (mode 420 = 0644 octal, fixed modification time).
func route_twirp_eclier_routes_getLua() (*asset, error) {
	bytes, err := route_twirp_eclier_routes_getLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_routes_get.lua", size: 888, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// _route_twirp_eclier_routes_get_allLua is the embedded, gzip-compressed
// contents of route_twirp_eclier_routes_get_all.lua.
var _route_twirp_eclier_routes_get_allLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x5c\x52\x4d\x6f\xdd\x20\x10\xbc\xf3\x2b\x46\x9c\x6c\xa9\xb6\x5e\x0f\xbd\x3c\xc9\xa7\x26\x87\x4a\x55\x22\x55\xe9\xa9\x89\x22\xc2\x5b\xdb\x28\x0e\xb8\xcb\xe2\xa4\xff\xbe\x02\xde\x57\x7b\x83\x9d\xd9\xd9\x19\x96\xae\xc3\xd7\x70\x20\x4c\xe4\x89\x8d\xd0\x01\x2f\x7f\xb0\x72\x90\x60\xbb\x89\x7c\x27\xef\x8e\xd7\x67\xb2\x8b\x23\xc6\xf6\xa5\xdf\xf5\xbb\x9b\x7b\xdc\xdd\x3f\xe0\xf6\xe6\xdb\x83\xea\x3a\xc4\x90\xd8\xd2\x1e\x1c\x92\x50\x5f\x5a\x95\x8a\x96\xdd\x2a\xfd\x46\xfc\x82\x01\xba\x60\x71\x3f\x91\x3c\x9b\x65\xd1\x27\x78\xa6\x65\xcd\xf0\xed\x07\xd9\x4c\xc0\x1b\xc9\x1c\x0e\x38\xf2\x10\x3c\x22\xf1\xe6\x2c\x55\xf5\x88\x31\x30\x8a\x25\xac\xc6\xbe\x9a\x89\xf0\x41\x99\xd2\xa7\xd8\x57\x03\x95\x78\x1e\x61\x92\xcc\x81\xf3\x90\x37\x63\x67\xe7\xa9\x3b\x27\xd5\x57\x2e\xa3\x0b\x3e\x93\x6a\xc2\x33\x92\x62\x1e\x31\x40\x6b\xa5\x96\x60\xcd\x82\x71\x31\x13\x06\x30\xfd\x4e\x8e\x09\x3a\xdf\xf5\x11\x8b\x9b\xbd\x86\xe2\x66\x2f\x6d\x11\x43\xe9\xed\x3d\xbd\x37\xad\xca\x0f\x97\xaf\x35\xd1\x9d\x5b\xd4\xff\x23\xc7\xb8\x2f\xc7\x4c\x1e\x93\xb7\x92\x1d\x72\xf2\x8d\xe1\xa9\x55\x80\x1b\x61\x78\xfa\xf5\xf9\x09\xc3\x00\xdd\xe5\xa7\xd4\x08\xfc\x4f\xf1\x58\x95\x99\xbc\x02\x80\x95\x9d\x97\xe6\xa2\xdc\x96\x2a\x93\x24\xce\x04\xf2\x07\xa5\x50\x14\x76\x4f\x18\x70\xb5\x44\x05\x5c\xf2\xc7\x6a\x6f\x35\x1c\xa9\xda\x39\xc3\x4c\x31\x6f\x34\x6e\xb6\x04\x35\xab\xfb\xf9\xe3\xfb\x27\x48\x78\x25\xdf\xee\xeb\x6e\x9a\xf6\xf4\x11\x9a\xa2\x56\xda\xab\xb5\xd3\x47\x79\x94\x47\xd1\xe8\x7b\x48\x88\xc2\xce\x4f\x4d\x16\x3e\xee\xb6\x6d\x55\x76\xfa\x37\x00\x00\xff\xff\xd4\xa1\xe0\x99\xba\x02\x00\x00")

// route_twirp_eclier_routes_get_allLuaBytes returns the bytes of the embedded
// route_twirp_eclier_routes_get_all.lua asset via bindataRead.
func route_twirp_eclier_routes_get_allLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_routes_get_allLua,
		"route_twirp_eclier_routes_get_all.lua",
	)
}

// route_twirp_eclier_routes_get_allLua pairs the asset bytes with synthetic
// file metadata (mode 420 = 0644 octal, fixed modification time).
func route_twirp_eclier_routes_get_allLua() (*asset, error) {
	bytes, err := route_twirp_eclier_routes_get_allLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_routes_get_all.lua", size: 698, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// _route_twirp_eclier_routes_putLua is the embedded, gzip-compressed
// contents of route_twirp_eclier_routes_put.lua.
var _route_twirp_eclier_routes_putLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x7c\x52\xcd\x6a\xdc\x3c\x14\xdd\xeb\x29\x0e\x5a\x8d\x21\x36\xf9\x16\xdf\xc6\xe0\x55\x93\x45\xa1\x34\x10\xd2\x55\x13\x8a\x22\x5f\xdb\x22\x8e\xe4\x5e\x49\x4e\xfa\xf6\xe5\xca\xce\xcc\xa4\x90\x2c\x0c\x96\xce\x8f\xce\xfd\xa9\x6b\x7c\x09\x3d\x61\x24\x4f\x6c\x12\xf5\x78\xfc\x83\x85\x43\x0a\xb6\x1e\xc9\xd7\xe9\xc5\xf1\xf2\x8b\xec\xec\x88\xb1\xfe\xdf\x5c\x36\x97\x57\x37\xf8\x7e\x73\x87\xeb\xab\xaf\x77\xaa\xae\x11\x43\x66\x4b\x2d\x38\xe4\x44\x4d\x91\x2a\x15\x2d\xbb\x25\x35\x2b\xf1\x23\x3a\xe8\x82\xc5\x76\xc9\x49\xbf\x41\x13\xcd\x8b\x40\xd7\xaf\x64\x05\xc4\x33\xa5\x29\xf4\x58\x72\x42\xf0\x88\xc4\xab\xb3\xb4\xb9\x46\x0c\x81\x51\xa2\x60\x31\xf6\xc9\x8c\x84\x57\x12\x4a\x93\x63\xb3\x3d\xbc\x11\x8f\xf6\x26\xa7\x29\xb0\x3c\xf0\x6c\xec\xe4\x3c\xd5\xc7\x0a\xf5\x59\xba\xe8\x82\x17\xd2\x56\xd9\x11\xc9\x51\x9e\xe8\xa0\xb5\x52\x73\xb0\x66\xc6\x30\x9b\x11\x1d\x98\x7e\x67\xc7\x04\x2d\x67\xbd\x63\x71\xb5\xe7\x50\x5c\xed\x49\x16\xd1\x15\x6d\xe3\xe9\xe5\x50\x29\x69\x98\x1c\xb7\x8a\x6e\x25\xb3\x1a\x62\x1b\x13\x3b\x3f\x1e\xf4\x14\x62\xd2\x17\xd0\xf2\xad\x66\xce\x54\x68\xcf\x14\x4b\x1e\xc3\x23\x0a\xa3\x3a\xd7\xb8\xfe\x73\x85\xeb\xdf\xf3\x2d\x93\x49\x81\x3f\x17\xbd\x91\x2a\xf5\x6f\x4b\x86\xd8\x96\x5f\x29\x66\xc8\xde\x26\xe9\x20\x67\x7f\x30\x3c\x56\x0a\x70\x83\x18\xfc\xfc\xef\x01\x5d\x07\x5d\xcb\x98\x35\x02\xbf\xbb\xdc\x6f\xd3\x44\x5e\x01\xc0\xc2\xce\xa7\xc3\xc9\xb9\x2a\xb7\x4c\x29\xb3\x10\xc8\xf7\x4a\xa1\x38\x5c\x3e\xa0\xc3\xd9\x72\x29\xe0\x34\x9f\xb8\xc5\x5b\x0c\x47\xda\xe2\x1c\x61\xa6\x28\xdb\x16\x57\x5b\x06\x61\x16\xf7\xe3\xf6\xdb\x05\x52\x78\x22\x5f\xb5\xdb\xee\x1c\x2a\x59\xd0\x43\x71\x2a\xd2\x2d\x96\x76\x7d\x7b\x9f\xee\x93\x46\xd3\x20\x85\xbd\x8d\x62\xd8\xb8\xbe\x44\xdd\x79\x7b\xcf\x3e\x22\xef\xf0\xb9\x42\x86\xf9\x11\x5d\xb0\xaa\x52\x52\xfb\xdf\x00\x00\x00\xff\xff\x86\x08\x46\x8f\xa4\x03\x00\x00")

// route_twirp_eclier_routes_putLuaBytes returns the bytes of the embedded
// route_twirp_eclier_routes_put.lua asset via bindataRead.
func route_twirp_eclier_routes_putLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_routes_putLua,
		"route_twirp_eclier_routes_put.lua",
	)
}

// route_twirp_eclier_routes_putLua pairs the asset bytes with synthetic
// file metadata (mode 420 = 0644 octal, fixed modification time).
func route_twirp_eclier_routes_putLua() (*asset, error) {
	bytes, err := route_twirp_eclier_routes_putLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_routes_put.lua", size: 932, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// _route_twirp_eclier_tokens_deactivateLua is the embedded, gzip-compressed
// contents of route_twirp_eclier_tokens_deactivate.lua.
var _route_twirp_eclier_tokens_deactivateLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x7c\x92\x41\x6f\xdb\x30\x0c\x85\xef\xfa\x15\x0f\x3a\xd9\x40\x6c\x64\x87\x5d\x02\xf8\xb4\xf6\x30\x60\x58\x81\x21\x3b\x0d\xc5\xa0\xc8\xb4\x23\xd4\x91\x3c\x52\x72\xdb\x7f\x3f\x48\xce\x92\x74\x40\x7b\xb3\xc9\xef\x91\x8f\x14\x9b\x06\x5f\x42\x4f\x18\xc9\x13\x9b\x48\x3d\x0e\xaf\x98\x39\xc4\x60\x9b\x91\x7c\x13\x9f\x1d\xcf\xbf\xc9\x4e\x8e\x18\xcb\xe7\x76\xdb\x6e\xef\x1e\xf0\xfd\x61\x8f\xfb\xbb\xaf\x7b\xd5\x34\x90\x90\xd8\xd2\x0e\x1c\x52\xa4\xb6\x48\x95\x12\xcb\x6e\x8e\xed\x42\x7c\x40\x07\x1d\xc3\x13\x79\xd9\xf5\x64\x6c\x74\x8b\x89\xa4\xff\x11\x47\x9a\xe6\x4c\xdc\xbf\x90\x4d\x91\x04\x27\x8a\xc7\xd0\xe3\x8a\x22\x78\x08\xf1\xe2\x2c\x61\xad\x83\x21\x30\x8a\x31\xcc\xc6\x3e\x99\x91\xf0\x42\x19\x69\x93\xb4\xab\x8d\x15\xbc\x74\x31\x29\x1e\x03\xe7\x3e\x27\x63\x8f\xce\x53\x73\x99\x57\xdf\x78\x15\x17\x7c\x86\xd6\x39\x2f\x99\x24\xb9\x45\x07\xad\x95\x9a\x82\x35\x13\x86\xc9\x8c\xe8\xc0\xf4\x27\x39\x26\xe8\xfc\xaf\xcf\x39\x59\xec\x6d\x4a\x16\x7b\x95\x09\xba\xa2\x6d\x3d\x3d\x57\xb5\xca\xeb\xcb\xbf\xeb\x44\xfb\xec\x59\x0d\xb2\x3b\x84\x30\x55\xba\xcc\x4f\x7a\x83\xc1\x4c\x42\x1b\xe8\xc5\x4c\x89\x0a\x79\x22\x29\x96\x0c\x8f\x38\x63\x75\x16\x4a\x64\xe7\xc7\x4a\xbb\x5e\x6f\xa0\xf5\xbb\x1a\xd7\xbf\xe5\x0f\xa1\x7f\xfd\x58\x51\x88\x1b\x8d\x54\x5a\x6c\x98\x49\xde\x97\x9c\xf3\xb5\xfa\x7f\x8d\x83\xec\xca\x67\x5e\xc0\x90\xbc\x8d\x79\xeb\x9c\x7c\x65\x78\xac\x15\xe0\x86\xac\xff\xf5\xe9\x11\x5d\x07\xdd\xe4\x0b\xd1\x08\xfc\x26\x78\x8e\xc6\x23\x79\x05\x00\x33\x3b\x1f\xab\x6b\xe5\xba\x44\x99\x62\xe2\x0c\x90\xef\x95\x42\xa9\xb0\x7d\x44\x87\x9b\xf3\x54\xc0\xf5\x4d\x65\xb5\x37\x1b\x16\x5a\xed\x5c\xd2\x4c\x92\x0f\x55\x16\x5b\x1e\xcf\xcc\xee\xe7\x8f\x6f\x9b\xf5\x22\xeb\xdd\x7a\x6f\x55\x7d\x73\xe2\x55\x29\x58\x2b\x95\x9b\xff\x0d\x00\x00\xff\xff\x41\x42\x7e\x72\x67\x03\x00\x00")

// route_twirp_eclier_tokens_deactivateLuaBytes returns the bytes of the
// embedded route_twirp_eclier_tokens_deactivate.lua asset via bindataRead.
func route_twirp_eclier_tokens_deactivateLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_tokens_deactivateLua,
		"route_twirp_eclier_tokens_deactivate.lua",
	)
}

// route_twirp_eclier_tokens_deactivateLua pairs the asset bytes with
// synthetic file metadata (mode 420 = 0644 octal, fixed modification time).
func route_twirp_eclier_tokens_deactivateLua() (*asset, error) {
	bytes, err := route_twirp_eclier_tokens_deactivateLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_tokens_deactivate.lua", size: 871, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// _route_twirp_eclier_tokens_deleteLua is the embedded, gzip-compressed
// contents of route_twirp_eclier_tokens_delete.lua.
var _route_twirp_eclier_tokens_deleteLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x7c\x52\xc1\x6a\xdc\x30\x10\xbd\xeb\x2b\x1e\x3a\xd9\x10\x9b\xed\xa1\x97\x05\x9f\x9a\x1c\x0a\xa5\x81\xb2\x3d\x95\x50\xb4\xf2\xd8\x2b\xe2\x95\xdc\x19\xc9\x49\xfe\xbe\x48\xde\xee\xba\x85\xf4\x60\xb0\xf4\xde\x9b\x79\x33\x7a\x4d\x83\x4f\xa1\x27\x8c\xe4\x89\x4d\xa4\x1e\xc7\x37\xcc\x1c\x62\xb0\xcd\x48\xbe\x89\x2f\x8e\xe7\x9f\x64\x27\x47\x8c\xe5\x63\xbb\x6b\x77\xf7\x8f\xf8\xfa\x78\xc0\xc3\xfd\xe7\x83\x6a\x1a\x48\x48\x6c\x69\x0f\x0e\x29\x52\x5b\xa4\x4a\x89\x65\x37\xc7\x76\x21\x3e\xa2\x83\x8e\xe1\x99\xbc\xec\x7b\x9a\x28\x92\xfe\x83\x9e\x68\x9a\x33\xfa\xf0\x4a\x36\x45\x12\x9c\x29\x9e\x42\x8f\x95\x86\xe0\x21\xc4\x8b\xb3\x84\x55\x8f\x21\x30\x8a\x21\xcc\xc6\x3e\x9b\x91\xf0\x4a\x99\xd2\x26\x69\xd7\xf6\x2b\xf1\xda\xc1\xa4\x78\x0a\x9c\x7b\x9c\x8d\x3d\x39\x4f\xcd\x75\x4e\xbd\xf1\x28\x2e\xf8\x4c\x5a\xe7\xbb\x22\x49\x72\x8b\x0e\x5a\x2b\x35\x05\x6b\x26\x0c\x93\x19\xd1\x81\xe9\x57\x72\x4c\xd0\xf9\xac\x2f\x98\x2c\x76\x0b\xc9\x62\x6f\x32\x41\x57\xb4\xad\xa7\x97\xaa\x56\x79\x6d\xf9\xb8\x4e\x74\xc8\x9e\xd5\x20\x7b\x89\xec\xfc\x58\x69\xd7\xeb\x3b\xe8\xfc\x2d\x66\x4a\x54\x48\x67\x92\xe2\xc6\xf0\x08\xd7\xeb\x7a\xcb\x3f\x86\xfe\xed\xff\x8a\xc2\xd8\x68\xa4\xd2\x62\xc3\x4c\xf2\xbe\xe4\x82\x17\xd1\x31\x84\xa9\xd2\xc6\x46\xb7\x90\xbe\xc3\x60\x26\xa1\x77\x85\x17\x5a\xad\xfe\x5d\xe3\x20\xfb\xf2\x9b\x17\x30\x24\x6f\x63\xde\x3a\x27\x5f\x19\x1e\x6b\x05\xb8\x21\xeb\x7f\x7c\x78\x42\xd7\x41\x37\x39\x1d\x1a\x81\xff\xba\xbc\xdc\xc6\x13\x79\x05\x00\x33\x3b\x1f\xab\x5b\xe5\xba\xdc\x32\xc5\xc4\x99\x40\xbe\x57\x0a\xa5\xc2\xee\x09\x1d\x36\xb1\x54\xc0\xed\x4d\x65\xb5\x37\x1b\x16\x5a\xed\x5c\x61\x26\xc9\x21\x95\xc5\x96\xc7\x33\xb3\xfb\xfe\xed\xcb\xdd\x9a\xc8\x7a\xbf\xe6\xad\xaa\x2f\xd1\xae\x4a\xb1\x5a\xa9\xdc\xf8\x77\x00\x00\x00\xff\xff\x50\xe4\x70\xe6\x5b\x03\x00\x00")

// route_twirp_eclier_tokens_deleteLuaBytes returns the bytes of the embedded
// route_twirp_eclier_tokens_delete.lua asset via bindataRead.
func route_twirp_eclier_tokens_deleteLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_tokens_deleteLua,
		"route_twirp_eclier_tokens_delete.lua",
	)
}

// route_twirp_eclier_tokens_deleteLua pairs the asset bytes with synthetic
// file metadata (mode 420 = 0644 octal, fixed modification time).
func route_twirp_eclier_tokens_deleteLua() (*asset, error) {
	bytes, err := route_twirp_eclier_tokens_deleteLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_tokens_delete.lua", size: 859, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// _route_twirp_eclier_tokens_getLua is the embedded, gzip-compressed
// contents of route_twirp_eclier_tokens_get.lua.
var _route_twirp_eclier_tokens_getLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x7c\x92\x41\x6b\xdc\x3e\x10\xc5\xef\xfa\x14\x0f\x9d\x6c\x88\x4d\xfe\x87\xff\xc5\xe0\x53\x13\x4a\xa1\x34\x10\xb6\xa7\x26\x14\xad\x3c\xf6\x8a\x78\x25\x67\x24\x39\xc9\xb7\x2f\x92\xb6\xbb\x6e\x61\x7b\x58\x58\xe9\xfd\xde\xe8\xcd\x78\x9a\x06\x9f\xdc\x40\x98\xc8\x12\xab\x40\x03\xf6\x1f\x58\xd8\x05\xa7\x9b\x89\x6c\x13\xde\x0c\x2f\x3f\x49\xcf\x86\x18\xeb\xff\xed\x6d\x7b\x7b\xf7\x80\x6f\x0f\x3b\xdc\xdf\x7d\xd9\x89\xa6\x81\x77\x91\x35\x75\x60\x17\x03\xb5\xd9\x2a\x84\xd7\x6c\x96\xd0\xae\xc4\x7b\xf4\x90\xc1\xbd\x90\xf5\xdd\x44\x41\xfe\x96\x0e\x34\x2f\x49\xba\x7f\x27\x1d\x03\x79\x1c\x29\x1c\xdc\x80\x89\x02\x9c\x85\x27\x5e\x8d\x26\x14\x27\x46\xc7\xc8\x51\xb0\x28\xfd\xa2\x26\xc2\x3b\x25\xa4\x8d\xbe\x2d\x0f\x17\xf0\x5c\x5e\xc5\x70\x70\x9c\x1e\x38\x2a\x7d\x30\x96\x9a\x73\x87\x72\x93\xce\x1b\x67\x13\x54\x3a\x3b\x2b\xd1\xa7\x27\x7a\x48\x29\xc4\xec\xb4\x9a\x31\xce\x6a\x42\x0f\xa6\xd7\x68\x98\x20\xd3\x59\x9e\x34\xbf\xea\xad\xe4\x57\x7d\xb1\x79\xf4\xd9\xdb\x5a\x7a\xab\x6a\x91\x06\x96\x8e\xa5\xa3\xcf\x14\x76\x29\xf6\x23\xbd\x46\xf2\x41\x8c\xbe\xf3\x81\x8d\x9d\x2a\x69\x06\x79\x03\x99\x7e\xab\x9a\x23\x65\xfc\x48\x3e\xe7\x52\x3c\xc1\x0c\xb2\xde\xf2\xb9\xfd\x7f\x5b\x0a\x52\x8b\xbf\x9b\x1c\x7d\x97\xff\xa6\x78\x63\xb4\x3a\xa4\x99\x70\xb4\x95\xe2\xa9\x16\x80\x19\x93\xfd\xc7\x7f\xcf\xe8\x7b\xc8\x26\x7d\x38\x09\xc7\x7f\x5c\x9e\x6e\xc3\x81\xac\x00\x80\x85\x8d\x0d\xd5\xa5\x72\x9d\x6f\x99\x42\xe4\x04\x90\x1d\x84\x40\xae\x70\xfb\x8c\x1e\x9b\x75\x11\xc0\x65\xe2\xbe\xc4\x5b\x14\x7b\x2a\x71\xce\x32\x93\x4f\xfb\xe3\x57\x9d\x47\xab\x16\xf3\xfd\xf1\xeb\x4d\x69\xb2\xee\xca\x36\x54\x75\x5a\xb9\x2a\x57\xca\xd6\x12\x4b\x9a\xa1\x7b\x0a\x4f\x41\xa2\x6d\x11\xdc\x69\x84\xa9\x60\x6b\x86\x1c\xf5\xc4\xed\xdd\xf0\x71\x8d\x4c\xda\x96\xf5\xda\x2d\xe4\xaf\xd1\x45\xdd\xf2\x4a\x07\xb3\xd2\x35\xbe\xa8\x75\x2d\xd2\xa4\x7e\x05\x00\x00\xff\xff\x6f\x53\x96\xcd\xa4\x03\x00\x00")

// route_twirp_eclier_tokens_getLuaBytes returns the bytes of the embedded
// route_twirp_eclier_tokens_get.lua asset via bindataRead.
func route_twirp_eclier_tokens_getLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_tokens_getLua,
		"route_twirp_eclier_tokens_get.lua",
	)
}

// route_twirp_eclier_tokens_getLua pairs the asset bytes with synthetic
// file metadata (mode 420 = 0644 octal, fixed modification time).
func route_twirp_eclier_tokens_getLua() (*asset, error) {
	bytes, err := route_twirp_eclier_tokens_getLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_tokens_get.lua", size: 932, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// _route_twirp_eclier_tokens_get_allLua is the embedded, gzip-compressed
// contents of route_twirp_eclier_tokens_get_all.lua.
var _route_twirp_eclier_tokens_get_allLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x5c\x52\x4d\x6f\xdd\x20\x10\xbc\xf3\x2b\x46\x9c\x6c\xa9\xb6\x5e\x0f\xbd\x3c\xc9\xa7\x26\x87\x4a\x55\x22\x55\xe9\xa9\x89\x22\xc2\x5b\xdb\x28\x0e\xb8\xcb\xe2\xa4\xff\xbe\x02\xde\x57\x7b\x83\x9d\xd9\xdd\x19\x86\xae\xc3\xd7\x70\x20\x4c\xe4\x89\x8d\xd0\x01\x2f\x7f\xb0\x72\x90\x60\xbb\x89\x7c\x27\xef\x8e\xd7\x67\xb2\x8b\x23\xc6\xf6\xa5\xdf\xf5\xbb\x9b\x7b\xdc\xdd\x3f\xe0\xf6\xe6\xdb\x83\xea\x3a\xc4\x90\xd8\xd2\x1e\x1c\x92\x50\x5f\x5a\x95\x8a\x96\xdd\x2a\xfd\x46\xfc\x82\x01\x5a\xc2\x2b\xf9\xb8\x9f\x48\x9e\xcd\xb2\xe8\x13\x3c\xd3\xb2\x66\xf8\xf6\x83\x6c\x12\x8a\x78\x23\x99\xc3\x01\x47\x1e\x82\x47\x24\xde\x9c\x25\xd4\x09\x18\x03\xa3\x48\xc2\x6a\xec\xab\x99\x08\x1f\x94\x29\x7d\x8a\x7d\x15\x50\x89\xe7\x15\x26\xc9\x1c\x38\x2f\x79\x33\x76\x76\x9e\xba\xb3\x53\x7d\xa5\x32\xba\xe0\x33\xa9\x3a\x3c\x23\x29\xe6\x15\x03\xb4\x56\x6a\x09\xd6\x2c\x18\x17\x33\x61\x00\xd3\xef\xe4\x98\xa0\xf3\x5d\x1f\xb1\xb8\xd9\x6b\x28\x6e\xf6\xd2\x16\x31\x94\xde\xde\xd3\x7b\xd3\xaa\xfc\x70\xf9\x5a\x1d\xdd\xb9\x45\xfd\xbf\x72\x8c\xfb\x72\xcc\xe4\x31\x79\x2b\x59\x21\x27\xdf\x18\x9e\x5a\x05\xb8\x11\x86\xa7\x5f\x9f\x9f\x30\x0c\xd0\x5d\x7e\x4a\x8d\xc0\xff\x14\x8f\x55\x99\xc9\x2b\x00\x58\xd9\x79\x69\x2e\x93\xdb\x52\x65\x92\xc4\x99\x40\xfe\xa0\x14\xca\x84\xdd\x13\x06\x5c\x85\xa8\x80\x8b\xff\x58\xe5\xad\x86\x23\x55\x39\x67\x98\x29\xe6\x44\xe3\x66\x8b\x51\xb3\xba\x9f\x3f\xbe\x7f\xaa\xe9\xb5\xfb\x9a\x4d\xd3\x9e\x3e\x42\x53\xa6\x95\xf6\x2a\xed\xf4\x51\x1e\xe5\x51\x34\xfa\x1e\x12\xa2\xb0\xf3\x53\x93\x07\x1f\xb3\x6d\x5b\x95\x95\xfe\x0d\x00\x00\xff\xff\xe9\x8f\x90\xdc\xba\x02\x00\x00")

// route_twirp_eclier_tokens_get_allLuaBytes returns the bytes of the embedded
// route_twirp_eclier_tokens_get_all.lua asset via bindataRead.
func route_twirp_eclier_tokens_get_allLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_tokens_get_allLua,
		"route_twirp_eclier_tokens_get_all.lua",
	)
}

// route_twirp_eclier_tokens_get_allLua pairs the asset bytes with synthetic
// file metadata (mode 420 = 0644 octal, fixed modification time).
func route_twirp_eclier_tokens_get_allLua() (*asset, error) {
	bytes, err := route_twirp_eclier_tokens_get_allLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_tokens_get_all.lua", size: 698, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// _route_twirp_eclier_tokens_putLua is the embedded, gzip-compressed
// contents of route_twirp_eclier_tokens_put.lua.
// NOTE(review): in the reviewed copy this literal was wrapped across two
// physical lines (invalid Go); it is rejoined into a single string token here.
var _route_twirp_eclier_tokens_putLua = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x7c\x92\x41\x6b\xdc\x30\x10\x85\xef\xfa\x15\x0f\x9d\x6c\x88\x4d\x7a\xe8\xc5\xe0\x53\x93\x43\xa1\x34\x50\xd2\x53\x13\x8a\x56\x1e\x7b\x45\xbc\x92\xab\x91\x9c\xe4\xdf\x17\x49\xee\xae\x5b\xd8\x1e\x16\xd6\x7a\xdf\x1b\x3d\xcd\x4c\xd3\xe0\x93\x1b\x08\x13\x59\xf2\x2a\xd0\x80\xc3\x3b\x16\xef\x82\xd3\xcd\x44\xb6\x09\xaf\xc6\x2f\x3f\x49\xcf\x86\x3c\xd6\x8f\xed\x6d\x7b\x7b\xf7\x80\xaf\x0f\x8f\xb8\xbf\xfb\xfc\x28\x9a\x06\xec\xa2\xd7\xd4\xc1\xbb\x18\xa8\xcd\x56\x21\x58\x7b\xb3\x84\x76\x25\x7f\x40\x0f\x19\xdc\x0b\x59\xee\x96\x18\xe4\x1f\xe9\x48\xf3\x92\xa4\xfb\x37\xd2\x31\x10\xe3\x44\xe1\xe8\x06\x2c\x31\xc0\x59\x30\xf9\xd5\x68\x42\x71\x62\x74\x1e\x39\x0a\x16\xa5\x5f\xd4\x44\x78\xa3\x84\xb4\x91\xdb\x72\x71\x01\xcf\xe5\x55\x0c\x47\xe7\xd3\x05\x27\xa5\x8f\xc6\x52\x73\x7e\xa1\xdc\xa5\x63\xe3\x6c\x82\xca\xcb\xce\x4a\xe4\x74\x45\x0f\x29\x85\x98\x9d\x56\x33\xc6\x59\x4d\xe8\xe1\xe9\x57\x34\x9e\x20\xd3\xb7\xdc\x34\x5e\xf5\x5e\xe2\x55\x5f\x6c\x8c\x3e\x7b\x5b\x4b\xaf\x55\x2d\x52\xc3\xd2\x67\x79\xd1\x63\xca\x2c\x46\xee\x38\x78\x63\xa7\x4a\x9a\x41\xde\x40\xa6\xdf\xaa\xe6\x48\x19\x3a\x11\xe7\x34\xca\x4f\x30\x83\xac\xf7\xfc\xc1\x0d\xef\xff\x77\x64\x62\xe7\xe1\x4a\xb2\x76\x0b\xf1\x75\xcb\xa6\x67\xd3\xc1\xb9\xb9\x92\x4a\x07\xb3\x92\xbc\xc1\xa8\x66\xa6\xab\xc6\x0d\xab\xc5\xbf\x6d\x1c\xb9\xcb\x7f\x53\x03\xc6\x68\x75\x48\x5d\xf7\xd1\x56\xca\x4f\xb5\x00\xcc\x98\xfc\x3f\x3e\x3c\xa3\xef\x21\x9b\xb4\x1a\x12\xce\xff\x75\xb8\x9d\x86\x23\x59\x01\x00\x8b\x37\x36\x54\x97\xca\x75\x3e\xf5\x14\xa2\x4f\x00\xd9\x41\x08\xe4\x0a\xb7\xcf\xe8\xb1\x5b\x48\x01\x5c\x66\xca\x25\xde\xa2\x3c\x53\x89\x73\x96\x3d\x71\xda\x50\x5e\x75\x1e\x9e\x5a\xcc\xf7\x6f\x5f\x6e\xca\x46\xd6\x5d\xd9\xb7\xaa\x4e\x4b\x5d\xe5\x4a\xd9\x5a\x62\x49\x33\x74\x4f\xe1\x29\x48\xb4\x2d\x82\xdb\xc6\x95\x0a\xb6\x66\xc8\x51\x37\x2e\x8d\xe7\x1a\x99\xb4\x3d\x5b\xe6\x72\x8d\x2e\xea\x9e\x2f\xe3\xb8\xc6\x17\xb5\xae\x45\xea\xd4\xef\x00\x00\x00\xff\xff\x38\x3f\xda\x56\x06\x04\x00\x00")

// route_twirp_eclier_tokens_putLuaBytes returns the bytes of the embedded
// route_twirp_eclier_tokens_put.lua asset via bindataRead.
func route_twirp_eclier_tokens_putLuaBytes() ([]byte, error) {
	return bindataRead(
		_route_twirp_eclier_tokens_putLua,
		"route_twirp_eclier_tokens_put.lua",
	)
}

// route_twirp_eclier_tokens_putLua pairs the asset bytes with synthetic
// file metadata (mode 420 = 0644 octal, fixed modification time).
func route_twirp_eclier_tokens_putLua() (*asset, error) {
	bytes, err := route_twirp_eclier_tokens_putLuaBytes()
	if err != nil {
		return nil, err
	}
	info := bindataFileInfo{name: "route_twirp_eclier_tokens_put.lua", size: 1030, mode: os.FileMode(420), modTime: time.Unix(1516605524, 0)}
	a := &asset{bytes: bytes, info: info}
	return a, nil
}
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
func Asset(name string) ([]byte, error) {
	// Asset names are stored slash-separated; normalize Windows-style input.
	key := strings.Replace(name, "\\", "/", -1)
	loader, ok := _bindata[key]
	if !ok {
		return nil, fmt.Errorf("Asset %s not found", name)
	}
	a, err := loader()
	if err != nil {
		return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
	}
	return a.bytes, nil
}
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
func MustAsset(name string) []byte {
	data, err := Asset(name)
	if err == nil {
		return data
	}
	panic("asset: Asset(" + name + "): " + err.Error())
}
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
func AssetInfo(name string) (os.FileInfo, error) {
	// Asset names are stored slash-separated; normalize Windows-style input.
	key := strings.Replace(name, "\\", "/", -1)
	loader, ok := _bindata[key]
	if !ok {
		return nil, fmt.Errorf("AssetInfo %s not found", name)
	}
	a, err := loader()
	if err != nil {
		return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
	}
	return a.info, nil
}
// AssetNames returns the names of the assets.
func AssetNames() []string {
	out := make([]string, 0, len(_bindata))
	for key := range _bindata {
		out = append(out, key)
	}
	return out
}
// _bindata is a table, holding each asset generator, mapped to its name.
// Keys are the slash-separated names that Asset and AssetInfo look up.
var _bindata = map[string]func() (*asset, error){
	"route_twirp_eclier_backends_kill.lua":     route_twirp_eclier_backends_killLua,
	"route_twirp_eclier_backends_list.lua":     route_twirp_eclier_backends_listLua,
	"route_twirp_eclier_routes_delete.lua":     route_twirp_eclier_routes_deleteLua,
	"route_twirp_eclier_routes_get.lua":        route_twirp_eclier_routes_getLua,
	"route_twirp_eclier_routes_get_all.lua":    route_twirp_eclier_routes_get_allLua,
	"route_twirp_eclier_routes_put.lua":        route_twirp_eclier_routes_putLua,
	"route_twirp_eclier_tokens_deactivate.lua": route_twirp_eclier_tokens_deactivateLua,
	"route_twirp_eclier_tokens_delete.lua":     route_twirp_eclier_tokens_deleteLua,
	"route_twirp_eclier_tokens_get.lua":        route_twirp_eclier_tokens_getLua,
	"route_twirp_eclier_tokens_get_all.lua":    route_twirp_eclier_tokens_get_allLua,
	"route_twirp_eclier_tokens_put.lua":        route_twirp_eclier_tokens_putLua,
}
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
//     data/
//       foo.txt
//       img/
//         a.png
//         b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
func AssetDir(name string) ([]string, error) {
	node := _bintree
	if name != "" {
		// Walk the tree one slash-separated path segment at a time.
		canonical := strings.Replace(name, "\\", "/", -1)
		for _, segment := range strings.Split(canonical, "/") {
			child := node.Children[segment]
			if child == nil {
				return nil, fmt.Errorf("Asset %s not found", name)
			}
			node = child
		}
	}
	// A non-nil Func marks a file node, which has no directory listing.
	if node.Func != nil {
		return nil, fmt.Errorf("Asset %s not found", name)
	}
	entries := make([]string, 0, len(node.Children))
	for entry := range node.Children {
		entries = append(entries, entry)
	}
	return entries, nil
}
// bintree is a node in the tree of embedded asset paths used by AssetDir:
// leaf (file) nodes carry the asset constructor in Func, while directory
// nodes have a nil Func and list their entries in Children.
type bintree struct {
	Func     func() (*asset, error)
	Children map[string]*bintree
}
// _bintree is the root of the asset name tree consumed by AssetDir. All
// assets here live at the top level, so the root's Children are all leaves.
var _bintree = &bintree{nil, map[string]*bintree{
	"route_twirp_eclier_backends_kill.lua":     &bintree{route_twirp_eclier_backends_killLua, map[string]*bintree{}},
	"route_twirp_eclier_backends_list.lua":     &bintree{route_twirp_eclier_backends_listLua, map[string]*bintree{}},
	"route_twirp_eclier_routes_delete.lua":     &bintree{route_twirp_eclier_routes_deleteLua, map[string]*bintree{}},
	"route_twirp_eclier_routes_get.lua":        &bintree{route_twirp_eclier_routes_getLua, map[string]*bintree{}},
	"route_twirp_eclier_routes_get_all.lua":    &bintree{route_twirp_eclier_routes_get_allLua, map[string]*bintree{}},
	"route_twirp_eclier_routes_put.lua":        &bintree{route_twirp_eclier_routes_putLua, map[string]*bintree{}},
	"route_twirp_eclier_tokens_deactivate.lua": &bintree{route_twirp_eclier_tokens_deactivateLua, map[string]*bintree{}},
	"route_twirp_eclier_tokens_delete.lua":     &bintree{route_twirp_eclier_tokens_deleteLua, map[string]*bintree{}},
	"route_twirp_eclier_tokens_get.lua":        &bintree{route_twirp_eclier_tokens_getLua, map[string]*bintree{}},
	"route_twirp_eclier_tokens_get_all.lua":    &bintree{route_twirp_eclier_tokens_get_allLua, map[string]*bintree{}},
	"route_twirp_eclier_tokens_put.lua":        &bintree{route_twirp_eclier_tokens_putLua, map[string]*bintree{}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
	data, err := Asset(name)
	if err != nil {
		return err
	}
	info, err := AssetInfo(name)
	if err != nil {
		return err
	}
	// Ensure the parent directory exists before writing the file.
	if err := os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755)); err != nil {
		return err
	}
	target := _filePath(dir, name)
	if err := ioutil.WriteFile(target, data, info.Mode()); err != nil {
		return err
	}
	// Reapply the recorded modification time to the restored file.
	return os.Chtimes(target, info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
	children, err := AssetDir(name)
	if err != nil {
		// name is a single file, not a directory.
		return RestoreAsset(dir, name)
	}
	// name is a directory: restore each entry beneath it.
	for _, child := range children {
		if err := RestoreAssets(dir, filepath.Join(name, child)); err != nil {
			return err
		}
	}
	return nil
}
// _filePath joins dir with the asset name, first converting any
// backslashes in name to forward slashes so Windows-style asset names
// resolve to the same on-disk path.
func _filePath(dir, name string) string {
	canonical := strings.Replace(name, "\\", "/", -1)
	parts := append([]string{dir}, strings.Split(canonical, "/")...)
	return filepath.Join(parts...)
}

View File

@ -1,31 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto

-- CLI subcommand: invokes the "kill" method of the backends twirp service.
-- The `script` table is provided by the host environment.
script.verb = "backends:kill"
script.help = "Executes method kill on service backends for twirp package xeserv.us.route.backends"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""

-- "flag" and "svc" are host-provided modules.
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()

-- flags for BackendId
fs:string("id", "", "value for message arg id")

-- Replace the placeholder usage above with the generated flag help text.
script.usage = fs:usage()

-- run is the entry point; arg is the raw argument vector.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end

	arg[0] = script.verb

	local flags = fs:parse(arg)
	-- apiURL and token are globals injected by the host; the response of
	-- kill is intentionally ignored (no printable fields).
	local resp = svc.new(apiURL, token):backends():kill(flags)
end

View File

@ -1,34 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto

-- CLI subcommand: invokes the "list" method of the backends twirp service
-- and prints the response fields.
script.verb = "backends:list"
script.help = "Executes method list on service backends for twirp package xeserv.us.route.backends"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""

-- "flag" and "svc" are host-provided modules.
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()

-- flags for BackendSelector
fs:string("domain", "", "value for message arg domain")
fs:string("user", "", "value for message arg user")

-- Replace the placeholder usage above with the generated flag help text.
script.usage = fs:usage()

-- run is the entry point; arg is the raw argument vector.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end

	arg[0] = script.verb

	local flags = fs:parse(arg)
	-- apiURL and token are globals injected by the host.
	local resp = svc.new(apiURL, token):backends():list(flags)
	print("bs:\t\t" .. tostring(resp.bs))
	print("backends:\t\t" .. tostring(resp.backends))
end

View File

@ -1,33 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto

-- CLI subcommand: invokes the "delete" method of the routes twirp service.
script.verb = "routes:delete"
script.help = "Executes method delete on service routes for twirp package xeserv.us.route.routes"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""

-- "flag" and "svc" are host-provided modules.
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()

-- flags for Route
fs:string("id", "", "value for message arg id")
fs:string("creator", "", "value for message arg creator")
fs:string("host", "", "value for message arg host")

-- Replace the placeholder usage above with the generated flag help text.
script.usage = fs:usage()

-- run is the entry point; arg is the raw argument vector.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end

	arg[0] = script.verb

	local flags = fs:parse(arg)
	-- apiURL and token are globals injected by the host; delete's response
	-- is intentionally ignored (no printable fields).
	local resp = svc.new(apiURL, token):routes():delete(flags)
end

View File

@ -1,35 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto

-- CLI subcommand: invokes the "get" method of the routes twirp service
-- and prints the returned route's fields.
script.verb = "routes:get"
script.help = "Executes method get on service routes for twirp package xeserv.us.route.routes"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""

-- "flag" and "svc" are host-provided modules.
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()

-- flags for GetRouteRequest
fs:string("unused", "", "value for message arg unused")
fs:string("id", "", "value for message arg id")

-- Replace the placeholder usage above with the generated flag help text.
script.usage = fs:usage()

-- run is the entry point; arg is the raw argument vector.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end

	arg[0] = script.verb

	local flags = fs:parse(arg)
	-- apiURL and token are globals injected by the host.
	local resp = svc.new(apiURL, token):routes():get(flags)
	print("id:\t\t" .. tostring(resp.id))
	print("creator:\t\t" .. tostring(resp.creator))
	print("host:\t\t" .. tostring(resp.host))
end

View File

@ -1,31 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto

-- CLI subcommand: invokes the "get_all" method of the routes twirp service
-- and prints the returned collection.
script.verb = "routes:get_all"
script.help = "Executes method get_all on service routes for twirp package xeserv.us.route.routes"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""

-- "flag" and "svc" are host-provided modules.
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()

-- flags for Nil (the request message has no fields)
-- Replace the placeholder usage above with the generated flag help text.
script.usage = fs:usage()

-- run is the entry point; arg is the raw argument vector.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end

	arg[0] = script.verb

	local flags = fs:parse(arg)
	-- apiURL and token are globals injected by the host.
	local resp = svc.new(apiURL, token):routes():get_all(flags)
	print("routes:\t\t" .. tostring(resp.routes))
end

View File

@ -1,36 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto

-- CLI subcommand: invokes the "put" method of the routes twirp service
-- and prints the stored route's fields.
script.verb = "routes:put"
script.help = "Executes method put on service routes for twirp package xeserv.us.route.routes"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""

-- "flag" and "svc" are host-provided modules.
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()

-- flags for Route
fs:string("host", "", "value for message arg host")
fs:string("id", "", "value for message arg id")
fs:string("creator", "", "value for message arg creator")

-- Replace the placeholder usage above with the generated flag help text.
script.usage = fs:usage()

-- run is the entry point; arg is the raw argument vector.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end

	arg[0] = script.verb

	local flags = fs:parse(arg)
	-- apiURL and token are globals injected by the host.
	local resp = svc.new(apiURL, token):routes():put(flags)
	print("id:\t\t" .. tostring(resp.id))
	print("creator:\t\t" .. tostring(resp.creator))
	print("host:\t\t" .. tostring(resp.host))
end

View File

@ -1,34 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto
-- Eclier subcommand "tokens:deactivate": calls deactivate on the tokens
-- service with the Token described by the flags.
script.verb = "tokens:deactivate"
script.help = "Executes method deactivate on service tokens for twirp package xeserv.us.route.tokens"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()
-- flags for Token
fs:bool("active", false, "value for message arg active")
fs:string("id", "", "value for message arg id")
fs:string("body", "", "value for message arg body")
fs:strings("scopes", "value for message arg scopes")
script.usage = fs:usage()
-- run is the entry point the host calls with the raw CLI argument list.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end
	arg[0] = script.verb
	local flags = fs:parse(arg)
	-- NOTE(review): resp is unused; the generated code discards the RPC result.
	local resp = svc.new(apiURL, token):tokens():deactivate(flags)
end

View File

@ -1,34 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto
-- Eclier subcommand "tokens:delete": calls delete on the tokens service
-- with the Token described by the flags.
script.verb = "tokens:delete"
script.help = "Executes method delete on service tokens for twirp package xeserv.us.route.tokens"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()
-- flags for Token
fs:string("id", "", "value for message arg id")
fs:string("body", "", "value for message arg body")
fs:strings("scopes", "value for message arg scopes")
fs:bool("active", false, "value for message arg active")
script.usage = fs:usage()
-- run is the entry point the host calls with the raw CLI argument list.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end
	arg[0] = script.verb
	local flags = fs:parse(arg)
	-- NOTE(review): resp is unused; the generated code discards the RPC result.
	local resp = svc.new(apiURL, token):tokens():delete(flags)
end

View File

@ -1,36 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto
-- Eclier subcommand "tokens:get": fetches a single token by id/token and
-- prints its fields.
script.verb = "tokens:get"
script.help = "Executes method get on service tokens for twirp package xeserv.us.route.tokens"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()
-- flags for GetTokenRequest
fs:string("id", "", "value for message arg id")
fs:string("token", "", "value for message arg token")
script.usage = fs:usage()
-- run is the entry point the host calls with the raw CLI argument list.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end
	arg[0] = script.verb
	local flags = fs:parse(arg)
	local resp = svc.new(apiURL, token):tokens():get(flags)
	print("id:\t\t" .. tostring(resp.id))
	print("body:\t\t" .. tostring(resp.body))
	print("scopes:\t\t" .. tostring(resp.scopes))
	print("active:\t\t" .. tostring(resp.active))
end

View File

@ -1,31 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto
-- Eclier subcommand "tokens:get_all": fetches and prints every token.
script.verb = "tokens:get_all"
script.help = "Executes method get_all on service tokens for twirp package xeserv.us.route.tokens"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()
-- flags for Nil
script.usage = fs:usage()
-- run is the entry point the host calls with the raw CLI argument list.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end
	arg[0] = script.verb
	local flags = fs:parse(arg)
	local resp = svc.new(apiURL, token):tokens():get_all(flags)
	print("tokens:\t\t" .. tostring(resp.tokens))
end

View File

@ -1,38 +0,0 @@
-- Code generated by protoc-gen-twirp_eclier v5.0.0. DO NOT EDIT.
-- source: route.proto
-- Eclier subcommand "tokens:put": upserts a Token via the tokens service
-- and prints the stored record.
script.verb = "tokens:put"
script.help = "Executes method put on service tokens for twirp package xeserv.us.route.tokens"
script.author = "machine-generated"
script.version = "v5.0.0"
script.usage = ""
local flag = require "flag"
local svc = require "svc"
local fs = flag.new()
-- flags for Token
fs:string("id", "", "value for message arg id")
fs:string("body", "", "value for message arg body")
fs:strings("scopes", "value for message arg scopes")
fs:bool("active", false, "value for message arg active")
script.usage = fs:usage()
-- run is the entry point the host calls with the raw CLI argument list.
function run(arg)
	if arg[1] == "-help" or arg[1] == "--help" then
		print(fs:usage())
		return
	end
	arg[0] = script.verb
	local flags = fs:parse(arg)
	local resp = svc.new(apiURL, token):tokens():put(flags)
	print("id:\t\t" .. tostring(resp.id))
	print("body:\t\t" .. tostring(resp.body))
	print("scopes:\t\t" .. tostring(resp.scopes))
	print("active:\t\t" .. tostring(resp.active))
end

View File

@ -3,5 +3,4 @@
protoc -I. \
--go_out=:. \
--twirp_out=. \
--twirp_eclier_out=./eclier \
route.proto

View File

@ -1,7 +0,0 @@
Copyright (C) 2014 Thomas Rooney
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,64 +0,0 @@
# Gexpect
Gexpect is a pure golang expect-like module.
It makes it simpler to create and control other terminal applications.
child, err := gexpect.Spawn("python")
if err != nil {
panic(err)
}
child.Expect(">>>")
child.SendLine("print 'Hello World'")
child.Interact()
child.Close()
## Examples
`Spawn` handles the argument parsing from a string
child.Spawn("/bin/sh -c 'echo \"my complicated command\" | tee log | cat > log2'")
child.ReadLine() // ReadLine() (string, error)
child.ReadUntil(' ') // ReadUntil(delim byte) ([]byte, error)
`ReadLine`, `ReadUntil` and `SendLine` send strings from/to `stdout/stdin` respectively
child, _ := gexpect.Spawn("cat")
child.SendLine("echoing process_stdin") // SendLine(command string) (error)
msg, _ := child.ReadLine() // msg = echoing process_stdin
`Wait` and `Close` allow for graceful and ungraceful termination.
child.Wait() // Waits until the child terminates naturally.
child.Close() // Sends a kill command
`AsyncInteractChannels` spawns two go routines to pipe into and from `stdout`/`stdin`, allowing for some usecases to be a little simpler.
child, _ := gexpect.Spawn("sh")
sender, receiver := child.AsyncInteractChannels()
sender <- "echo Hello World\n" // Send to stdin
line, open := <- receiver // Receive a line from stdout/stderr
// When the subprocess stops (e.g. with child.Close()) , receiver is closed
if open {
fmt.Printf("Received %s", line)
}
`ExpectRegex` uses golang's internal regex engine to wait until a match from the process with the given regular expression (or an error on process termination with no match).
child, _ := gexpect.Spawn("echo accb")
match, _ := child.ExpectRegex("a..b")
// (match=true)
`ExpectRegexFind` allows for groups to be extracted from process stdout. The first element is an array of containing the total matched text, followed by each subexpression group match.
child, _ := gexpect.Spawn("echo 123 456 789")
result, _ := child.ExpectRegexFind(`\d+ (\d+) (\d+)`)
// result = []string{"123 456 789", "456", "789"}
See `gexpect_test.go` and the `examples` folder for full syntax
## Credits
github.com/kballard/go-shellquote
github.com/kr/pty
KMP Algorithm: "http://blog.databigbang.com/searching-for-substrings-in-streams-a-slight-modification-of-the-knuth-morris-pratt-algorithm-in-haxe/"

View File

@ -1,449 +0,0 @@
// +build !windows
package gexpect
import (
"bytes"
"errors"
"fmt"
"io"
"os"
"os/exec"
"regexp"
"time"
"unicode/utf8"
shell "github.com/kballard/go-shellquote"
"github.com/kr/pty"
)
var (
	// ErrEmptySearch is returned by Expect when the search string is empty.
	ErrEmptySearch = errors.New("empty search string")
)
// ExpectSubprocess is a child process running under a pseudo-terminal,
// together with the buffered reader used for expect-style matching.
type ExpectSubprocess struct {
	Cmd *exec.Cmd // underlying command; may be tweaked before Start

	buf *buffer // pty-backed read buffer with put-back support

	outputBuffer []byte // non-nil only between Capture() and Collect()
}
// buffer reads from the child's pty (f) through a put-back buffer (b),
// optionally mirroring every decoded rune into collection.
type buffer struct {
	f          *os.File     // pty master file
	b          bytes.Buffer // bytes pushed back for re-reading; drained first
	collect    bool         // when true, ReadRune records runes in collection
	collection bytes.Buffer // runes gathered while collect is true
}
// StartCollecting makes subsequent ReadRune calls record what they decode.
func (buf *buffer) StartCollecting() {
	buf.collect = true
}
// StopCollecting turns rune collection off and returns everything collected
// since StartCollecting, resetting the internal collection buffer.
func (buf *buffer) StopCollecting() (result string) {
	// bytes.Buffer.String is the idiomatic equivalent of
	// string(buf.collection.Bytes()) and produces the same copy.
	result = buf.collection.String()
	buf.collect = false
	buf.collection.Reset()
	return result
}
// Read implements io.Reader: it first drains any put-back bytes from buf.b,
// then tops the chunk up from the pty file.
func (buf *buffer) Read(chunk []byte) (int, error) {
	nread := 0
	if buf.b.Len() > 0 {
		n, err := buf.b.Read(chunk)
		if err != nil {
			return n, err
		}
		if n == len(chunk) {
			// chunk is already full; no need to touch the file
			return n, nil
		}
		nread = n
	}
	fn, err := buf.f.Read(chunk[nread:])
	return fn + nread, err
}
// ReadRune implements io.RuneReader over the put-back buffer and the pty.
// A rune may be split between buf.b and buf.f; bytes are accumulated until
// a full UTF-8 sequence is available. While collection is on, each decoded
// rune is recorded.
func (buf *buffer) ReadRune() (r rune, size int, err error) {
	l := buf.b.Len()

	chunk := make([]byte, utf8.UTFMax)
	if l > 0 {
		n, err := buf.b.Read(chunk)
		if err != nil {
			return 0, 0, err
		}
		if utf8.FullRune(chunk[:n]) {
			r, rL := utf8.DecodeRune(chunk)
			if n > rL {
				// surplus bytes belong to the next rune; push them back
				buf.PutBack(chunk[rL:n])
			}
			if buf.collect {
				buf.collection.WriteRune(r)
			}
			return r, rL, nil
		}
	}
	// else add bytes from the file, then try that
	for l < utf8.UTFMax {
		fn, err := buf.f.Read(chunk[l : l+1])
		if err != nil {
			return 0, 0, err
		}
		l = l + fn

		if utf8.FullRune(chunk[:l]) {
			r, rL := utf8.DecodeRune(chunk)
			if buf.collect {
				buf.collection.WriteRune(r)
			}
			return r, rL, nil
		}
	}
	// Fixed error text: was "File is not a valid UTF=8 encoding" (typo, and
	// Go error strings are lowercase by convention).
	return 0, 0, errors.New("file is not a valid UTF-8 encoding")
}
// PutBack pushes chunk to the front of the pending bytes so it is the next
// data returned by Read/ReadRune.
func (buf *buffer) PutBack(chunk []byte) {
	if len(chunk) == 0 {
		return
	}
	if buf.b.Len() == 0 {
		buf.b.Write(chunk)
		return
	}
	// Rebuild the pending buffer as chunk followed by what was queued.
	rest := append([]byte(nil), buf.b.Bytes()...)
	buf.b.Reset()
	buf.b.Write(chunk)
	buf.b.Write(rest)
}
// SpawnAtDirectory parses command, sets the child's working directory to
// directory, and starts it under a pty.
func SpawnAtDirectory(command string, directory string) (*ExpectSubprocess, error) {
	expect, err := _spawn(command)
	if err != nil {
		return nil, err
	}
	expect.Cmd.Dir = directory
	return _start(expect)
}
// Command parses command into an ExpectSubprocess without starting it, so
// the caller may adjust Cmd (e.g. environment, Dir) before calling Start.
func Command(command string) (*ExpectSubprocess, error) {
	return _spawn(command)
}
// Start launches the previously-built subprocess under a pty.
func (expect *ExpectSubprocess) Start() error {
	if _, err := _start(expect); err != nil {
		return err
	}
	return nil
}
// Spawn parses command and immediately starts it under a pty.
func Spawn(command string) (*ExpectSubprocess, error) {
	expect, err := _spawn(command)
	if err == nil {
		return _start(expect)
	}
	return nil, err
}
// Close kills the child process and then closes the pty file handle.
func (expect *ExpectSubprocess) Close() error {
	if err := expect.Cmd.Process.Kill(); err != nil {
		return err
	}
	return expect.buf.f.Close()
}
// AsyncInteractChannels spawns two goroutines that bridge the subprocess to
// a pair of channels: lines read from the child arrive on receive, and
// strings written to send are forwarded to the child's stdin. receive is
// closed when the child's output ends.
// NOTE(review): the reader goroutine runs until ReadLine fails; closing the
// send channel stops only the writer goroutine.
func (expect *ExpectSubprocess) AsyncInteractChannels() (send chan string, receive chan string) {
	receive = make(chan string)
	send = make(chan string)

	go func() {
		for {
			str, err := expect.ReadLine()
			if err != nil {
				// child output ended (or read error): signal via close
				close(receive)
				return
			}
			receive <- str
		}
	}()

	go func() {
		for {
			select {
			case sendCommand, exists := <-send:
				{
					if !exists {
						// send channel closed by the caller
						return
					}
					err := expect.Send(sendCommand)
					if err != nil {
						receive <- "gexpect Error: " + err.Error()
						return
					}
				}
			}
		}
	}()

	return
}
// ExpectRegex blocks until the child's output matches regex, streaming the
// output through regexp.MatchReader. It returns false with an error on
// EOF/read failure before a match.
func (expect *ExpectSubprocess) ExpectRegex(regex string) (bool, error) {
	return regexp.MatchReader(regex, expect.buf)
}
// expectRegexFind streams the child's output through regex and returns the
// submatch strings (index 0 is the whole match), plus all output consumed up
// to and including the match. Output read past the end of the match is put
// back so later reads see it.
// NOTE(review): the output parameter is currently unused.
func (expect *ExpectSubprocess) expectRegexFind(regex string, output bool) ([]string, string, error) {
	re, err := regexp.Compile(regex)
	if err != nil {
		return nil, "", err
	}
	expect.buf.StartCollecting()
	pairs := re.FindReaderSubmatchIndex(expect.buf)
	stringIndexedInto := expect.buf.StopCollecting()
	l := len(pairs)
	numPairs := l / 2
	result := make([]string, numPairs)
	// convert indexes to strings
	for i := 0; i < numPairs; i += 1 {
		result[i] = stringIndexedInto[pairs[i*2]:pairs[i*2+1]]
	}
	if len(result) == 0 {
		err = fmt.Errorf("ExpectRegex didn't find regex '%v'.", regex)
	} else {
		// The number in pairs[1] is an index of a first
		// character outside the whole match
		putBackIdx := pairs[1]
		if len(stringIndexedInto) > putBackIdx {
			stringToPutBack := stringIndexedInto[putBackIdx:]
			stringIndexedInto = stringIndexedInto[:putBackIdx]
			expect.buf.PutBack([]byte(stringToPutBack))
		}
	}
	return result, stringIndexedInto, err
}
// expectTimeoutRegexFind is expectRegexFind with a deadline: whichever of
// the match or the timer finishes first wins.
// NOTE(review): both goroutines write result/out/err and the loser leaks
// blocked on the unbuffered channel t — there is a data race here if the
// timeout fires while the match goroutine is completing; confirm upstream.
func (expect *ExpectSubprocess) expectTimeoutRegexFind(regex string, timeout time.Duration) (result []string, out string, err error) {
	t := make(chan bool)
	go func() {
		result, out, err = expect.ExpectRegexFindWithOutput(regex)
		t <- false
	}()
	go func() {
		time.Sleep(timeout)
		err = fmt.Errorf("ExpectRegex timed out after %v finding '%v'.\nOutput:\n%s", timeout, regex, expect.Collect())
		t <- true
	}()
	<-t
	return result, out, err
}
// ExpectRegexFind waits for regex to match the child's output and returns
// the submatch strings (index 0 is the full match).
func (expect *ExpectSubprocess) ExpectRegexFind(regex string) ([]string, error) {
	matches, _, err := expect.expectRegexFind(regex, false)
	return matches, err
}
// ExpectTimeoutRegexFind is ExpectRegexFind bounded by timeout.
func (expect *ExpectSubprocess) ExpectTimeoutRegexFind(regex string, timeout time.Duration) ([]string, error) {
	matches, _, err := expect.expectTimeoutRegexFind(regex, timeout)
	return matches, err
}
// ExpectRegexFindWithOutput is ExpectRegexFind but also returns all output
// consumed up to and including the match.
func (expect *ExpectSubprocess) ExpectRegexFindWithOutput(regex string) ([]string, string, error) {
	return expect.expectRegexFind(regex, true)
}
// ExpectTimeoutRegexFindWithOutput is ExpectRegexFindWithOutput bounded by
// timeout.
func (expect *ExpectSubprocess) ExpectTimeoutRegexFindWithOutput(regex string, timeout time.Duration) ([]string, string, error) {
	return expect.expectTimeoutRegexFind(regex, timeout)
}
// buildKMPTable computes the Knuth-Morris-Pratt partial-match ("failure")
// table for searchString. table[0] is the sentinel -1; for i >= 1, table[i]
// is the length of the longest proper prefix of searchString that is also a
// suffix of searchString[:i]. The table always has at least two entries.
func buildKMPTable(searchString string) []int {
	length := len(searchString)
	if length < 2 {
		// the sentinel entries table[0] and table[1] must always exist
		length = 2
	}
	table := make([]int, length)
	table[0] = -1
	table[1] = 0

	pos := 2 // next table slot to fill
	cnd := 0 // length of the current candidate prefix
	for pos < len(searchString) {
		if searchString[pos-1] == searchString[cnd] {
			// candidate prefix extends by one character
			cnd++
			table[pos] = cnd
			pos++
		} else if cnd > 0 {
			// fall back to the next shorter candidate prefix
			cnd = table[cnd]
		} else {
			table[pos] = 0
			pos++
		}
	}
	return table
}
// ExpectTimeout is Expect bounded by timeout; on expiry it returns an error
// containing any captured output.
// NOTE(review): if the timeout fires, the Expect goroutine stays blocked on
// the unbuffered result channel — a goroutine leak inherited from upstream.
func (expect *ExpectSubprocess) ExpectTimeout(searchString string, timeout time.Duration) (e error) {
	result := make(chan error)
	go func() {
		result <- expect.Expect(searchString)
	}()
	select {
	case e = <-result:
	case <-time.After(timeout):
		e = fmt.Errorf("Expect timed out after %v waiting for '%v'.\nOutput:\n%s", timeout, searchString, expect.Collect())
	}
	return e
}
// Expect blocks until searchString appears in the child's output, using the
// Knuth-Morris-Pratt algorithm to match across read-chunk boundaries. Bytes
// read past the end of the match are put back for subsequent reads. It
// returns ErrEmptySearch for an empty pattern and the underlying read error
// on EOF/failure.
func (expect *ExpectSubprocess) Expect(searchString string) (e error) {
	target := len(searchString)
	if target < 1 {
		return ErrEmptySearch
	}
	chunk := make([]byte, target*2)
	if expect.outputBuffer != nil {
		// NOTE(review): outputBuffer[0:] is a no-op; [:0] (truncate) looks
		// intended — confirm the Capture/Collect semantics before changing.
		expect.outputBuffer = expect.outputBuffer[0:]
	}
	m := 0 // offset of the current candidate match in the stream
	i := 0 // number of pattern bytes matched so far
	// Build KMP Table
	table := buildKMPTable(searchString)

	for {
		n, err := expect.buf.Read(chunk)
		if n == 0 && err != nil {
			return err
		}
		if expect.outputBuffer != nil {
			// capture mode: keep everything read for Collect()
			expect.outputBuffer = append(expect.outputBuffer, chunk[:n]...)
		}
		offset := m + i
		for m+i-offset < n {
			if searchString[i] == chunk[m+i-offset] {
				i += 1
				if i == target {
					unreadIndex := m + i - offset
					if len(chunk) > unreadIndex {
						// return unconsumed bytes to the buffer
						expect.buf.PutBack(chunk[unreadIndex:n])
					}
					return nil
				}
			} else {
				// mismatch: shift the candidate using the failure table
				m += i - table[i]
				if table[i] > -1 {
					i = table[i]
				} else {
					i = 0
				}
			}
		}
	}
}
// Send writes command verbatim to the child's stdin (the pty).
func (expect *ExpectSubprocess) Send(command string) error {
	_, err := expect.buf.f.WriteString(command)
	return err
}
// Capture enables output recording: subsequent reads are accumulated until
// Collect is called. Calling it twice is a no-op.
func (expect *ExpectSubprocess) Capture() {
	if expect.outputBuffer != nil {
		return
	}
	expect.outputBuffer = []byte{}
}
// Collect returns a copy of the output recorded since Capture and turns
// recording off.
func (expect *ExpectSubprocess) Collect() []byte {
	snapshot := make([]byte, len(expect.outputBuffer))
	copy(snapshot, expect.outputBuffer)
	expect.outputBuffer = nil
	return snapshot
}
// SendLine writes command followed by "\r\n" to the child's stdin.
func (expect *ExpectSubprocess) SendLine(command string) error {
	_, err := expect.buf.f.WriteString(command + "\r\n")
	return err
}
// Interact hands the terminal to the user: buffered output is flushed to
// stdout, then stdin/stdout are piped to/from the pty until the child exits.
func (expect *ExpectSubprocess) Interact() {
	defer expect.Cmd.Wait()
	// flush anything already buffered before going interactive
	io.Copy(os.Stdout, &expect.buf.b)
	go io.Copy(os.Stdout, expect.buf.f)
	go io.Copy(expect.buf.f, os.Stdin)
}
// ReadUntil reads from the child until delim is seen, returning everything
// before it (delim excluded). Bytes after delim are put back for later
// reads. On read error the bytes gathered so far are returned with the
// error.
func (expect *ExpectSubprocess) ReadUntil(delim byte) ([]byte, error) {
	join := make([]byte, 0, 512)
	chunk := make([]byte, 255)

	for {
		n, err := expect.buf.Read(chunk)

		for i := 0; i < n; i++ {
			if chunk[i] == delim {
				if len(chunk) > i+1 {
					// keep the remainder for the next read
					expect.buf.PutBack(chunk[i+1 : n])
				}
				return join, nil
			} else {
				join = append(join, chunk[i])
			}
		}

		if err != nil {
			return join, err
		}
	}
}
// Wait blocks until the child process exits, returning its exit error.
func (expect *ExpectSubprocess) Wait() error {
	return expect.Cmd.Wait()
}
// ReadLine reads up to the next '\n' and returns the line (without the
// newline) as a string.
func (expect *ExpectSubprocess) ReadLine() (string, error) {
	line, err := expect.ReadUntil('\n')
	return string(line), err
}
// _start launches expect.Cmd under a new pty and wires the pty master into
// the read buffer.
func _start(expect *ExpectSubprocess) (*ExpectSubprocess, error) {
	f, err := pty.Start(expect.Cmd)
	if err != nil {
		return nil, err
	}
	expect.buf.f = f

	return expect, nil
}
// _spawn parses command with shell-style quoting and builds the Cmd and
// read buffer without starting the process.
func _spawn(command string) (*ExpectSubprocess, error) {
	wrapper := new(ExpectSubprocess)

	wrapper.outputBuffer = nil // capture mode starts disabled

	splitArgs, err := shell.Split(command)
	if err != nil {
		return nil, err
	}
	numArguments := len(splitArgs) - 1
	if numArguments < 0 {
		return nil, errors.New("gexpect: No command given to spawn")
	}
	path, err := exec.LookPath(splitArgs[0])
	if err != nil {
		return nil, err
	}

	if numArguments >= 1 {
		wrapper.Cmd = exec.Command(path, splitArgs[1:]...)
	} else {
		wrapper.Cmd = exec.Command(path)
	}
	wrapper.buf = new(buffer)

	return wrapper, nil
}

View File

@ -1,419 +0,0 @@
// +build !windows
package gexpect
import (
"bytes"
"fmt"
"io/ioutil"
"strings"
"testing"
"time"
)
// TestEmptySearchString verifies Expect rejects an empty pattern with
// ErrEmptySearch.
func TestEmptySearchString(t *testing.T) {
	t.Logf("Testing empty search string...")
	child, err := Spawn("echo Hello World")
	if err != nil {
		t.Fatal(err)
	}
	err = child.Expect("")
	if err != ErrEmptySearch {
		t.Fatalf("Expected empty search error, got %v", err)
	}
}
// TestHelloWorld checks the basic Spawn + Expect happy path.
func TestHelloWorld(t *testing.T) {
	t.Logf("Testing Hello World... ")
	child, err := Spawn("echo \"Hello World\"")
	if err != nil {
		t.Fatal(err)
	}
	err = child.Expect("Hello World")
	if err != nil {
		t.Fatal(err)
	}
}
// TestDoubleHelloWorld checks that successive Expect calls consume the
// stream in order across multiple echoed lines.
func TestDoubleHelloWorld(t *testing.T) {
	t.Logf("Testing Double Hello World... ")
	child, err := Spawn(`sh -c "echo Hello World ; echo Hello ; echo Hi"`)
	if err != nil {
		t.Fatal(err)
	}
	err = child.Expect("Hello World")
	if err != nil {
		t.Fatal(err)
	}
	err = child.Expect("Hello")
	if err != nil {
		t.Fatal(err)
	}
	err = child.Expect("Hi")
	if err != nil {
		t.Fatal(err)
	}
}
// TestHelloWorldFailureCase checks that Expect errors when the pattern
// never appears before EOF.
func TestHelloWorldFailureCase(t *testing.T) {
	t.Logf("Testing Hello World Failure case... ")
	child, err := Spawn("echo \"Hello World\"")
	if err != nil {
		t.Fatal(err)
	}
	err = child.Expect("YOU WILL NEVER FIND ME")
	if err != nil {
		return
	}
	t.Fatal("Expected an error for TestHelloWorldFailureCase")
}
// TestBiChannel exercises AsyncInteractChannels by echoing through `cat`
// and waiting for each echo to come back on the receive channel.
func TestBiChannel(t *testing.T) {
	t.Logf("Testing BiChannel screen... ")
	child, err := Spawn("cat")
	if err != nil {
		t.Fatal(err)
	}
	sender, receiver := child.AsyncInteractChannels()
	// wait blocks until a received line contains str (or receive closes).
	wait := func(str string) {
		for {
			msg, open := <-receiver
			if !open {
				return
			}
			if strings.Contains(msg, str) {
				return
			}
		}
	}

	endlChar := fmt.Sprintln("")
	sender <- fmt.Sprintf("echo%v", endlChar)
	wait("echo")
	sender <- fmt.Sprintf("echo2%v", endlChar)
	wait("echo2")
	child.Close()
	child.Wait()
}
// TestCommandStart checks the two-step Command + Start flow.
func TestCommandStart(t *testing.T) {
	t.Logf("Testing Command... ")

	// Doing this allows you to modify the cmd struct prior to execution, for example to add environment variables
	child, err := Command("echo 'Hello World'")
	if err != nil {
		t.Fatal(err)
	}
	child.Start()
	child.Expect("Hello World")
}
// regexMatchTests pairs a regexp with an input that should match (good)
// and one that should not (bad).
var regexMatchTests = []struct {
	re   string
	good string
	bad  string
}{
	{`a`, `a`, `b`},
	{`.b`, `ab`, `ac`},
	{`a+hello`, `aaaahello`, `bhello`},
	{`(hello|world)`, `hello`, `unknown`},
	{`(hello|world)`, `world`, `unknown`},
	{"\u00a9", "\u00a9", `unknown`}, // 2 bytes long unicode character "copyright sign"
}
// TestRegexMatch runs ExpectRegex over each regexMatchTests entry, asserting
// the good input matches and the bad input does not.
func TestRegexMatch(t *testing.T) {
	t.Logf("Testing Regular Expression Matching... ")

	for _, tt := range regexMatchTests {
		runTest := func(input string) bool {
			var match bool
			child, err := Spawn("echo \"" + input + "\"")
			if err != nil {
				t.Fatal(err)
			}
			match, err = child.ExpectRegex(tt.re)
			if err != nil {
				t.Fatal(err)
			}
			return match
		}
		if !runTest(tt.good) {
			t.Errorf("Regex Not matching [%#q] with pattern [%#q]", tt.good, tt.re)
		}
		if runTest(tt.bad) {
			t.Errorf("Regex Matching [%#q] with pattern [%#q]", tt.bad, tt.re)
		}
	}
}
// regexFindTests lists a regexp, an input, and the expected submatches
// (whole match first, then each capture group).
var regexFindTests = []struct {
	re      string
	input   string
	matches []string
}{
	{`he(l)lo wo(r)ld`, `hello world`, []string{"hello world", "l", "r"}},
	{`(a)`, `a`, []string{"a", "a"}},
	{`so.. (hello|world)`, `so.. hello`, []string{"so.. hello", "hello"}},
	{`(a+)hello`, `aaaahello`, []string{"aaaahello", "aaaa"}},
	{`\d+ (\d+) (\d+)`, `123 456 789`, []string{"123 456 789", "456", "789"}},
	{`\d+ (\d+) (\d+)`, "\u00a9 123 456 789 \u00a9", []string{"123 456 789", "456", "789"}}, // check unicode characters
}
// TestRegexFind runs ExpectRegexFind over each regexFindTests entry and
// compares every returned submatch against the expected list.
func TestRegexFind(t *testing.T) {
	t.Logf("Testing Regular Expression Search... ")

	for _, tt := range regexFindTests {
		runTest := func(input string) []string {
			child, err := Spawn("echo \"" + input + "\"")
			if err != nil {
				t.Fatal(err)
			}
			matches, err := child.ExpectRegexFind(tt.re)
			if err != nil {
				t.Fatal(err)
			}
			return matches
		}
		matches := runTest(tt.input)
		if len(matches) != len(tt.matches) {
			t.Fatalf("Regex not producing the expected number of patterns.. got[%d] ([%s]) expected[%d] ([%s])",
				len(matches), strings.Join(matches, ","),
				len(tt.matches), strings.Join(tt.matches, ","))
		}
		for i, _ := range matches {
			if matches[i] != tt.matches[i] {
				t.Errorf("Regex Expected group [%s] and got group [%s] with pattern [%#q] and input [%s]",
					tt.matches[i], matches[i], tt.re, tt.input)
			}
		}
	}
}
// TestReadLine reads two lines from an echoed pair; the pty converts "\n"
// to "\r\n", hence the trailing "\r" in the expectations.
func TestReadLine(t *testing.T) {
	t.Logf("Testing ReadLine...")

	child, err := Spawn("echo \"foo\nbar\"")
	if err != nil {
		t.Fatal(err)
	}
	s, err := child.ReadLine()
	if err != nil {
		t.Fatal(err)
	}
	if s != "foo\r" {
		t.Fatalf("expected 'foo\\r', got '%s'", s)
	}
	s, err = child.ReadLine()
	if err != nil {
		t.Fatal(err)
	}
	if s != "bar\r" {
		t.Fatalf("expected 'bar\\r', got '%s'", s)
	}
}
// TestRegexWithOutput checks ExpectRegexFindWithOutput for both the
// no-match case (full output returned with an error) and a successful
// capture-group match.
func TestRegexWithOutput(t *testing.T) {
	t.Logf("Testing Regular Expression search with output...")

	s := "You will not find me"
	p, err := Spawn("echo -n " + s)
	if err != nil {
		t.Fatalf("Cannot exec rkt: %v", err)
	}
	searchPattern := `I should not find you`
	result, out, err := p.ExpectRegexFindWithOutput(searchPattern)
	if err == nil {
		t.Fatalf("Shouldn't have found `%v` in `%v`", searchPattern, out)
	}
	if s != out {
		t.Fatalf("Child output didn't match: %s", out)
	}

	err = p.Wait()
	if err != nil {
		t.Fatalf("Child didn't terminate correctly: %v", err)
	}

	p, err = Spawn("echo You will find me")
	if err != nil {
		t.Fatalf("Cannot exec rkt: %v", err)
	}
	searchPattern = `.*(You will).*`
	result, out, err = p.ExpectRegexFindWithOutput(searchPattern)
	if err != nil || result[1] != "You will" {
		t.Fatalf("Did not find pattern `%v` in `%v'\n", searchPattern, out)
	}
	err = p.Wait()
	if err != nil {
		t.Fatalf("Child didn't terminate correctly: %v", err)
	}
}
// TestRegexTimeoutWithOutput checks ExpectTimeoutRegexFindWithOutput both
// when the timeout fires before the output arrives and when it does not.
func TestRegexTimeoutWithOutput(t *testing.T) {
	t.Logf("Testing Regular Expression search with timeout and output...")

	seconds := 2
	timeout := time.Duration(seconds-1) * time.Second

	p, err := Spawn(fmt.Sprintf("sh -c 'sleep %d && echo You find me'", seconds))
	if err != nil {
		t.Fatalf("Cannot exec rkt: %v", err)
	}
	searchPattern := `find me`
	result, out, err := p.ExpectTimeoutRegexFindWithOutput(searchPattern, timeout)
	if err == nil {
		t.Fatalf("Shouldn't have finished call with result: %v", result)
	}

	seconds = 2
	timeout = time.Duration(seconds+1) * time.Second

	p, err = Spawn(fmt.Sprintf("sh -c 'sleep %d && echo You find me'", seconds))
	if err != nil {
		t.Fatalf("Cannot exec rkt: %v", err)
	}
	searchPattern = `find me`
	result, out, err = p.ExpectTimeoutRegexFindWithOutput(searchPattern, timeout)
	if err != nil {
		t.Fatalf("Didn't find %v in output: %v", searchPattern, out)
	}
}
// TestRegexFindNoExcessBytes drives repeated matches over a generated
// stream and checks that each ExpectRegexFindWithOutput call returns
// exactly the matched text (plus any deliberately unmatched separator),
// i.e. no bytes are lost or duplicated between successive matches.
func TestRegexFindNoExcessBytes(t *testing.T) {
	t.Logf("Testing Regular Expressions returning output with no excess strings")
	repeats := 50
	tests := []struct {
		desc           string
		loopBody       string
		searchPattern  string
		expectFullTmpl string
		unmatchedData  string
	}{
		{
			desc:           `matching lines line by line with $ at the end of the regexp`,
			loopBody:       `echo "prefix: ${i} line"`,
			searchPattern:  `(?m)^prefix:\s+(\d+) line\s??$`,
			expectFullTmpl: `prefix: %d line`,
			unmatchedData:  "\n",
			// the "$" char at the end of regexp does not
			// match the \n, so it is left as an unmatched
			// data
		},
		{
			desc:           `matching lines line by line with \n at the end of the regexp`,
			loopBody:       `echo "prefix: ${i} line"`,
			searchPattern:  `(?m)^prefix:\s+(\d+) line\s??\n`,
			expectFullTmpl: `prefix: %d line`,
			unmatchedData:  "",
		},
		{
			desc:           `matching chunks in single line chunk by chunk`,
			loopBody:       `printf "a ${i} b"`,
			searchPattern:  `a\s+(\d+)\s+b`,
			expectFullTmpl: `a %d b`,
			unmatchedData:  "",
		},
	}
	seqCmd := fmt.Sprintf("`seq 1 %d`", repeats)
	shCmdTmpl := fmt.Sprintf(`sh -c 'for i in %s; do %%s; done'`, seqCmd)
	for _, tt := range tests {
		t.Logf("Test: %s", tt.desc)
		shCmd := fmt.Sprintf(shCmdTmpl, tt.loopBody)
		t.Logf("Running command: %s", shCmd)
		p, err := Spawn(shCmd)
		if err != nil {
			t.Fatalf("Cannot exec shell script: %v", err)
		}
		defer func() {
			if err := p.Wait(); err != nil {
				t.Fatalf("shell script didn't terminate correctly: %v", err)
			}
		}()
		for i := 1; i <= repeats; i++ {
			matches, output, err := p.ExpectRegexFindWithOutput(tt.searchPattern)
			if err != nil {
				t.Fatalf("Failed to get the match number %d: %v", i, err)
			}
			if len(matches) != 2 {
				t.Fatalf("Expected only 2 matches, got %d", len(matches))
			}
			full := strings.TrimSpace(matches[0])
			expFull := fmt.Sprintf(tt.expectFullTmpl, i)
			partial := matches[1]
			expPartial := fmt.Sprintf("%d", i)
			if full != expFull {
				t.Fatalf("Did not the expected full match %q, got %q", expFull, full)
			}
			if partial != expPartial {
				t.Fatalf("Did not the expected partial match %q, got %q", expPartial, partial)
			}
			// The output variable usually contains the
			// unmatched data followed by the whole match.
			// The first line is special as it has no data
			// preceding it.
			var expectedOutput string
			if i == 1 || tt.unmatchedData == "" {
				expectedOutput = matches[0]
			} else {
				expectedOutput = fmt.Sprintf("%s%s", tt.unmatchedData, matches[0])
			}
			if output != expectedOutput {
				t.Fatalf("The collected output %q should be the same as the whole match %q", output, expectedOutput)
			}
		}
	}
}
// TestBufferReadRune verifies buffer.ReadRune stitches a UTF-8 rune together
// regardless of how its bytes are split between the put-back buffer (b) and
// the backing file (f).
func TestBufferReadRune(t *testing.T) {
	tests := []struct {
		bufferContent []byte
		fileContent   []byte
		expectedRune  rune
	}{
		// unicode "copyright char" is \u00a9 is encoded as two bytes in utf8 0xc2 0xa9
		{[]byte{0xc2, 0xa9}, []byte{}, '\u00a9'}, // whole rune is already in buffer.b
		{[]byte{0xc2}, []byte{0xa9}, '\u00a9'},   // half of is in the buffer.b and another half still in buffer.f (file)
		{[]byte{}, []byte{0xc2, 0xa9}, '\u00a9'}, // whole rune is the file
		// some random noise in the end of file
		{[]byte{0xc2, 0xa9}, []byte{0x20, 0x20, 0x20, 0x20}, '\u00a9'},
		{[]byte{0xc2}, []byte{0xa9, 0x20, 0x20, 0x20, 0x20}, '\u00a9'},
		{[]byte{}, []byte{0xc2, 0xa9, 0x20, 0x20, 0x20, 0x20}, '\u00a9'},
	}

	for i, tt := range tests {
		// prepare tmp file with fileContent
		f, err := ioutil.TempFile("", "")
		if err != nil {
			t.Fatal(err)
		}
		n, err := f.Write(tt.fileContent)
		if err != nil {
			t.Fatal(err)
		}
		if n != len(tt.fileContent) {
			t.Fatal("expected fileContent written to temp file")
		}
		_, err = f.Seek(0, 0)
		if err != nil {
			t.Fatal(err)
		}

		// new buffer
		buf := buffer{f: f, b: *bytes.NewBuffer(tt.bufferContent)}

		// call ReadRune
		r, size, err := buf.ReadRune()

		if r != tt.expectedRune {
			t.Fatalf("#%d: expected rune %+q but go is %+q", i, tt.expectedRune, r)
		}
		if size != len(string(tt.expectedRune)) {
			t.Fatalf("#%d: expected rune %d bytes long but got just %d bytes long", i, len(string(tt.expectedRune)), size)
		}
	}
}

View File

@ -1,2 +0,0 @@
.DS_Store
doc

View File

@ -1,223 +0,0 @@
memo = "b40e77e679fec09015bfda27b7d1fc37f4ba6240cbe95cf9fb88b5c56e40ebdf"
[[projects]]
branch = "master"
name = "github.com/ThomasRooney/gexpect"
packages = ["."]
revision = "5482f03509440585d13d8f648989e05903001842"
[[projects]]
branch = "master"
name = "github.com/Xe/x"
packages = ["tools/glue/libs/gluaexpect","tools/glue/libs/gluasimplebox"]
revision = "d0ebe3970f361daa31a135f1e0c7304eb1442f61"
[[projects]]
branch = "master"
name = "github.com/ailncode/gluaxmlpath"
packages = ["."]
revision = "6ce478ecb4a60c4fc8929838e0b21b7fb7ca7440"
[[projects]]
branch = "master"
name = "github.com/brandur/simplebox"
packages = ["."]
revision = "84e9865bb03ad38c464043bf5382ce8c68ca5f0c"
[[projects]]
branch = "master"
name = "github.com/cjoudrey/gluahttp"
packages = ["."]
revision = "b4bfe0c50fea948dcbf3966e120996d6607bbd89"
[[projects]]
branch = "master"
name = "github.com/cjoudrey/gluaurl"
packages = ["."]
revision = "31cbb9bef199454415879f2e6d609d1136d60cad"
[[projects]]
branch = "master"
name = "github.com/cyberdelia/heroku-go"
packages = ["v3"]
revision = "bb8b6b1e9656ec0728638961f8e8b4211fee735d"
[[projects]]
branch = "master"
name = "github.com/dickeyxxx/netrc"
packages = ["."]
revision = "3acf1b3de25d89c7688c63bb45f6b07f566555ec"
[[projects]]
branch = "master"
name = "github.com/google/go-querystring"
packages = ["query"]
revision = "53e6ce116135b80d037921a7fdd5138cf32d7a8a"
[[projects]]
branch = "master"
name = "github.com/howeyc/gopass"
packages = ["."]
revision = "bf9dde6d0d2c004a008c27aaee91170c786f6db8"
[[projects]]
branch = "master"
name = "github.com/kballard/go-shellquote"
packages = ["."]
revision = "d8ec1a69a250a17bb0e419c386eac1f3711dc142"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluaenv"
packages = ["."]
revision = "2888db6bbe38923d59c42e443895875cc8ce0820"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluafs"
packages = ["."]
revision = "01391ed2d7ab89dc80157605b073403f960aa223"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluaquestion"
packages = ["."]
revision = "311437c29ba54d027ad2af383661725ae2bfdcdc"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluassh"
packages = ["."]
revision = "2a7bd48d7568de8230c87ac1ef4a4c481e45814d"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluatemplate"
packages = ["."]
revision = "d9e2c9d6b00f069a9da377a9ac529c827c1c7d71"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluayaml"
packages = ["."]
revision = "6fe413d49d73d785510ecf1529991ab0573e96c7"
[[projects]]
branch = "master"
name = "github.com/kr/fs"
packages = ["."]
revision = "2788f0dbd16903de03cb8186e5c7d97b69ad387b"
[[projects]]
branch = "master"
name = "github.com/kr/pty"
packages = ["."]
revision = "ce7fa45920dc37a92de8377972e52bc55ffa8d57"
[[projects]]
branch = "master"
name = "github.com/mattn/go-runewidth"
packages = ["."]
revision = "737072b4e32b7a5018b4a7125da8d12de90e8045"
[[projects]]
branch = "master"
name = "github.com/mitchellh/mapstructure"
packages = ["."]
revision = "cc8532a8e9a55ea36402aa21efdf403a60d34096"
[[projects]]
branch = "master"
name = "github.com/olekukonko/tablewriter"
packages = ["."]
revision = "44e365d423f4f06769182abfeeae2b91be9d529b"
[[projects]]
branch = "master"
name = "github.com/otm/gluaflag"
packages = ["."]
revision = "078088de689148194436293886e8e39809167332"
[[projects]]
branch = "master"
name = "github.com/otm/gluash"
packages = ["."]
revision = "e145c563986f0b91f740a758a84bca46c163aec7"
[[projects]]
name = "github.com/pborman/uuid"
packages = ["."]
revision = "e790cca94e6cc75c7064b1332e63811d4aae1a53"
version = "v1.1"
[[projects]]
branch = "master"
name = "github.com/pkg/sftp"
packages = ["."]
revision = "e84cc8c755ca39b7b64f510fe1fffc1b51f210a5"
[[projects]]
branch = "master"
name = "github.com/yookoala/realpath"
packages = ["."]
revision = "c416d99ab5ed256fa30c1f3bab73152deb59bb69"
[[projects]]
branch = "master"
name = "github.com/yuin/gluamapper"
packages = ["."]
revision = "d836955830e75240d46ce9f0e6d148d94f2e1d3a"
[[projects]]
branch = "master"
name = "github.com/yuin/gluare"
packages = ["."]
revision = "8e2742cd1bf2b904720ac66eca3c2091b2ea0720"
[[projects]]
branch = "master"
name = "github.com/yuin/gopher-lua"
packages = [".","ast","parse","pm"]
revision = "33ebc07735566cd0c3c4b69e2839d522cc389852"
[[projects]]
branch = "master"
name = "golang.org/x/crypto"
packages = ["curve25519","ed25519","ed25519/internal/edwards25519","nacl/secretbox","poly1305","salsa20/salsa","ssh","ssh/agent","ssh/terminal"]
revision = "dd85ac7e6a88fc6ca420478e934de5f1a42dd3c6"
[[projects]]
branch = "master"
name = "golang.org/x/net"
packages = ["html","html/atom"]
revision = "66aacef3dd8a676686c7ae3716979581e8b03c47"
[[projects]]
branch = "master"
name = "golang.org/x/sys"
packages = ["unix"]
revision = "9ccfe848b9db8435a24c424abbc07a921adf1df5"
[[projects]]
branch = "v2"
name = "gopkg.in/xmlpath.v2"
packages = ["."]
revision = "860cbeca3ebcc600db0b213c0e83ad6ce91f5739"
[[projects]]
branch = "master"
name = "gopkg.in/yaml.v2"
packages = ["."]
revision = "cd8b52f8269e0feb286dfeef29f8fe4d5b397e0b"
[[projects]]
branch = "master"
name = "layeh.com/gopher-json"
packages = ["."]
revision = "c128cc74278be889c4381681712931976fe0d88b"
[[projects]]
branch = "master"
name = "layeh.com/gopher-luar"
packages = ["."]
revision = "80196fe2abc5682963fc7a5261f5a5d77509938b"

View File

@ -1,72 +0,0 @@
[[dependencies]]
branch = "master"
name = "github.com/Xe/x"
[[dependencies]]
branch = "master"
name = "github.com/ailncode/gluaxmlpath"
[[dependencies]]
branch = "master"
name = "github.com/cjoudrey/gluahttp"
[[dependencies]]
branch = "master"
name = "github.com/cjoudrey/gluaurl"
[[dependencies]]
branch = "master"
name = "github.com/dickeyxxx/netrc"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluaenv"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluafs"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluaquestion"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluassh"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluatemplate"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluayaml"
[[dependencies]]
branch = "master"
name = "github.com/olekukonko/tablewriter"
[[dependencies]]
branch = "master"
name = "github.com/otm/gluaflag"
[[dependencies]]
branch = "master"
name = "github.com/otm/gluash"
[[dependencies]]
branch = "master"
name = "github.com/yuin/gluare"
[[dependencies]]
branch = "master"
name = "github.com/yuin/gopher-lua"
[[dependencies]]
branch = "master"
name = "layeh.com/gopher-json"
[[dependencies]]
branch = "master"
name = "layeh.com/gopher-luar"

121
vendor/github.com/Xe/eclier/LICENSE generated vendored
View File

@ -1,121 +0,0 @@
Creative Commons Legal Code
CC0 1.0 Universal
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
HEREUNDER.
Statement of Purpose
The laws of most jurisdictions throughout the world automatically confer
exclusive Copyright and Related Rights (defined below) upon the creator
and subsequent owner(s) (each and all, an "owner") of an original work of
authorship and/or a database (each, a "Work").
Certain owners wish to permanently relinquish those rights to a Work for
the purpose of contributing to a commons of creative, cultural and
scientific works ("Commons") that the public can reliably and without fear
of later claims of infringement build upon, modify, incorporate in other
works, reuse and redistribute as freely as possible in any form whatsoever
and for any purposes, including without limitation commercial purposes.
These owners may contribute to the Commons to promote the ideal of a free
culture and the further production of creative, cultural and scientific
works, or to gain reputation or greater distribution for their Work in
part through the use and efforts of others.
For these and/or other purposes and motivations, and without any
expectation of additional consideration or compensation, the person
associating CC0 with a Work (the "Affirmer"), to the extent that he or she
is an owner of Copyright and Related Rights in the Work, voluntarily
elects to apply CC0 to the Work and publicly distribute the Work under its
terms, with knowledge of his or her Copyright and Related Rights in the
Work and the meaning and intended legal effect of CC0 on those rights.
1. Copyright and Related Rights. A Work made available under CC0 may be
protected by copyright and related or neighboring rights ("Copyright and
Related Rights"). Copyright and Related Rights include, but are not
limited to, the following:
i. the right to reproduce, adapt, distribute, perform, display,
communicate, and translate a Work;
ii. moral rights retained by the original author(s) and/or performer(s);
iii. publicity and privacy rights pertaining to a person's image or
likeness depicted in a Work;
iv. rights protecting against unfair competition in regards to a Work,
subject to the limitations in paragraph 4(a), below;
v. rights protecting the extraction, dissemination, use and reuse of data
in a Work;
vi. database rights (such as those arising under Directive 96/9/EC of the
European Parliament and of the Council of 11 March 1996 on the legal
protection of databases, and under any national implementation
thereof, including any amended or successor version of such
directive); and
vii. other similar, equivalent or corresponding rights throughout the
world based on applicable law or treaty, and any national
implementations thereof.
2. Waiver. To the greatest extent permitted by, but not in contravention
of, applicable law, Affirmer hereby overtly, fully, permanently,
irrevocably and unconditionally waives, abandons, and surrenders all of
Affirmer's Copyright and Related Rights and associated claims and causes
of action, whether now known or unknown (including existing as well as
future claims and causes of action), in the Work (i) in all territories
worldwide, (ii) for the maximum duration provided by applicable law or
treaty (including future time extensions), (iii) in any current or future
medium and for any number of copies, and (iv) for any purpose whatsoever,
including without limitation commercial, advertising or promotional
purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
member of the public at large and to the detriment of Affirmer's heirs and
successors, fully intending that such Waiver shall not be subject to
revocation, rescission, cancellation, termination, or any other legal or
equitable action to disrupt the quiet enjoyment of the Work by the public
as contemplated by Affirmer's express Statement of Purpose.
3. Public License Fallback. Should any part of the Waiver for any reason
be judged legally invalid or ineffective under applicable law, then the
Waiver shall be preserved to the maximum extent permitted taking into
account Affirmer's express Statement of Purpose. In addition, to the
extent the Waiver is so judged Affirmer hereby grants to each affected
person a royalty-free, non transferable, non sublicensable, non exclusive,
irrevocable and unconditional license to exercise Affirmer's Copyright and
Related Rights in the Work (i) in all territories worldwide, (ii) for the
maximum duration provided by applicable law or treaty (including future
time extensions), (iii) in any current or future medium and for any number
of copies, and (iv) for any purpose whatsoever, including without
limitation commercial, advertising or promotional purposes (the
"License"). The License shall be deemed effective as of the date CC0 was
applied by Affirmer to the Work. Should any part of the License for any
reason be judged legally invalid or ineffective under applicable law, such
partial invalidity or ineffectiveness shall not invalidate the remainder
of the License, and in such case Affirmer hereby affirms that he or she
will not (i) exercise any of his or her remaining Copyright and Related
Rights in the Work or (ii) assert any associated claims and causes of
action with respect to the Work, in either case contrary to Affirmer's
express Statement of Purpose.
4. Limitations and Disclaimers.
a. No trademark or patent rights held by Affirmer are waived, abandoned,
surrendered, licensed or otherwise affected by this document.
b. Affirmer offers the Work as-is and makes no representations or
warranties of any kind concerning the Work, express, implied,
statutory or otherwise, including without limitation warranties of
title, merchantability, fitness for a particular purpose, non
infringement, or the absence of latent or other defects, accuracy, or
the present or absence of errors, whether or not discoverable, all to
the greatest extent permissible under applicable law.
c. Affirmer disclaims responsibility for clearing rights of other persons
that may apply to the Work or any use thereof, including without
limitation any person's Copyright and Related Rights in the Work.
Further, Affirmer disclaims responsibility for obtaining any necessary
consents, permissions or other rights required for any use of the
Work.
d. Affirmer understands and acknowledges that Creative Commons is not a
party to this document and has no duty or obligation with respect to
this CC0 or use of the Work.

View File

@ -1,38 +0,0 @@
# eclier
Pronounced like eclair
The core of a command line application allowing for trivial user extension.
Every command and subcommand is its own `.lua` file that is either shipped as
part of the built-in cartridge of commands or a plugin that the user installs.
The core contains the following:
- A module loading system for preparing different commands for use
- The core subcommand router
## How to write plugins
Create a new file in the script home named after the plugin subcommand, for
example: `scripts/hello.lua`:
```lua
script.verb = "hello"
script.help = "prints everyone's favorite hello world message"
script.author = "Xe" -- put your github username here
script.version = "1.0"
script.usage = ""
function run()
print "Hello, world!"
end
```
And then run it using the example shell cli:
```console
~/go/src/github.com/Xe/eclier:master λ go run ./example/main.go hello
Hello, world!
```

View File

@ -1,119 +0,0 @@
package eclier
import (
"context"
"flag"
"os"
"github.com/olekukonko/tablewriter"
)
// Constants for built-in commands.
// Built-in commands report these placeholder values instead of a real
// on-disk script path, author, or version.
const (
BuiltinScriptPath = "<built-in>"
BuiltinAuthor = "<built-in>"
BuiltinVersion = "<built-in>"
)
// pluginCommand is the built-in "plugins" subcommand. It lists every
// command registered on its parent Router together with its script path.
type pluginCommand struct {
r *Router // parent router whose command table is listed
fs *flag.FlagSet // flag set constructed in Init
dontShowBuiltin *bool // value of the -no-builtin flag
}
// Close is a no-op.
func (p *pluginCommand) Close() error { return nil }
// Init sets up the flags for this command.
// Note flag.ExitOnError: a bad flag terminates the process.
func (p *pluginCommand) Init() {
p.fs = flag.NewFlagSet(p.Verb(), flag.ExitOnError)
p.dontShowBuiltin = p.fs.Bool("no-builtin", false, "if set, don't show built-in commands")
}
// ScriptPath returns the built-in script path placeholder.
func (p *pluginCommand) ScriptPath() string { return BuiltinScriptPath }
// Verb returns the command verb.
func (p *pluginCommand) Verb() string { return "plugins" }
// Help returns the command help.
func (p *pluginCommand) Help() string {
return `plugin lists all of the loaded commands and their script paths.`
}
// Usage returns the flag usage text shown for this command.
func (p *pluginCommand) Usage() string {
return ` -no-builtin
if set, don't show built-in commands`
}
// Author returns the built-in author placeholder.
func (p *pluginCommand) Author() string { return BuiltinAuthor }
// Version returns the built-in version placeholder.
func (p *pluginCommand) Version() string { return BuiltinVersion }
// Run executes the command: it renders a (verb, script path) table of all
// registered commands to stdout, skipping built-ins when -no-builtin is set.
// Map iteration order is random, so row order varies between runs.
func (p *pluginCommand) Run(ctx context.Context, arg []string) error {
p.fs.Parse(arg)
table := tablewriter.NewWriter(os.Stdout)
table.SetHeader([]string{"Verb", "Path"})
for _, c := range p.r.cmds {
if c.ScriptPath() == BuiltinScriptPath && *p.dontShowBuiltin {
continue
}
table.Append([]string{c.Verb(), c.ScriptPath()})
}
table.Render()
return nil
}
// NewBuiltinCommand bundles a verb, its help and usage text, and a handler
// function into a Command, making it easy to define core commands for
// eclier without hand-writing a full struct type.
func NewBuiltinCommand(verb, help, usage string, doer func(context.Context, []string) error) Command {
	cmd := commandFunc{
		verb:  verb,
		help:  help,
		usage: usage,
		doer:  doer,
	}
	return &cmd
}
// commandFunc is a simple alias for creating builtin commands.
// It satisfies Command by storing the metadata as plain fields and
// delegating Run to the stored handler.
type commandFunc struct {
verb string // subcommand name
help string // one-line description
usage string // flag/argument usage text
doer func(context.Context, []string) error // handler invoked by Run
}
// Close deallocates resources set up by the initialization of the command.
// commandFunc holds no resources, so this is a no-op.
func (c *commandFunc) Close() error { return nil }
// Init is a no-op.
func (c *commandFunc) Init() {}
// ScriptPath returns the built-in script path.
func (c *commandFunc) ScriptPath() string { return BuiltinScriptPath }
// Verb returns the command verb.
func (c *commandFunc) Verb() string { return c.verb }
// Help returns the command help.
func (c *commandFunc) Help() string { return c.help }
// Usage returns the command usage.
func (c *commandFunc) Usage() string { return c.usage }
// Author returns the built-in author.
func (c *commandFunc) Author() string { return BuiltinAuthor }
// Version returns the built-in version.
func (c *commandFunc) Version() string { return BuiltinVersion }
// Run runs the command handler.
func (c *commandFunc) Run(ctx context.Context, arg []string) error {
return c.doer(ctx, arg)
}

View File

@ -1,18 +0,0 @@
package eclier
import (
"context"
)
// Command is an individual subcommand.
type Command interface {
// Close releases any resources the command holds (e.g. a lua state).
Close() error
// Init prepares the command to be run, such as setting up flags.
Init()
// ScriptPath reports where the command's backing script lives.
ScriptPath() string
// Verb is the name the command is invoked by.
Verb() string
// Help is a short description of the command.
Help() string
// Usage describes the command's flags and arguments.
Usage() string
// Author identifies who wrote the command.
Author() string
// Version reports the command's version string.
Version() string
// Run executes the command with the remaining CLI arguments.
Run(ctx context.Context, arg []string) error
}

View File

@ -1,8 +0,0 @@
file = {
"./internal/gluanetrc/netrc.lua",
"./internal/gluaheroku/heroku.lua",
}
title = "eclier lua libraries"
project = "eclier"
description = "The lua libraries created for eclier demos and common utility."

214
vendor/github.com/Xe/eclier/router.go generated vendored
View File

@ -1,214 +0,0 @@
package eclier
import (
"context"
"fmt"
"log"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
"github.com/olekukonko/tablewriter"
lua "github.com/yuin/gopher-lua"
"layeh.com/asar"
)
// Router is the main subcommand router for eclier. At a high level what this is
// doing is similar to http.ServeMux, but for CLI commands instead of HTTP handlers.
type Router struct {
lock sync.Mutex // guards cmds
cmds map[string]Command // verb -> registered command
// configured data
gluaCreationHook func(*lua.LState) // run on each new lua state; NOTE(review): newGluaCommand calls it unconditionally -- confirm it is never nil
scriptHomes []string // directories walked for *.lua scripts
cartridge map[string]string // initialized in NewRouter; not otherwise used in this file
}
// NewRouter creates a new instance of Router and sets it up for use.
//
// Every script home is walked for *.lua files; each script found is loaded
// into its own gopher-lua state and wrapped as a Command. The built-in
// "plugins" and "help" commands are registered last, so they override any
// script using those verbs.
func NewRouter(opts ...RouterOption) (*Router, error) {
	r := &Router{
		cmds:      map[string]Command{},
		cartridge: map[string]string{},
	}

	for _, opt := range opts {
		opt(r)
	}

	// Scan each script home for lua scripts, load them into their own lua
	// states and wrap them in the Command interface.
	for _, home := range r.scriptHomes {
		err := filepath.Walk(home, func(path string, info os.FileInfo, err error) error {
			if err != nil {
				log.Printf("error in arg: %v", err)
				return err
			}

			if strings.HasSuffix(info.Name(), ".lua") {
				// path is already the full path handed to us by Walk.
				// The previous filepath.Join(home, info.Name()) dropped
				// intermediate directories, so scripts in subfolders of
				// a script home could never be opened.
				fin, err := os.Open(path)
				if err != nil {
					return err
				}
				defer fin.Close()

				c := newGluaCommand(r.gluaCreationHook, path, fin)
				r.cmds[c.Verb()] = c
			}

			return nil
		})
		if err != nil {
			return nil, err
		}
	}

	// help prints either a table of every command or detail on one verb.
	var helpCommand Command = NewBuiltinCommand("help", "shows help for subcommands", "help [subcommand]", func(ctx context.Context, arg []string) error {
		if len(arg) == 0 {
			table := tablewriter.NewWriter(os.Stdout)
			table.SetHeader([]string{"Verb", "Author", "Version", "Help"})

			for _, cmd := range r.cmds {
				table.Append([]string{cmd.Verb(), cmd.Author(), cmd.Version(), cmd.Help()})
			}

			table.Render()
			return nil
		}

		cmd, ok := r.cmds[arg[0]]
		if !ok {
			fmt.Printf("can't find help for %s", arg[0])
			os.Exit(2)
		}

		fmt.Printf("Verb: %s\nAuthor: %s\nVersion: %s\nHelp: %s\nUsage: %s %s\n", cmd.Verb(), cmd.Author(), cmd.Version(), cmd.Help(), cmd.Verb(), cmd.Usage())
		return nil
	})

	r.cmds["plugins"] = &pluginCommand{r: r}
	r.cmds["help"] = helpCommand

	return r, nil
}
// AddCommand registers cmd on the router under its verb, replacing any
// command previously registered for that verb. It is safe for concurrent
// use with the router's other locked methods.
func (r *Router) AddCommand(cmd Command) {
	verb := cmd.Verb()

	r.lock.Lock()
	defer r.lock.Unlock()

	r.cmds[verb] = cmd
}
// Run executes a single command given in slot 0 of the argument array.
// The remaining arguments are passed to the command. If no subcommand is
// given, or the verb is unknown, a message is printed and the process
// exits with status 2. The router lock is held for the entire duration of
// the command's execution.
func (r *Router) Run(ctx context.Context, arg []string) error {
r.lock.Lock()
defer r.lock.Unlock()
if len(arg) == 0 {
fmt.Printf("please specify a subcommand, such as `%s help`\n", filepath.Base(os.Args[0]))
os.Exit(2)
}
// Split the verb from the arguments handed to the command.
cmd := arg[0]
arg = arg[1:]
ci, ok := r.cmds[cmd]
if !ok {
fmt.Printf("No such command %s could be run.\n", cmd)
os.Exit(2)
}
ci.Init()
return ci.Run(ctx, arg)
}
// RouterOption is a functional option for Router.
type RouterOption func(*Router)
// WithScriptHome sets the router's script home to the given directory. This is
// where lua files will be walked and parsed. It may be given multiple times;
// each call appends another directory to be scanned.
func WithScriptHome(dir string) RouterOption {
return func(r *Router) {
r.scriptHomes = append(r.scriptHomes, dir)
}
}
// WithGluaCreationHook adds a custom bit of code that runs every time a new
// gopher-lua LState is created. This allows users of this library to register
// custom libraries to the pile of states. A later call replaces any hook set
// by an earlier one.
func WithGluaCreationHook(hook func(*lua.LState)) RouterOption {
return func(r *Router) {
r.gluaCreationHook = hook
}
}
// WithFilesystem loads a http.FileSystem full of lua scripts into this eclier
// router. Only *.lua files in the root directory of fs are considered; the
// shortName is used as a display prefix for each script's path. Any error
// terminates the process via log.Fatal, so this option is intended for
// program startup only.
func WithFilesystem(shortName string, fs http.FileSystem) RouterOption {
return func(r *Router) {
fin, err := fs.Open("/")
if err != nil {
log.Fatal(err)
}
defer fin.Close()
childs, err := fin.Readdir(-1)
if err != nil {
log.Fatal(err)
}
for _, chl := range childs {
if strings.HasSuffix(chl.Name(), ".lua") {
fname := filepath.Join(shortName, chl.Name())
sFin, err := fs.Open(chl.Name())
if err != nil {
log.Fatal(err)
}
// NOTE(review): defer in a loop keeps every file open until this
// option function returns; harmless here because newGluaCommand
// reads the script fully before the next iteration.
defer sFin.Close()
c := newGluaCommand(r.gluaCreationHook, fname, sFin)
r.cmds[c.Verb()] = c
}
}
}
}
// WithAsarFile loads an asar file full of lua scripts into this eclier router.
// Every *.lua entry in the archive is loaded; the display path is built as
// shortName/::/<entry path>. Any error terminates the process via log.Fatal,
// so this option is intended for program startup only.
func WithAsarFile(shortName, fname string) RouterOption {
return func(r *Router) {
fin, err := os.Open(fname)
if err != nil {
log.Fatal(err)
}
defer fin.Close()
e, err := asar.Decode(fin)
if err != nil {
log.Fatal(err)
}
err = e.Walk(func(path string, info os.FileInfo, err error) error {
if strings.HasSuffix(info.Name(), ".lua") {
fname := filepath.Join(shortName, "::", path)
fin := e.Find(path)
// Entries that cannot be found are silently skipped.
if fin == nil {
return nil
}
c := newGluaCommand(r.gluaCreationHook, fname, fin.Open())
r.cmds[c.Verb()] = c
}
return nil
})
if err != nil {
log.Fatal(err)
}
}
}

View File

@ -1,92 +0,0 @@
package eclier
import (
"context"
"errors"
"io"
"io/ioutil"
"sync"
lua "github.com/yuin/gopher-lua"
luar "layeh.com/gopher-luar"
)
// Script is the metadata a lua command fills in by assigning to the global
// `script` value that newGluaCommand injects before running the file.
type Script struct {
Verb string // subcommand name
Help string // one-line description
Usage string // flag/argument usage text
Author string // script author
Version string // script version string
}
// gluaCommand wraps a lua script and its dedicated interpreter state so it
// satisfies the Command interface. The embedded mutex makes the struct
// copy-unsafe; it is always used through a pointer.
type gluaCommand struct {
sync.Mutex
script *Script // metadata the script set on the `script` global
L *lua.LState // interpreter state owning the loaded script
filename string // display path reported by ScriptPath
}
// newGluaCommand creates a fresh lua state, runs the preload hook on it,
// injects an empty Script as the `script` global, then reads and executes
// the entire script from r. The script is expected to fill in the script
// global's fields and define a global `run` function.
//
// It panics if reading r or executing the script fails, and calls preload
// unconditionally -- NOTE(review): a nil preload hook would panic here.
func newGluaCommand(preload func(*lua.LState), filename string, r io.Reader) Command {
L := lua.NewState()
preload(L)
script := &Script{}
L.SetGlobal("script", luar.New(L, script))
data, err := ioutil.ReadAll(r)
if err != nil {
panic(err)
}
err = L.DoString(string(data))
if err != nil {
panic(err)
}
return &gluaCommand{script: script, L: L, filename: filename}
}
// Close shuts down this command's lua state.
func (g *gluaCommand) Close() error {
g.L.Close()
return nil
}
// Init is a no-op; all setup happens in newGluaCommand.
func (g *gluaCommand) Init() {}
// ScriptPath returns the path the script was loaded from.
func (g *gluaCommand) ScriptPath() string { return g.filename }
// The following getters expose the metadata the script assigned to the
// `script` global when it was loaded.
func (g *gluaCommand) Verb() string { return g.script.Verb }
func (g *gluaCommand) Help() string { return g.script.Help }
func (g *gluaCommand) Usage() string { return g.script.Usage }
func (g *gluaCommand) Author() string { return g.script.Author }
func (g *gluaCommand) Version() string { return g.script.Version }
// Run invokes the global lua function `run` in this command's lua state,
// passing the command-line arguments as a lua table.
//
// It returns an error if the script does not define a global `run`
// function, or if the script raises an error while executing.
func (g *gluaCommand) Run(ctx context.Context, arg []string) error {
	runf := g.L.GetGlobal("run")
	if runf.Type() == lua.LTNil {
		return errors.New("no global function run in this script")
	}

	tab := g.L.NewTable()
	for _, a := range arg {
		tab.Append(lua.LString(a))
	}

	// Protect the call so a lua error surfaces as a returned Go error.
	// The previous Protect:false made CallByParam panic on a script error,
	// so its error return (and the panic(err) that followed) was never
	// actually reachable.
	return g.L.CallByParam(lua.P{
		Fn:      runf,
		NRet:    0,
		Protect: true,
	}, tab)
}

View File

@ -1,19 +0,0 @@
Copyright (c) 2017 Christine Dodrill <me@christine.website>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -1,7 +0,0 @@
file = {
"./netrc.lua",
}
title = "gluanetrc"
project = "Xe/gluanetrc"
description = "netrc offers a simple interface to a user's netrc file in their home directory"

View File

@ -1,81 +0,0 @@
package gluanetrc
import (
"os"
"path/filepath"
"github.com/dickeyxxx/netrc"
lua "github.com/yuin/gopher-lua"
luar "layeh.com/gopher-luar"
)
var n *netrc.Netrc
// init ensures $HOME/.netrc exists and parses it into the package-level
// handle n. It panics if the file cannot be opened or parsed, since the
// rest of this module is unusable without a netrc handle.
func init() {
	fname := filepath.Join(os.Getenv("HOME"), ".netrc")

	// Ensure the file exists WITHOUT destroying it: the previous
	// os.Create call truncated an existing ~/.netrc to zero bytes every
	// time this package was imported, wiping the user's saved
	// credentials. O_CREATE without O_TRUNC only creates when missing.
	fout, err := os.OpenFile(fname, os.O_RDONLY|os.O_CREATE, 0600)
	if err != nil {
		panic(err)
	}
	fout.Close()

	n, err = netrc.Parse(fname)
	if err != nil {
		panic(err)
	}
}
// exports maps the lua-visible function names of the netrc module to
// their Go implementations; Loader registers them all at once.
var exports = map[string]lua.LGFunction{
"machine": machine,
"save": save,
"remove_machine": removeMachine,
"add_machine": addMachine,
}
// addMachine adds a machine entry to the in-memory netrc manifest from the
// three lua string arguments (name, login, password) and pushes the newly
// added machine wrapped by luar. Returns 1, the number of lua results.
// Changes are not written to disk until save is called.
func addMachine(L *lua.LState) int {
name := L.ToString(1)
login := L.ToString(2)
password := L.ToString(3)
n.AddMachine(name, login, password)
L.Push(luar.New(L, n.Machine(name)))
return 1
}
// removeMachine removes the machine named by the first lua argument from
// the in-memory netrc manifest. Returns 0 lua results.
func removeMachine(L *lua.LState) int {
name := L.ToString(1)
n.RemoveMachine(name)
return 0
}
// machine looks up netrc data for the machine named by the first lua
// argument and pushes the result wrapped by luar. Returns 1, the number
// of lua results.
func machine(L *lua.LState) int {
	// ToString already yields a string; the old string(name) here was a
	// redundant no-op conversion.
	name := L.ToString(1)
	m := n.Machine(name)
	L.Push(luar.New(L, m))
	return 1
}
// save writes the in-memory netrc manifest back to disk. Returns 0 lua
// results. NOTE(review): any error from n.Save() is discarded here --
// confirm against the netrc package whether Save can fail silently.
func save(L *lua.LState) int {
n.Save()
return 0
}
// Preload loads netrc into a gopher-lua's LState module registry.
// After this, lua code can `require "netrc"`.
func Preload(L *lua.LState) {
L.PreloadModule("netrc", Loader)
}
// Loader loads the netrc modules: it builds the module table from exports,
// pushes it, and returns 1 lua result.
func Loader(L *lua.LState) int {
mod := L.SetFuncs(L.NewTable(), exports)
L.Push(mod)
return 1
}

View File

@ -1,59 +0,0 @@
--- Module netrc offers a simple interface to a user's netrc file in their home directory.
-- @module netrc
local netrc = {}
--- add_machine adds a machine to the netrc manifest with a username and password.
-- @param name string the domain name of the machine
-- @param login string the user name to log in as
-- @param password string the password or similar secret for the machine
-- @return Machine
function netrc.add_machine(name, login, password)
end
--- machine loads netrc data for a given machine by domain name.
-- Any changes made with the `set` method of a machine will be saved to the disk
-- when the module's `save` function is called. If the given machine does not
-- exist in the netrc file, this function will return nil.
-- @param name string
-- @return Machine
-- @usage local creds = netrc.machine("api.foobar.com")
-- @usage print(creds:get("username"), creds:get("password"))
function netrc.machine(name)
return nil
end
--- remove_machine removes a single machine from the netrc manifest by name.
-- @param name string the name of the machine to remove from the netrc manifest
-- @usage netrc.remove_machine("api.digg.com")
function netrc.remove_machine(name)
end
--- save writes all changes made in machine `set` methods to the disk at $HOME/.netrc.
-- This function will raise a lua error if the save fails. This function should
-- not fail in the course of normal operation.
-- @usage netrc.save()
function netrc.save()
end
--- Machine is a userdata wrapper around the go netrc.Machine type.
-- https://godoc.org/github.com/dickeyxxx/netrc#Machine
-- @type Machine
local Machine = {}
--- get gets a Machine value by key.
-- @param key the netrc key to get
-- @return string the value from the netrc
-- @usage local cli = api.new(m:get("login"), m:get("password"))
function Machine:get(key)
end
--- set updates information in this Machine by a key, value pair.
-- @param key the netrc key to set
-- @param value the value to set the above key to
-- @usage m:set("password", "hunter2")
function Machine:set(key, value)
end
return netrc

26
vendor/github.com/Xe/x/.gitignore generated vendored
View File

@ -1,26 +0,0 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test
*.prof
.env

6
vendor/github.com/Xe/x/BLESSING generated vendored
View File

@ -1,6 +0,0 @@
The author disclaims copyright to this source code. In place of
a legal notice, here is a blessing:
May you do good and not evil.
May you find forgiveness for yourself and forgive others.
May you share freely, never taking more than you give.

121
vendor/github.com/Xe/x/LICENSE generated vendored
View File

@ -1,121 +0,0 @@
Creative Commons Legal Code
CC0 1.0 Universal
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
HEREUNDER.
Statement of Purpose
The laws of most jurisdictions throughout the world automatically confer
exclusive Copyright and Related Rights (defined below) upon the creator
and subsequent owner(s) (each and all, an "owner") of an original work of
authorship and/or a database (each, a "Work").
Certain owners wish to permanently relinquish those rights to a Work for
the purpose of contributing to a commons of creative, cultural and
scientific works ("Commons") that the public can reliably and without fear
of later claims of infringement build upon, modify, incorporate in other
works, reuse and redistribute as freely as possible in any form whatsoever
and for any purposes, including without limitation commercial purposes.
These owners may contribute to the Commons to promote the ideal of a free
culture and the further production of creative, cultural and scientific
works, or to gain reputation or greater distribution for their Work in
part through the use and efforts of others.
For these and/or other purposes and motivations, and without any
expectation of additional consideration or compensation, the person
associating CC0 with a Work (the "Affirmer"), to the extent that he or she
is an owner of Copyright and Related Rights in the Work, voluntarily
elects to apply CC0 to the Work and publicly distribute the Work under its
terms, with knowledge of his or her Copyright and Related Rights in the
Work and the meaning and intended legal effect of CC0 on those rights.
1. Copyright and Related Rights. A Work made available under CC0 may be
protected by copyright and related or neighboring rights ("Copyright and
Related Rights"). Copyright and Related Rights include, but are not
limited to, the following:
i. the right to reproduce, adapt, distribute, perform, display,
communicate, and translate a Work;
ii. moral rights retained by the original author(s) and/or performer(s);
iii. publicity and privacy rights pertaining to a person's image or
likeness depicted in a Work;
iv. rights protecting against unfair competition in regards to a Work,
subject to the limitations in paragraph 4(a), below;
v. rights protecting the extraction, dissemination, use and reuse of data
in a Work;
vi. database rights (such as those arising under Directive 96/9/EC of the
European Parliament and of the Council of 11 March 1996 on the legal
protection of databases, and under any national implementation
thereof, including any amended or successor version of such
directive); and
vii. other similar, equivalent or corresponding rights throughout the
world based on applicable law or treaty, and any national
implementations thereof.
2. Waiver. To the greatest extent permitted by, but not in contravention
of, applicable law, Affirmer hereby overtly, fully, permanently,
irrevocably and unconditionally waives, abandons, and surrenders all of
Affirmer's Copyright and Related Rights and associated claims and causes
of action, whether now known or unknown (including existing as well as
future claims and causes of action), in the Work (i) in all territories
worldwide, (ii) for the maximum duration provided by applicable law or
treaty (including future time extensions), (iii) in any current or future
medium and for any number of copies, and (iv) for any purpose whatsoever,
including without limitation commercial, advertising or promotional
purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
member of the public at large and to the detriment of Affirmer's heirs and
successors, fully intending that such Waiver shall not be subject to
revocation, rescission, cancellation, termination, or any other legal or
equitable action to disrupt the quiet enjoyment of the Work by the public
as contemplated by Affirmer's express Statement of Purpose.
3. Public License Fallback. Should any part of the Waiver for any reason
be judged legally invalid or ineffective under applicable law, then the
Waiver shall be preserved to the maximum extent permitted taking into
account Affirmer's express Statement of Purpose. In addition, to the
extent the Waiver is so judged Affirmer hereby grants to each affected
person a royalty-free, non transferable, non sublicensable, non exclusive,
irrevocable and unconditional license to exercise Affirmer's Copyright and
Related Rights in the Work (i) in all territories worldwide, (ii) for the
maximum duration provided by applicable law or treaty (including future
time extensions), (iii) in any current or future medium and for any number
of copies, and (iv) for any purpose whatsoever, including without
limitation commercial, advertising or promotional purposes (the
"License"). The License shall be deemed effective as of the date CC0 was
applied by Affirmer to the Work. Should any part of the License for any
reason be judged legally invalid or ineffective under applicable law, such
partial invalidity or ineffectiveness shall not invalidate the remainder
of the License, and in such case Affirmer hereby affirms that he or she
will not (i) exercise any of his or her remaining Copyright and Related
Rights in the Work or (ii) assert any associated claims and causes of
action with respect to the Work, in either case contrary to Affirmer's
express Statement of Purpose.
4. Limitations and Disclaimers.
a. No trademark or patent rights held by Affirmer are waived, abandoned,
surrendered, licensed or otherwise affected by this document.
b. Affirmer offers the Work as-is and makes no representations or
warranties of any kind concerning the Work, express, implied,
statutory or otherwise, including without limitation warranties of
title, merchantability, fitness for a particular purpose, non
infringement, or the absence of latent or other defects, accuracy, or
the present or absence of errors, whether or not discoverable, all to
the greatest extent permissible under applicable law.
c. Affirmer disclaims responsibility for clearing rights of other persons
that may apply to the Work or any use thereof, including without
limitation any person's Copyright and Related Rights in the Work.
Further, Affirmer disclaims responsibility for obtaining any necessary
consents, permissions or other rights required for any use of the
Work.
d. Affirmer understands and acknowledges that Creative Commons is not a
party to this document and has no duty or obligation with respect to
this CC0 or use of the Work.

126
vendor/github.com/Xe/x/README.md generated vendored
View File

@ -1,126 +0,0 @@
# tools
Various tools of mine in Go
Installing these tools
----------------------
To install any of these tools, type in:
```console
$ go get christine.website/go/tools/$toolname
```
For example:
```console
$ go get christine.website/go/tools/license
```
`dokku`
-------
This is a simple command line tool to interface with Dokku servers. This is
a port of my shell extension
[`dokku.zsh`](https://github.com/Xe/dotfiles/blob/master/.zsh/dokku.zsh) to
a nice Go binary.
This takes a configuration file for defining multiple servers:
```ini
[server "default"]
user = dokku
host = panel.apps.xeserv.us
sshkey = /.ssh/id_rsa
```
By default it will imply that the SSH key is `~/.ssh/id_rsa` and that the
username is `dokku`. By default the server named `default` will be used for
command execution.
### TODO
- [ ] Allow interactive commands
- [ ] Directly pipe stdin and stdout to the ssh connection
---
`license`
---------
This is a simple command line tool to help users generate a license file based
on information they have already given their system and is easy for the system
to figure out on its own.
```console
$ license
Usage of license:
license [options] <license kind>
-email="": email of the person licensing the software
-name="": name of the person licensing the software
-out=false: write to a file instead of stdout
-show=false: show all licenses instead of generating one
By default the name and email are scraped from `git config`
```
```console
$ license -show
Licenses available:
zlib
unlicense
mit
apache
bsd-2
gpl-2
```
```console
$ license zlib
Copyright (c) 2015 Christine Dodrill <xena@yolo-swag.com>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgement in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
```
---
`ghstat`
--------
Command ghstat shows the status of GitHub via their status API.
Usage of ./ghstat:
-message=false: show last message?
This follows https://status.github.com/api for all but the list of all recent
status messages.
```console
$ ghstat
Status: minor (Fri Mar 27 15:24:57 2015)
```
```console
$ ghstat -message
Last message:
Status: minor
Message: We've deployed our volumetric attack defenses against an extremely
large amount of traffic. Performance is stabilizing.
Time: Fri Mar 27 15:04:59 2015
```

View File

@ -1,661 +0,0 @@
## `json`
```lua
local json = require "json"
```
Json encoder/decoder
The following functions are exposed by the library:
decode(string): Decodes a JSON string. Returns nil and an error string if
the string could not be decoded.
encode(value): Encodes a value into a JSON string. Returns nil and an error
string if the value could not be encoded.
## `xmlpath`
```lua
local xmlpath = require "xmlpath"
```
XMLPath style iteration
xml = "<booklist><book>x1</book><book>x2</book><book>x3</book></booklist>"
local xmlpath = require("xmlpath")
node,err = xmlpath.loadxml(xml)
path,err = xmlpath.compile("//book")
it = path:iter(node)
for k,v in pairs(it) do
print(k,v:string())
end
## `http`
```lua
local http = require("http")
```
HTTP client library
### API
- [`http.delete(url [, options])`](#httpdeleteurl--options)
- [`http.get(url [, options])`](#httpgeturl--options)
- [`http.head(url [, options])`](#httpheadurl--options)
- [`http.patch(url [, options])`](#httppatchurl--options)
- [`http.post(url [, options])`](#httpposturl--options)
- [`http.put(url [, options])`](#httpputurl--options)
- [`http.request(method, url [, options])`](#httprequestmethod-url--options)
- [`http.request_batch(requests)`](#httprequest_batchrequests)
- [`http.response`](#httpresponse)
#### http.delete(url [, options])
**Attributes**
| Name | Type | Description |
| ------- | ------ | ----------- |
| url | String | URL of the resource to load |
| options | Table | Additional options |
**Options**
| Name | Type | Description |
| ------- | ------ | ----------- |
| query | String | URL encoded query params |
| cookies | Table | Additional cookies to send with the request |
| headers | Table | Additional headers to send with the request |
**Returns**
[http.response](#httpresponse) or (nil, error message)
#### http.get(url [, options])
**Attributes**
| Name | Type | Description |
| ------- | ------ | ----------- |
| url | String | URL of the resource to load |
| options | Table | Additional options |
**Options**
| Name | Type | Description |
| ------- | ------ | ----------- |
| query | String | URL encoded query params |
| cookies | Table | Additional cookies to send with the request |
| headers | Table | Additional headers to send with the request |
**Returns**
[http.response](#httpresponse) or (nil, error message)
#### http.head(url [, options])
**Attributes**
| Name | Type | Description |
| ------- | ------ | ----------- |
| url | String | URL of the resource to load |
| options | Table | Additional options |
**Options**
| Name | Type | Description |
| ------- | ------ | ----------- |
| query | String | URL encoded query params |
| cookies | Table | Additional cookies to send with the request |
| headers | Table | Additional headers to send with the request |
**Returns**
[http.response](#httpresponse) or (nil, error message)
#### http.patch(url [, options])
**Attributes**
| Name | Type | Description |
| ------- | ------ | ----------- |
| url | String | URL of the resource to load |
| options | Table | Additional options |
**Options**
| Name | Type | Description |
| ------- | ------ | ----------- |
| query | String | URL encoded query params |
| cookies | Table | Additional cookies to send with the request |
| body | String | Request body. |
| form | String | Deprecated. URL encoded request body. This will also set the `Content-Type` header to `application/x-www-form-urlencoded` |
| headers | Table | Additional headers to send with the request |
**Returns**
[http.response](#httpresponse) or (nil, error message)
#### http.post(url [, options])
**Attributes**
| Name | Type | Description |
| ------- | ------ | ----------- |
| url | String | URL of the resource to load |
| options | Table | Additional options |
**Options**
| Name | Type | Description |
| ------- | ------ | ----------- |
| query | String | URL encoded query params |
| cookies | Table | Additional cookies to send with the request |
| body | String | Request body. |
| form | String | Deprecated. URL encoded request body. This will also set the `Content-Type` header to `application/x-www-form-urlencoded` |
| headers | Table | Additional headers to send with the request |
**Returns**
[http.response](#httpresponse) or (nil, error message)
#### http.put(url [, options])
**Attributes**
| Name | Type | Description |
| ------- | ------ | ----------- |
| url | String | URL of the resource to load |
| options | Table | Additional options |
**Options**
| Name | Type | Description |
| ------- | ------ | ----------- |
| query | String | URL encoded query params |
| cookies | Table | Additional cookies to send with the request |
| body | String | Request body. |
| form | String | Deprecated. URL encoded request body. This will also set the `Content-Type` header to `application/x-www-form-urlencoded` |
| headers | Table | Additional headers to send with the request |
**Returns**
[http.response](#httpresponse) or (nil, error message)
#### http.request(method, url [, options])
**Attributes**
| Name | Type | Description |
| ------- | ------ | ----------- |
| method | String | The HTTP request method |
| url | String | URL of the resource to load |
| options | Table | Additional options |
**Options**
| Name | Type | Description |
| ------- | ------ | ----------- |
| query | String | URL encoded query params |
| cookies | Table | Additional cookies to send with the request |
| body | String | Request body. |
| form | String | Deprecated. URL encoded request body. This will also set the `Content-Type` header to `application/x-www-form-urlencoded` |
| headers | Table | Additional headers to send with the request |
**Returns**
[http.response](#httpresponse) or (nil, error message)
#### http.request_batch(requests)
**Attributes**
| Name | Type | Description |
| -------- | ----- | ----------- |
| requests | Table | A table of requests to send. Each request item is by itself a table containing [http.request](#httprequestmethod-url--options) parameters for the request |
**Returns**
[[http.response](#httpresponse)] or ([[http.response](#httpresponse)], [error message])
#### http.response
The `http.response` table contains information about a completed HTTP request.
**Attributes**
| Name | Type | Description |
| ----------- | ------ | ----------- |
| body | String | The HTTP response body |
| body_size | Number | The size of the HTTP response body in bytes |
| headers | Table | The HTTP response headers |
| cookies | Table | The cookies sent by the server in the HTTP response |
| status_code | Number | The HTTP response status code |
| url | String | The final URL the request ended pointing to after redirects |
## `url`
```lua
local url = require "url"
```
URL parsing library
### API
- [`url.parse(url)`](#urlparseurl)
- [`url.build(options)`](#urlbuildoptions)
- [`url.build_query_string(query_params)`](#urlbuild_query_stringquery_params)
- [`url.resolve(from, to)`](#urlresolvefrom-to)
#### url.parse(url)
Parse URL into a table of key/value components.
**Attributes**
| Name | Type | Description |
| ------- | ------ | ----------- |
| url | String | URL to be parsed |
**Returns**
Table with parsed URL or (nil, error message)
| Name | Type | Description |
| -------- | ------ | ----------- |
| scheme | String | Scheme of the URL |
| username | String | Username |
| password | String | Password |
| host | String | Host and port of the URL |
| path | String | Path |
| query | String | Query string |
| fragment | String | Fragment |
#### url.build(options)
Assemble a URL string from a table of URL components.
**Attributes**
| Name | Type | Description |
| ------- | ----- | ----------- |
| options | Table | Table with URL components, see [`url.parse`](#urlparseurl) for list of valid components |
**Returns**
String
#### url.build_query_string(query_params)
Assemble table of query string parameters into a string.
**Attributes**
| Name | Type | Description |
| ------------ | ----- | ----------- |
| query_params | Table | Table with query parameters |
**Returns**
String
#### url.resolve(from, to)
Take a base URL, and a href URL, and resolve them as a browser would for an anchor tag.
| Name | Type | Description |
| ---- | ------ | ----------- |
| from | String | base URL |
| to | String | href URL |
**Returns**
String or (nil, error message)
## `env`
```lua
local env = require "env"
```
Environment manipulation
### API
#### `env.set(key, value)`
Same `os.setenv`
#### `env.get(key)`
Same `os.getenv`
#### `env.loadfile(file)`
Loads environment variables from a file. The file is as the following:
```
AAA=BBB
CCC=DDD
```
If this function fails, it returns `nil`, plus a string describing the error.
## `fs`
```lua
local fs = require "fs"
```
Filesystem manipulation
### API
#### `fs.exists(file)`
Returns true if the file exists.
#### `fs.read(file)`
Reads file content and return it. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.write(file, content, [mode])`
Writes content to the file. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.mkdir(path, [mode, recursive])`
Create directory. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.remove(path, [recursive])`
Remove path. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.symlink(target, link)`
Create symbolic link. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.dirname(path)`
Returns all but the last element of path.
#### `fs.basename(path)`
Returns the last element of path.
#### `fs.realpath(path)`
Returns the real path of a given path in the os. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.getcwd()`
Returns the current working directory. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.chdir(path)`
Changes the current working directory. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.file()`
Returns the script file path. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.dir()`
Returns the directory path that is parent of the script file. If this function fails, it returns `nil`, plus a string describing the error.
#### `fs.glob(pattern, function)`
Run the callback function with the files matching pattern. See below example:
```lua
local fs = require("fs")
local ret, err = fs.glob("/tmp/*", function(file)
print(file.path)
print(file.realpath)
end)
```
## `markdown`
```lua
local markdown = require "markdown"
```
Markdown -> HTML for string and file
### API
#### `markdown.dostring(text)`
Returns HTML string generated from the markdown text.
#### `markdown.dofile(file)`
Returns HTML string generated from the markdown text file. If this function fails, it returns `nil`, plus a string describing the error.
## `question`
```lua
local question = require "question"
```
Prompt library
### API
* `question.ask(text)`
* `question.secret(text)`
## `ssh`
```lua
local ssh = require "ssh"
```
SSH client library
https://github.com/kohkimakimoto/gluassh/blob/master/gluassh_test.go
## `template`
```lua
local template = require "template"
```
Go text templates
### API
#### `template.dostring(text, table)`
Returns string generated by text template with the table values. If this function fails, it returns `nil`, plus a string describing the error.
#### `template.dofile(file, table)`
Returns string generated by file template with the table values. If this function fails, it returns `nil`, plus a string describing the error.
## `yaml`
```lua
local yaml = require "yaml"
```
Yaml -> table parser
### API
#### `yaml.parse(string)`
Parses yaml formatted string and returns a table. If this function fails, it returns `nil`, plus a string describing the error.
## `flag`
```lua
local flag = require "flag"
```
Command line flag parsing.
See the tests here: https://github.com/otm/gluaflag
```lua
local flag = require "flag"
fs = flag.new()
fs:string("name", "foo", "String help string")
fs:intArg("title", 1, "Title")
fs:numberArg("title", 1, "Title")
flags = fs:parse(arg) -- arg is the remaining command line arguments
assert(flags.title == 2, "expected title to be 2")
assert(flags.title == 2.32, "expected title to be 2.32")
```
## `sh`
```lua
local sh = require "sh"
```
gluash is an interface to call any program as if it were a function. Programs are executed asynchronously to enable streaming of data in pipes.
In all discussions below the imported module will be referred to as `sh`.
Commands are called just like functions, executed on the sh module.
```lua
sh.ls("/")
```
For commands that have exotic names, names that are reserved words, or to execute absolute or relative paths call the sh module directly.
```lua
sh("/bin/ls", "/")
```
#### Multiple Arguments
Commands with multiple arguments have to be invoked with a separate string for each argument.
```lua
-- this works
sh.ls("-la", "/")
-- this does not work
sh.ls("-la /")
```
#### Piping
Piping in sh is done almost like piping in the shell. Just call next command as a method on the previous command.
```lua
sh.du("-sb"):sort("-rn"):print()
```
If the command has a exotic name, or a reserved word, call the command through `cmd(path, ...args)`. The first argument in `cmd` is the path.
```lua
sh.du("-sb"):cmd("sort", "-rn"):print()
```
### Waiting for Processes
All commands are executed by default in the background, so one has to explicitly wait for a process to finish. There are several ways to wait for the command to finish.
* `print()` - write stdout and stderr to stdout.
* `ok()` - aborts execution if the command's exit code is not zero
* `success()` - returns true if the command's exit code is zero
* `exitcode()` - returns the exit code of the command
### Abort by Default
It is possible to set the module to abort on errors without checking. It can be practical in some occasions, however performance will be degraded. When global exit code checks are done the commands are run in series, even in pipes, and output is saved in memory buffers.
To enable global exit code settings call the sh module with an table with the key `abort` set to true.
```lua
sh{abort=true}
```
To read current settings in the module call the module with an empty table.
```lua
configuration = sh{}
print("abort:", configuration.abort)
```
### Analyzing Output
There are several options to analyze the output of a command.
#### lines()
An iterator is accessible by calling the method `lines()` on the command.
```lua
for line in sh.cat("/etc/hosts"):lines() do
print(line)
end
```
#### stdout([filename]), stderr([filename]), combinedOutput([filename])
`stdout()`, `stderr()`, and `combinedOutput()` all returns the output of the command as a string. An optional `filename` can be given to the method, in that case the output is also written to the file. The file will be truncated.
```lua
-- print output of command
output = sh.echo("hello world"):combinedOutput("/tmp/output")
print(output)
```
The example above will print `hello world` and also write it to `/tmp/output`
### Glob Expansion
There is no glob expansion done on arguments, however there is a glob functionality in sh.
```lua
sh.ls(sh.glob("*.go"))
```
## `re`
```lua
local re = require "re"
```
Regular Expressions
### API
re.find , re.gsub, re.match, re.gmatch are available. These functions have the same API as Lua pattern match.
gluare uses the Go regexp package, so you can use regular expressions that are supported in the Go regexp package.
In addition, the following functions are defined:
```
gluare.quote(s string) -> string
Arguments:
s string: a string value to escape meta characters
Returns:
string: escaped string
gluare.quote returns a string that quotes all regular expression metacharacters inside the given text.
```
## `simplebox`
```lua
local simplebox = require "simplebox"
```
Simple encryption
### API
#### Create a new instance of simplebox with a newly generated key
```lua
local simplebox = require "simplebox"
local key = simplebox.genkey()
print("key is: " .. key)
local sb = simplebox.new()
```

View File

@ -1,4 +0,0 @@
FROM busybox
ADD glue /glue
CMD /glue

View File

@ -1,181 +0,0 @@
memo = ""
[[projects]]
branch = "master"
name = "github.com/ThomasRooney/gexpect"
packages = ["."]
revision = "5482f03509440585d13d8f648989e05903001842"
[[projects]]
branch = "master"
name = "github.com/ailncode/gluaxmlpath"
packages = ["."]
revision = "6ce478ecb4a60c4fc8929838e0b21b7fb7ca7440"
[[projects]]
branch = "master"
name = "github.com/brandur/simplebox"
packages = ["."]
revision = "84e9865bb03ad38c464043bf5382ce8c68ca5f0c"
[[projects]]
branch = "master"
name = "github.com/cjoudrey/gluahttp"
packages = ["."]
revision = "b4bfe0c50fea948dcbf3966e120996d6607bbd89"
[[projects]]
branch = "master"
name = "github.com/cjoudrey/gluaurl"
packages = ["."]
revision = "31cbb9bef199454415879f2e6d609d1136d60cad"
[[projects]]
branch = "master"
name = "github.com/howeyc/gopass"
packages = ["."]
revision = "bf9dde6d0d2c004a008c27aaee91170c786f6db8"
[[projects]]
branch = "master"
name = "github.com/kballard/go-shellquote"
packages = ["."]
revision = "d8ec1a69a250a17bb0e419c386eac1f3711dc142"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluaenv"
packages = ["."]
revision = "2888db6bbe38923d59c42e443895875cc8ce0820"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluafs"
packages = ["."]
revision = "01391ed2d7ab89dc80157605b073403f960aa223"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluaquestion"
packages = ["."]
revision = "311437c29ba54d027ad2af383661725ae2bfdcdc"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluassh"
packages = ["."]
revision = "2a7bd48d7568de8230c87ac1ef4a4c481e45814d"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluatemplate"
packages = ["."]
revision = "d9e2c9d6b00f069a9da377a9ac529c827c1c7d71"
[[projects]]
branch = "master"
name = "github.com/kohkimakimoto/gluayaml"
packages = ["."]
revision = "6fe413d49d73d785510ecf1529991ab0573e96c7"
[[projects]]
branch = "master"
name = "github.com/kr/fs"
packages = ["."]
revision = "2788f0dbd16903de03cb8186e5c7d97b69ad387b"
[[projects]]
branch = "master"
name = "github.com/kr/pty"
packages = ["."]
revision = "ce7fa45920dc37a92de8377972e52bc55ffa8d57"
[[projects]]
branch = "master"
name = "github.com/mitchellh/mapstructure"
packages = ["."]
revision = "cc8532a8e9a55ea36402aa21efdf403a60d34096"
[[projects]]
branch = "master"
name = "github.com/otm/gluaflag"
packages = ["."]
revision = "078088de689148194436293886e8e39809167332"
[[projects]]
branch = "master"
name = "github.com/otm/gluash"
packages = ["."]
revision = "e145c563986f0b91f740a758a84bca46c163aec7"
[[projects]]
branch = "master"
name = "github.com/pkg/sftp"
packages = ["."]
revision = "e84cc8c755ca39b7b64f510fe1fffc1b51f210a5"
[[projects]]
branch = "master"
name = "github.com/yookoala/realpath"
packages = ["."]
revision = "c416d99ab5ed256fa30c1f3bab73152deb59bb69"
[[projects]]
branch = "master"
name = "github.com/yuin/gluamapper"
packages = ["."]
revision = "d836955830e75240d46ce9f0e6d148d94f2e1d3a"
[[projects]]
branch = "master"
name = "github.com/yuin/gluare"
packages = ["."]
revision = "8e2742cd1bf2b904720ac66eca3c2091b2ea0720"
[[projects]]
branch = "master"
name = "github.com/yuin/gopher-lua"
packages = [".","parse"]
revision = "33ebc07735566cd0c3c4b69e2839d522cc389852"
[[projects]]
branch = "master"
name = "golang.org/x/crypto"
packages = ["nacl/secretbox","ssh/terminal","ssh","ssh/agent"]
revision = "dd85ac7e6a88fc6ca420478e934de5f1a42dd3c6"
[[projects]]
branch = "master"
name = "golang.org/x/net"
packages = ["html"]
revision = "66aacef3dd8a676686c7ae3716979581e8b03c47"
[[projects]]
branch = "master"
name = "golang.org/x/sys"
packages = ["unix"]
revision = "9ccfe848b9db8435a24c424abbc07a921adf1df5"
[[projects]]
branch = "v2"
name = "gopkg.in/xmlpath.v2"
packages = ["."]
revision = "860cbeca3ebcc600db0b213c0e83ad6ce91f5739"
[[projects]]
branch = "master"
name = "gopkg.in/yaml.v2"
packages = ["."]
revision = "cd8b52f8269e0feb286dfeef29f8fe4d5b397e0b"
[[projects]]
branch = "master"
name = "layeh.com/gopher-json"
packages = ["."]
revision = "c128cc74278be889c4381681712931976fe0d88b"
[[projects]]
branch = "master"
name = "layeh.com/gopher-luar"
packages = ["."]
revision = "80196fe2abc5682963fc7a5261f5a5d77509938b"

View File

@ -1,68 +0,0 @@
[[dependencies]]
branch = "master"
name = "github.com/ThomasRooney/gexpect"
[[dependencies]]
branch = "master"
name = "github.com/ailncode/gluaxmlpath"
[[dependencies]]
branch = "master"
name = "github.com/brandur/simplebox"
[[dependencies]]
branch = "master"
name = "github.com/cjoudrey/gluahttp"
[[dependencies]]
branch = "master"
name = "github.com/cjoudrey/gluaurl"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluaenv"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluafs"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluaquestion"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluassh"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluatemplate"
[[dependencies]]
branch = "master"
name = "github.com/kohkimakimoto/gluayaml"
[[dependencies]]
branch = "master"
name = "github.com/otm/gluaflag"
[[dependencies]]
branch = "master"
name = "github.com/otm/gluash"
[[dependencies]]
branch = "master"
name = "github.com/yuin/gluare"
[[dependencies]]
branch = "master"
name = "github.com/yuin/gopher-lua"
[[dependencies]]
branch = "master"
name = "layeh.com/gopher-json"
[[dependencies]]
branch = "master"
name = "layeh.com/gopher-luar"

View File

@ -1,18 +0,0 @@
glue
====
Basically gopher-lua's cmd/glua with the following modules imported:
- https://godoc.org/layeh.com/gopher-json
- https://github.com/ailncode/gluaxmlpath
- https://github.com/cjoudrey/gluahttp
- https://github.com/cjoudrey/gluaurl
- https://github.com/kohkimakimoto/gluaenv
- https://github.com/kohkimakimoto/gluafs
- https://github.com/kohkimakimoto/gluamarkdown
- https://github.com/kohkimakimoto/gluaquestion
- https://github.com/kohkimakimoto/gluassh
- https://github.com/kohkimakimoto/gluatemplate
- https://github.com/kohkimakimoto/gluayaml
- https://github.com/otm/gluaflag
- https://github.com/otm/gluash
- https://github.com/yuin/gluare

View File

@ -1,6 +0,0 @@
from "alpine:edge"
copy "glue", "/glue"
cmd "/glue"
flatten
tag "xena/glue"

View File

@ -1,20 +0,0 @@
-- expects glue, $ go get -u github.com/Xe/tools/glue
local sh = require "sh"
sh { abort = true }
if os.getenv("CGO_ENABLED") ~= "0" then
error("CGO_ENABLED must be set to 1")
end
print "building glue..."
sh.go("build"):print()
sh.upx("--ultra-brute", "glue"):print()
sh.box("box.rb"):print()
print "releasing to docker hub"
sh.docker("push", "xena/glue"):print()
print "moving glue binary to $GOPATH/bin"
sh.mv("glue", (os.getenv("GOPATH") .. "/bin/glue"))
print "build/release complete"

Binary file not shown.

View File

@ -1,213 +0,0 @@
package main
import (
"bufio"
"flag"
"fmt"
"net/http"
"os"
"runtime/pprof"
"github.com/Xe/x/tools/glue/libs/gluaexpect"
"github.com/Xe/x/tools/glue/libs/gluasimplebox"
"github.com/ailncode/gluaxmlpath"
"github.com/cjoudrey/gluahttp"
"github.com/cjoudrey/gluaurl"
"github.com/kohkimakimoto/gluaenv"
"github.com/kohkimakimoto/gluafs"
"github.com/kohkimakimoto/gluaquestion"
"github.com/kohkimakimoto/gluassh"
"github.com/kohkimakimoto/gluatemplate"
"github.com/kohkimakimoto/gluayaml"
"github.com/otm/gluaflag"
"github.com/otm/gluash"
"github.com/yuin/gluare"
"github.com/yuin/gopher-lua"
"github.com/yuin/gopher-lua/parse"
json "layeh.com/gopher-json"
)
func main() {
os.Exit(mainAux())
}
func mainAux() int {
var opt_e, opt_l, opt_p string
var opt_i, opt_v, opt_dt, opt_dc bool
var opt_m int
flag.StringVar(&opt_e, "e", "", "")
flag.StringVar(&opt_l, "l", "", "")
flag.StringVar(&opt_p, "p", "", "")
flag.IntVar(&opt_m, "mx", 0, "")
flag.BoolVar(&opt_i, "i", false, "")
flag.BoolVar(&opt_v, "v", false, "")
flag.BoolVar(&opt_dt, "dt", false, "")
flag.BoolVar(&opt_dc, "dc", false, "")
flag.Usage = func() {
fmt.Println(`Usage: glue [options] [script [args]].
Available options are:
-e stat execute string 'stat'
-l name require library 'name'
-mx MB memory limit(default: unlimited)
-dt dump AST trees
-dc dump VM codes
-i enter interactive mode after executing 'script'
-p file write cpu profiles to the file
-v show version information
`)
}
flag.Parse()
if len(opt_p) != 0 {
f, err := os.Create(opt_p)
if err != nil {
fmt.Println(err.Error())
os.Exit(1)
}
pprof.StartCPUProfile(f)
defer pprof.StopCPUProfile()
}
if len(opt_e) == 0 && !opt_i && !opt_v && flag.NArg() == 0 {
opt_i = true
}
status := 0
L := lua.NewState()
defer L.Close()
if opt_m > 0 {
L.SetMx(opt_m)
}
preload(L)
if opt_v || opt_i {
fmt.Println(lua.PackageCopyRight)
}
if len(opt_l) > 0 {
if err := L.DoFile(opt_l); err != nil {
fmt.Println(err.Error())
}
}
if nargs := flag.NArg(); nargs > 0 {
script := flag.Arg(0)
argtb := L.NewTable()
for i := 1; i < nargs; i++ {
L.RawSet(argtb, lua.LNumber(i), lua.LString(flag.Arg(i)))
}
L.SetGlobal("arg", argtb)
if opt_dt || opt_dc {
file, err := os.Open(script)
if err != nil {
fmt.Println(err.Error())
return 1
}
chunk, err2 := parse.Parse(file, script)
if err2 != nil {
fmt.Println(err2.Error())
return 1
}
if opt_dt {
fmt.Println(parse.Dump(chunk))
}
if opt_dc {
proto, err3 := lua.Compile(chunk, script)
if err3 != nil {
fmt.Println(err3.Error())
return 1
}
fmt.Println(proto.String())
}
}
if err := L.DoFile(script); err != nil {
fmt.Println(err.Error())
status = 1
}
}
if len(opt_e) > 0 {
if err := L.DoString(opt_e); err != nil {
fmt.Println(err.Error())
status = 1
}
}
if opt_i {
doREPL(L)
}
return status
}
func preload(L *lua.LState) {
L.PreloadModule("re", gluare.Loader)
L.PreloadModule("sh", gluash.Loader)
L.PreloadModule("fs", gluafs.Loader)
L.PreloadModule("env", gluaenv.Loader)
L.PreloadModule("yaml", gluayaml.Loader)
L.PreloadModule("question", gluaquestion.Loader)
L.PreloadModule("ssh", gluassh.Loader)
L.PreloadModule("http", gluahttp.NewHttpModule(&http.Client{}).Loader)
L.PreloadModule("flag", gluaflag.Loader)
L.PreloadModule("template", gluatemplate.Loader)
L.PreloadModule("url", gluaurl.Loader)
gluaexpect.Preload(L)
gluasimplebox.Preload(L)
gluaxmlpath.Preload(L)
json.Preload(L)
}
// do read/eval/print/loop
func doREPL(L *lua.LState) {
reader := bufio.NewReader(os.Stdin)
for {
if str, err := loadline(reader, L); err == nil {
if err := L.DoString(str); err != nil {
fmt.Println(err)
}
} else { // error on loadline
fmt.Println(err)
return
}
}
}
func incomplete(err error) bool {
if lerr, ok := err.(*lua.ApiError); ok {
if perr, ok := lerr.Cause.(*parse.Error); ok {
return perr.Pos.Line == parse.EOF
}
}
return false
}
func loadline(reader *bufio.Reader, L *lua.LState) (string, error) {
fmt.Print("> ")
if line, err := reader.ReadString('\n'); err == nil {
if _, err := L.LoadString("return " + line); err == nil { // try add return <...> then compile
return line, nil
} else {
return multiline(line, reader, L)
}
} else {
return "", err
}
}
func multiline(ml string, reader *bufio.Reader, L *lua.LState) (string, error) {
for {
if _, err := L.LoadString(ml); err == nil { // try compile
return ml, nil
} else if !incomplete(err) { // syntax error , but not EOF
return ml, nil
} else {
fmt.Print(">> ")
if line, err := reader.ReadString('\n'); err == nil {
ml = ml + "\n" + line
} else {
return "", err
}
}
}
}

View File

@ -1,35 +0,0 @@
package gluaexpect
import (
"github.com/ThomasRooney/gexpect"
lua "github.com/yuin/gopher-lua"
luar "layeh.com/gopher-luar"
)
func Preload(L *lua.LState) {
L.PreloadModule("expect", Loader)
}
// Loader is the module loader function.
func Loader(L *lua.LState) int {
mod := L.SetFuncs(L.NewTable(), api)
L.Push(mod)
return 1
}
var api = map[string]lua.LGFunction{
"spawn": spawn,
}
func spawn(L *lua.LState) int {
cmd := L.CheckString(1)
child, err := gexpect.Spawn(cmd)
if err != nil {
L.Push(lua.LNil)
L.Push(lua.LString(err.Error()))
return 2
}
L.Push(luar.New(L, child))
return 1
}

View File

@ -1,100 +0,0 @@
package gluasimplebox
import (
"crypto/rand"
"encoding/base64"
"encoding/hex"
"errors"
"github.com/brandur/simplebox"
lua "github.com/yuin/gopher-lua"
luar "layeh.com/gopher-luar"
)
func Preload(L *lua.LState) {
L.PreloadModule("simplebox", Loader)
}
// Loader is the module loader function.
func Loader(L *lua.LState) int {
mod := L.SetFuncs(L.NewTable(), api)
L.Push(mod)
return 1
}
var api = map[string]lua.LGFunction{
"new": newSecretBox,
"genkey": genKey,
}
func newSecretBox(L *lua.LState) int {
key := L.CheckString(1)
k, err := parseKey(key)
if err != nil {
L.Push(lua.LNil)
L.Push(lua.LString(err.Error()))
return 2
}
sb := simplebox.NewFromSecretKey(k)
L.Push(luar.New(L, &box{sb: sb}))
return 1
}
func genKey(L *lua.LState) int {
key, err := generateKey()
if err != nil {
L.Push(lua.LNil)
L.Push(lua.LString(err.Error()))
return 2
}
L.Push(lua.LString(base64.URLEncoding.EncodeToString(key[:])))
return 1
}
// generateKey produces a new 32-byte secret key from the
// cryptographically secure random source, or an error if the source
// fails.
func generateKey() (*[32]byte, error) {
	key := new([32]byte)
	if _, err := rand.Read(key[:]); err != nil {
		return nil, err
	}
	return key, nil
}
// parseKey decodes a base64url-encoded secret key into the fixed-size
// 32-byte array that simplebox expects.
//
// The input must decode to exactly 32 bytes. The original check only
// rejected short keys (copy count < 32), which meant longer input was
// silently truncated to its first 32 bytes — a correctness/security
// defect for key material — so both short and long keys are now
// rejected as invalid.
func parseKey(s string) (*[32]byte, error) {
	raw, err := base64.URLEncoding.DecodeString(s)
	if err != nil {
		return nil, err
	}
	k := &[32]byte{}
	if len(raw) != len(k) {
		return nil, errors.New("not valid")
	}
	copy(k[:], raw)
	return k, nil
}
// box wraps a simplebox.SimpleBox so that its Encrypt/Decrypt methods
// can be exposed to Lua through luar.
type box struct {
	sb *simplebox.SimpleBox
}
// Encrypt seals data with the underlying SimpleBox and returns the
// ciphertext as a hex-encoded string.
func (b *box) Encrypt(data string) string {
	return hex.EncodeToString(b.sb.Encrypt([]byte(data)))
}
// Decrypt hex-decodes data, opens it with the underlying SimpleBox,
// and returns the plaintext. It reports an error if the input is not
// valid hex or the ciphertext fails to authenticate/decrypt.
func (b *box) Decrypt(data string) (string, error) {
	raw, err := hex.DecodeString(data)
	if err != nil {
		return "", err
	}
	plain, err := b.sb.Decrypt(raw)
	if err != nil {
		return "", err
	}
	return string(plain), nil
}

View File

@ -1,2 +0,0 @@
README.html
coverage.out

View File

@ -1,70 +0,0 @@
language: go
sudo: false
go:
- 1.8
- 1.7.5
- 1.7.4
- 1.7.3
- 1.7.2
- 1.7.1
- 1.7
- tip
- 1.6.4
- 1.6.3
- 1.6.2
- 1.6.1
- 1.6
- 1.5.4
- 1.5.3
- 1.5.2
- 1.5.1
- 1.5
- 1.4.3
- 1.4.2
- 1.4.1
- 1.4
- 1.3.3
- 1.3.2
- 1.3.1
- 1.3
- 1.2.2
- 1.2.1
- 1.2
- 1.1.2
- 1.1.1
- 1.1
before_install:
- go get github.com/mattn/goveralls
script:
- $HOME/gopath/bin/goveralls -service=travis-ci
notifications:
email:
on_success: never
matrix:
fast_finish: true
allow_failures:
- go: tip
- go: 1.6.4
- go: 1.6.3
- go: 1.6.2
- go: 1.6.1
- go: 1.6
- go: 1.5.4
- go: 1.5.3
- go: 1.5.2
- go: 1.5.1
- go: 1.5
- go: 1.4.3
- go: 1.4.2
- go: 1.4.1
- go: 1.4
- go: 1.3.3
- go: 1.3.2
- go: 1.3.1
- go: 1.3
- go: 1.2.2
- go: 1.2.1
- go: 1.2
- go: 1.1.2
- go: 1.1.1
- go: 1.1

View File

@ -1,36 +0,0 @@
Developer Certificate of Origin
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1 +0,0 @@
Alex Bucataru <alex@alrux.com> (@AlexBucataru)

View File

@ -1,5 +0,0 @@
Alrux Go EXTensions (AGExt) - package levenshtein
Copyright 2016 ALRUX Inc.
This product includes software developed at ALRUX Inc.
(http://www.alrux.com/).

View File

@ -1,38 +0,0 @@
# A Go package for calculating the Levenshtein distance between two strings
[![Release](https://img.shields.io/github/release/agext/levenshtein.svg?style=flat)](https://github.com/agext/levenshtein/releases/latest)
[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg?style=flat)](https://godoc.org/github.com/agext/levenshtein) 
[![Build Status](https://travis-ci.org/agext/levenshtein.svg?branch=master&style=flat)](https://travis-ci.org/agext/levenshtein)
[![Coverage Status](https://coveralls.io/repos/github/agext/levenshtein/badge.svg?style=flat)](https://coveralls.io/github/agext/levenshtein)
[![Go Report Card](https://goreportcard.com/badge/github.com/agext/levenshtein?style=flat)](https://goreportcard.com/report/github.com/agext/levenshtein)
This package implements distance and similarity metrics for strings, based on the Levenshtein measure, in [Go](http://golang.org).
## Project Status
v1.2.1 Stable: Guaranteed no breaking changes to the API in future v1.x releases. Probably safe to use in production, though provided on "AS IS" basis.
This package is being actively maintained. If you encounter any problems or have any suggestions for improvement, please [open an issue](https://github.com/agext/levenshtein/issues). Pull requests are welcome.
## Overview
The Levenshtein `Distance` between two strings is the minimum total cost of edits that would convert the first string into the second. The allowed edit operations are insertions, deletions, and substitutions, all at character (one UTF-8 code point) level. Each operation has a default cost of 1, but each can be assigned its own cost equal to or greater than 0.
A `Distance` of 0 means the two strings are identical, and the higher the value the more different the strings. Since in practice we are interested in finding if the two strings are "close enough", it often does not make sense to continue the calculation once the result is mathematically guaranteed to exceed a desired threshold. Providing this value to the `Distance` function allows it to take a shortcut and return a lower bound instead of an exact cost when the threshold is exceeded.
The `Similarity` function calculates the distance, then converts it into a normalized metric within the range 0..1, with 1 meaning the strings are identical, and 0 that they have nothing in common. A minimum similarity threshold can be provided to speed up the calculation of the metric for strings that are far too dissimilar for the purpose at hand. All values under this threshold are rounded down to 0.
The `Match` function provides a similarity metric, with the same range and meaning as `Similarity`, but with a bonus for string pairs that share a common prefix and have a similarity above a "bonus threshold". It uses the same method as proposed by Winkler for the Jaro distance, and the reasoning behind it is that these string pairs are very likely spelling variations or errors, and they are more closely linked than the edit distance alone would suggest.
The underlying `Calculate` function is also exported, to allow the building of other derivative metrics, if needed.
## Installation
```
go get github.com/agext/levenshtein
```
## License
Package levenshtein is released under the Apache 2.0 license. See the [LICENSE](LICENSE) file for details.

View File

@ -1,290 +0,0 @@
// Copyright 2016 ALRUX Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package levenshtein implements distance and similarity metrics for strings, based on the Levenshtein measure.
The Levenshtein `Distance` between two strings is the minimum total cost of edits that would convert the first string into the second. The allowed edit operations are insertions, deletions, and substitutions, all at character (one UTF-8 code point) level. Each operation has a default cost of 1, but each can be assigned its own cost equal to or greater than 0.
A `Distance` of 0 means the two strings are identical, and the higher the value the more different the strings. Since in practice we are interested in finding if the two strings are "close enough", it often does not make sense to continue the calculation once the result is mathematically guaranteed to exceed a desired threshold. Providing this value to the `Distance` function allows it to take a shortcut and return a lower bound instead of an exact cost when the threshold is exceeded.
The `Similarity` function calculates the distance, then converts it into a normalized metric within the range 0..1, with 1 meaning the strings are identical, and 0 that they have nothing in common. A minimum similarity threshold can be provided to speed up the calculation of the metric for strings that are far too dissimilar for the purpose at hand. All values under this threshold are rounded down to 0.
The `Match` function provides a similarity metric, with the same range and meaning as `Similarity`, but with a bonus for string pairs that share a common prefix and have a similarity above a "bonus threshold". It uses the same method as proposed by Winkler for the Jaro distance, and the reasoning behind it is that these string pairs are very likely spelling variations or errors, and they are more closely linked than the edit distance alone would suggest.
The underlying `Calculate` function is also exported, to allow the building of other derivative metrics, if needed.
*/
package levenshtein
// Calculate determines the Levenshtein distance between two strings, using
// the given costs for each edit operation. It returns the distance along with
// the lengths of the longest common prefix and suffix.
//
// If maxCost is non-zero, the calculation stops as soon as the distance is determined
// to be greater than maxCost. Therefore, any return value higher than maxCost is a
// lower bound for the actual distance.
func Calculate(str1, str2 []rune, maxCost, insCost, subCost, delCost int) (dist, prefixLen, suffixLen int) {
	l1, l2 := len(str1), len(str2)
	// trim common prefix, if any, as it doesn't affect the distance
	for ; prefixLen < l1 && prefixLen < l2; prefixLen++ {
		if str1[prefixLen] != str2[prefixLen] {
			break
		}
	}
	str1, str2 = str1[prefixLen:], str2[prefixLen:]
	l1 -= prefixLen
	l2 -= prefixLen
	// trim common suffix, if any, as it doesn't affect the distance
	for 0 < l1 && 0 < l2 {
		if str1[l1-1] != str2[l2-1] {
			str1, str2 = str1[:l1], str2[:l2]
			break
		}
		l1--
		l2--
		suffixLen++
	}
	// if the first string is empty, the distance is the length of the second string times the cost of insertion
	if l1 == 0 {
		dist = l2 * insCost
		return
	}
	// if the second string is empty, the distance is the length of the first string times the cost of deletion
	if l2 == 0 {
		dist = l1 * delCost
		return
	}
	// variables used in inner "for" loops
	var y, dy, c, l int
	// if maxCost is greater than or equal to the maximum possible distance, it's equivalent to 'unlimited'
	if maxCost > 0 {
		if subCost < delCost+insCost {
			if maxCost >= l1*subCost+(l2-l1)*insCost {
				maxCost = 0
			}
		} else {
			if maxCost >= l1*delCost+l2*insCost {
				maxCost = 0
			}
		}
	}
	if maxCost > 0 {
		// Threshold ("banded") code path: only the band of DP cells
		// whose value can still be <= maxCost is kept per row, so the
		// loop can bail out early once the band empties.
		// prefer the longer string first, to minimize time;
		// a swap also transposes the meanings of insertion and deletion.
		if l1 < l2 {
			str1, str2, l1, l2, insCost, delCost = str2, str1, l2, l1, delCost, insCost
		}
		// the length differential times cost of deletion is a lower bound for the cost;
		// if it is higher than the maxCost, there is no point going into the main calculation.
		if dist = (l1 - l2) * delCost; dist > maxCost {
			return
		}
		d := make([]int, l1+1)
		// offset and length of d in the current row
		doff, dlen := 0, 1
		// seed the first column (pure deletions) only as far as maxCost allows
		for y, dy = 1, delCost; y <= l1 && dy <= maxCost; dlen++ {
			d[y] = dy
			y++
			dy = y * delCost
		}
		// fmt.Printf("%q -> %q: init doff=%d dlen=%d d[%d:%d]=%v\n", str1, str2, doff, dlen, doff, doff+dlen, d[doff:doff+dlen])
		for x := 0; x < l2; x++ {
			dy, d[doff] = d[doff], d[doff]+insCost
			// shrink the band from the top while its first cell exceeds maxCost
			for d[doff] > maxCost && dlen > 0 {
				if str1[doff] != str2[x] {
					dy += subCost
				}
				doff++
				dlen--
				if c = d[doff] + insCost; c < dy {
					dy = c
				}
				dy, d[doff] = d[doff], dy
			}
			// relax the cells inside the current band
			for y, l = doff, doff+dlen-1; y < l; dy, d[y] = d[y], dy {
				if str1[y] != str2[x] {
					dy += subCost
				}
				if c = d[y] + delCost; c < dy {
					dy = c
				}
				y++
				if c = d[y] + insCost; c < dy {
					dy = c
				}
			}
			// grow the band downward while new cells stay within maxCost
			if y < l1 {
				if str1[y] != str2[x] {
					dy += subCost
				}
				if c = d[y] + delCost; c < dy {
					dy = c
				}
				for ; dy <= maxCost && y < l1; dy, d[y] = dy+delCost, dy {
					y++
					dlen++
				}
			}
			// fmt.Printf("%q -> %q: x=%d doff=%d dlen=%d d[%d:%d]=%v\n", str1, str2, x, doff, dlen, doff, doff+dlen, d[doff:doff+dlen])
			// empty band: every completion already exceeds maxCost, so
			// report a lower bound of maxCost+1 and stop early
			if dlen == 0 {
				dist = maxCost + 1
				return
			}
		}
		// the band never reached the last row: the true distance exceeds maxCost
		if doff+dlen-1 < l1 {
			dist = maxCost + 1
			return
		}
		dist = d[l1]
	} else {
		// Unlimited code path: classic single-row dynamic programming.
		// ToDo: This is O(l1*l2) time and O(min(l1,l2)) space; investigate if it is
		// worth to implement diagonal approach - O(l1*(1+dist)) time, up to O(l1*l2) space
		// http://www.csse.monash.edu.au/~lloyd/tildeStrings/Alignment/92.IPL.html
		// prefer the shorter string first, to minimize space; time is O(l1*l2) anyway;
		// a swap also transposes the meanings of insertion and deletion.
		if l1 > l2 {
			str1, str2, l1, l2, insCost, delCost = str2, str1, l2, l1, delCost, insCost
		}
		d := make([]int, l1+1)
		for y = 1; y <= l1; y++ {
			d[y] = y * delCost
		}
		for x := 0; x < l2; x++ {
			// dy carries the diagonal (previous row, previous column) value
			dy, d[0] = d[0], d[0]+insCost
			for y = 0; y < l1; dy, d[y] = d[y], dy {
				if str1[y] != str2[x] {
					dy += subCost
				}
				if c = d[y] + delCost; c < dy {
					dy = c
				}
				y++
				if c = d[y] + insCost; c < dy {
					dy = c
				}
			}
		}
		dist = d[l1]
	}
	return
}
// Distance returns the Levenshtein distance between str1 and str2,
// using the default or provided cost values. Pass nil for the third
// argument to use the default cost of 1 for all three operations, with
// no maximum.
func Distance(str1, str2 string, p *Params) int {
	params := p
	if params == nil {
		params = defaultParams
	}
	d, _, _ := Calculate([]rune(str1), []rune(str2), params.maxCost, params.insCost, params.subCost, params.delCost)
	return d
}
// Similarity returns a score in the range of 0..1 for how similar the two strings are.
// A score of 1 means the strings are identical, and 0 means they have nothing in common.
//
// A nil third argument uses the default cost of 1 for all three operations.
//
// If a non-zero MinScore value is provided in the parameters, scores lower than it
// will be returned as 0.
func Similarity(str1, str2 string, p *Params) float64 {
	// NOTE(review): p.Clone() is called even when p is nil, so the
	// documented nil-params case relies on (*Params).Clone handling a
	// nil receiver — confirm against the Params implementation.
	// A bonus threshold above 1 can never be met, which disables
	// Match's prefix bonus and yields the plain similarity score.
	return Match(str1, str2, p.Clone().BonusThreshold(1.1)) // guaranteed no bonus
}
// Match returns a similarity score adjusted by the same method as proposed by Winkler for
// the Jaro distance - giving a bonus to string pairs that share a common prefix, only if their
// similarity score is already over a threshold.
//
// The score is in the range of 0..1, with 1 meaning the strings are identical,
// and 0 meaning they have nothing in common.
//
// A nil third argument uses the default cost of 1 for all three operations, maximum length of
// common prefix to consider for bonus of 4, scaling factor of 0.1, and bonus threshold of 0.7.
//
// If a non-zero MinScore value is provided in the parameters, scores lower than it
// will be returned as 0.
func Match(str1, str2 string, p *Params) float64 {
	s1, s2 := []rune(str1), []rune(str2)
	l1, l2 := len(s1), len(s2)
	// two empty strings are identical; shortcut also avoids divByZero issues later on.
	if l1 == 0 && l2 == 0 {
		return 1
	}
	if p == nil {
		p = defaultParams
	}
	// a min over 1 can never be satisfied, so the score is 0.
	if p.minScore > 1 {
		return 0
	}
	insCost, delCost, maxDist, max := p.insCost, p.delCost, 0, 0
	// normalize so l1 <= l2; the swap transposes insertion/deletion costs
	if l1 > l2 {
		l1, l2, insCost, delCost = l2, l1, delCost, insCost
	}
	// maxDist is the worst-case edit cost, used to normalize dist into 0..1
	if p.subCost < delCost+insCost {
		maxDist = l1*p.subCost + (l2-l1)*insCost
	} else {
		maxDist = l1*delCost + l2*insCost
	}
	// a zero min is always satisfied, so no need to set a max cost.
	if p.minScore > 0 {
		// if p.minScore is lower than p.bonusThreshold, we can use a simplified formula
		// for the max cost, because a sim score below min cannot receive a bonus.
		if p.minScore < p.bonusThreshold {
			// round down the max - a cost equal to a rounded up max would already be under min.
			max = int((1 - p.minScore) * float64(maxDist))
		} else {
			// p.minScore <= sim + p.bonusPrefix*p.bonusScale*(1-sim)
			// p.minScore <= (1-dist/maxDist) + p.bonusPrefix*p.bonusScale*(1-(1-dist/maxDist))
			// p.minScore <= 1 - dist/maxDist + p.bonusPrefix*p.bonusScale*dist/maxDist
			// 1 - p.minScore >= dist/maxDist - p.bonusPrefix*p.bonusScale*dist/maxDist
			// (1-p.minScore)*maxDist/(1-p.bonusPrefix*p.bonusScale) >= dist
			max = int((1 - p.minScore) * float64(maxDist) / (1 - float64(p.bonusPrefix)*p.bonusScale))
		}
	}
	dist, pl, _ := Calculate(s1, s2, max, p.insCost, p.subCost, p.delCost)
	// when a max cost was in effect, Calculate may return a lower bound;
	// any distance over max means the score is below minScore anyway
	if max > 0 && dist > max {
		return 0
	}
	sim := 1 - float64(dist)/float64(maxDist)
	// Winkler-style prefix bonus, applied only above the threshold and
	// never to already-identical strings
	if sim >= p.bonusThreshold && sim < 1 && p.bonusPrefix > 0 && p.bonusScale > 0 {
		if pl > p.bonusPrefix {
			pl = p.bonusPrefix
		}
		sim += float64(pl) * p.bonusScale * (1 - sim)
	}
	if sim < p.minScore {
		return 0
	}
	return sim
}

View File

@ -1,213 +0,0 @@
// Copyright 2016 ALRUX Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package levenshtein
import (
"testing"
)
// e holds the expected outputs for one metrics test case: the edit
// cost plus the common prefix/suffix lengths reported by Calculate,
// and the Similarity and Match scores.
type e struct {
	cost, lp, ls int
	sim, match   float64
}
func Test_Metrics(t *testing.T) {
var (
cases = []struct {
s1 string
s2 string
desc string
p *Params
exp e
}{
// When the values are the same...
{"", "", "", nil, e{0, 0, 0, 1, 1}},
{"1", "1", "", nil, e{0, 1, 0, 1, 1}},
{"12", "12", "", nil, e{0, 2, 0, 1, 1}},
{"123", "123", "", nil, e{0, 3, 0, 1, 1}},
{"1234", "1234", "", nil, e{0, 4, 0, 1, 1}},
{"12345", "12345", "", nil, e{0, 5, 0, 1, 1}},
{"password", "password", "", nil, e{0, 8, 0, 1, 1}},
// When one of the values is empty...
{"", "1", "", nil, e{1, 0, 0, 0, 0}},
{"", "12", "", nil, e{2, 0, 0, 0, 0}},
{"", "123", "", nil, e{3, 0, 0, 0, 0}},
{"", "1234", "", nil, e{4, 0, 0, 0, 0}},
{"", "12345", "", nil, e{5, 0, 0, 0, 0}},
{"", "password", "", nil, e{8, 0, 0, 0, 0}},
{"1", "", "", nil, e{1, 0, 0, 0, 0}},
{"12", "", "", nil, e{2, 0, 0, 0, 0}},
{"123", "", "", nil, e{3, 0, 0, 0, 0}},
{"1234", "", "", nil, e{4, 0, 0, 0, 0}},
{"12345", "", "", nil, e{5, 0, 0, 0, 0}},
{"password", "", "", nil, e{8, 0, 0, 0, 0}},
// When a single character is inserted or removed...
{"password", "1password", "", nil, e{1, 0, 8, 8.0 / 9, 8.0 / 9}},
{"password", "p1assword", "", nil, e{1, 1, 7, 8.0 / 9, 8.1 / 9}},
{"password", "pa1ssword", "", nil, e{1, 2, 6, 8.0 / 9, 8.2 / 9}},
{"password", "pas1sword", "", nil, e{1, 3, 5, 8.0 / 9, 8.3 / 9}},
{"password", "pass1word", "", nil, e{1, 4, 4, 8.0 / 9, 8.4 / 9}},
{"password", "passw1ord", "", nil, e{1, 5, 3, 8.0 / 9, 8.4 / 9}},
{"password", "passwo1rd", "", nil, e{1, 6, 2, 8.0 / 9, 8.4 / 9}},
{"password", "passwor1d", "", nil, e{1, 7, 1, 8.0 / 9, 8.4 / 9}},
{"password", "password1", "", nil, e{1, 8, 0, 8.0 / 9, 8.4 / 9}},
{"password", "assword", "", nil, e{1, 0, 7, 7.0 / 8, 7.0 / 8}},
{"password", "pssword", "", nil, e{1, 1, 6, 7.0 / 8, 7.1 / 8}},
{"password", "pasword", "", nil, e{1, 3, 4, 7.0 / 8, 7.3 / 8}},
{"password", "passord", "", nil, e{1, 4, 3, 7.0 / 8, 7.4 / 8}},
{"password", "passwrd", "", nil, e{1, 5, 2, 7.0 / 8, 7.4 / 8}},
{"password", "passwod", "", nil, e{1, 6, 1, 7.0 / 8, 7.4 / 8}},
{"password", "passwor", "", nil, e{1, 7, 0, 7.0 / 8, 7.4 / 8}},
// When a single character is replaced...
{"password", "Xassword", "", nil, e{1, 0, 7, 7.0 / 8, 7.0 / 8}},
{"password", "pXssword", "", nil, e{1, 1, 6, 7.0 / 8, 7.1 / 8}},
{"password", "paXsword", "", nil, e{1, 2, 5, 7.0 / 8, 7.2 / 8}},
{"password", "pasXword", "", nil, e{1, 3, 4, 7.0 / 8, 7.3 / 8}},
{"password", "passXord", "", nil, e{1, 4, 3, 7.0 / 8, 7.4 / 8}},
{"password", "passwXrd", "", nil, e{1, 5, 2, 7.0 / 8, 7.4 / 8}},
{"password", "passwoXd", "", nil, e{1, 6, 1, 7.0 / 8, 7.4 / 8}},
{"password", "passworX", "", nil, e{1, 7, 0, 7.0 / 8, 7.4 / 8}},
// If characters are taken off the front and added to the back and all of
// the characters are unique, then the distance is two times the number of
// characters shifted, until you get halfway (and then it becomes easier
// to shift from the other direction).
{"12345678", "23456781", "", nil, e{2, 0, 0, 6. / 8, 6. / 8}},
{"12345678", "34567812", "", nil, e{4, 0, 0, 4. / 8, 4. / 8}},
{"12345678", "45678123", "", nil, e{6, 0, 0, 2. / 8, 2. / 8}},
{"12345678", "56781234", "", nil, e{8, 0, 0, 0, 0}},
{"12345678", "67812345", "", nil, e{6, 0, 0, 2. / 8, 2. / 8}},
{"12345678", "78123456", "", nil, e{4, 0, 0, 4. / 8, 4. / 8}},
{"12345678", "81234567", "", nil, e{2, 0, 0, 6. / 8, 6. / 8}},
// If all the characters are unique and the values are reversed, then the
// distance is the number of characters for an even number of characters,
// and one less for an odd number of characters (since the middle
// character will stay the same).
{"12", "21", "", nil, e{2, 0, 0, 0, 0}},
{"123", "321", "", nil, e{2, 0, 0, 1. / 3, 1. / 3}},
{"1234", "4321", "", nil, e{4, 0, 0, 0, 0}},
{"12345", "54321", "", nil, e{4, 0, 0, 1. / 5, 1. / 5}},
{"123456", "654321", "", nil, e{6, 0, 0, 0, 0}},
{"1234567", "7654321", "", nil, e{6, 0, 0, 1. / 7, 1. / 7}},
{"12345678", "87654321", "", nil, e{8, 0, 0, 0, 0}},
// The results are the same regardless of the string order,
// with the default parameters...
{"password", "1234", "", nil, e{8, 0, 0, 0, 0}},
{"1234", "password", "", nil, e{8, 0, 0, 0, 0}},
{"password", "pass1", "", nil, e{4, 4, 0, 4. / 8, 4. / 8}},
{"pass1", "password", "", nil, e{4, 4, 0, 4. / 8, 4. / 8}},
{"password", "passwor", "", nil, e{1, 7, 0, 7.0 / 8, 7.4 / 8}},
{"passwor", "password", "", nil, e{1, 7, 0, 7.0 / 8, 7.4 / 8}},
// ... but not necessarily so with custom costs:
{"password", "1234", " (D=2)", NewParams().DelCost(2), e{12, 0, 0, 0, 0}},
{"1234", "password", " (D=2)", NewParams().DelCost(2), e{8, 0, 0, 0, 0}},
{"password", "pass1", " (D=2)", NewParams().DelCost(2), e{7, 4, 0, 4. / 11, 4. / 11}},
{"pass1", "password", " (D=2)", NewParams().DelCost(2), e{4, 4, 0, 4. / 8, 4. / 8}},
{"password", "pass1", " (S=3)", NewParams().SubCost(3), e{5, 4, 0, 8. / 13, 8. / 13}},
{"password", "passwor", " (D=2)", NewParams().DelCost(2), e{2, 7, 0, 7.0 / 9, 7.8 / 9}},
{"passwor", "password", " (D=2)", NewParams().DelCost(2), e{1, 7, 0, 7.0 / 8, 7.4 / 8}},
// When setting a maxCost (should not affect Similarity() and Match())...
{"password", "1password2", "(maxCost=6)", NewParams().MaxCost(6), e{2, 0, 0, 8. / 10, 8. / 10}},
{"password", "pass1234", "(maxCost=1)", NewParams().MaxCost(1), e{2, 4, 0, 4. / 8, 4. / 8}},
{"pass1word", "passwords1", "(maxCost=2)", NewParams().MaxCost(2), e{3, 4, 0, 7. / 10, 8.2 / 10}},
{"password", "1passwo", " (D=2,maxCost=1)", NewParams().DelCost(2).MaxCost(1), e{2, 0, 0, 4. / 9, 4. / 9}},
{"pwd", "password", " (I=0,maxCost=0)", NewParams().InsCost(0).MaxCost(0), e{0, 1, 1, 1, 1}},
{"passXword", "password", "(maxCost=10)", NewParams().MaxCost(10), e{1, 4, 4, 8. / 9, 8.4 / 9}},
{"passXord", "password", "(S=3,maxCost=17)", NewParams().SubCost(3).MaxCost(17), e{2, 4, 3, 14. / 16, 14.8 / 16}},
// ... no change because the Calculate is calculated without getting into the main algorithm:
{"password", "pass", "(maxCost=1)", NewParams().MaxCost(1), e{4, 4, 0, 4. / 8, 4. / 8}},
{"password", "1234", " (D=2,maxCost=1)", NewParams().DelCost(2).MaxCost(1), e{8, 0, 0, 0, 0}},
// When setting a minScore (should not affect Calculate() and Distance())...
{"password", "pass1", "(minScore=0.3)", NewParams().MinScore(.3), e{4, 4, 0, 4. / 8, 4. / 8}},
{"password", "pass1", "(minScore=0.6)", NewParams().MinScore(.6), e{4, 4, 0, 0, 0}},
{"password", "pass1wor", "(minScore=0.9)", NewParams().MinScore(.9), e{2, 4, 0, 0, 0}},
{"password", "password", "(minScore=1.1)", NewParams().MinScore(1.1), e{0, 8, 0, 0, 0}},
// The rest of these are miscellaneous examples. They will
// be illustrated using the following key:
// = (the characters are equal)
// + (the character is inserted)
// - (the character is removed)
// # (the character is replaced)
// Mississippi
// ippississiM
// -=##====##=+ --> 6
{"Mississippi", "ippississiM", "", nil, e{6, 0, 0, 5. / 11, 5. / 11}},
// eieio
// oieie
// #===# --> 2
{"eieio", "oieie", "", nil, e{2, 0, 0, 3. / 5, 3. / 5}},
// brad+angelina
// bra ngelina
// ===+++======= --> 3
{"brad+angelina", "brangelina", "", nil, e{3, 3, 7, 10. / 13, 10.9 / 13}},
// test international chars
// naive
// naïve
// ==#== --> 1
{"naive", "naïve", "", nil, e{1, 2, 2, 4. / 5, 4.2 / 5}},
}
)
for _, c := range cases {
par := c.p
if par == nil {
par = defaultParams
}
cost, lp, ls := Calculate([]rune(c.s1), []rune(c.s2), par.maxCost, par.insCost, par.subCost, par.delCost)
if cost != c.exp.cost {
t.Errorf("Cost: %q -> %q%s: got %d, want %d", c.s1, c.s2, c.desc, cost, c.exp.cost)
}
if lp != c.exp.lp {
t.Errorf("Prefix: %q -> %q%s: got %d, want %d", c.s1, c.s2, c.desc, lp, c.exp.lp)
}
if ls != c.exp.ls {
t.Errorf("Suffix: %q -> %q%s: got %d, want %d", c.s1, c.s2, c.desc, ls, c.exp.ls)
}
dist := Distance(c.s1, c.s2, c.p)
if dist != c.exp.cost {
t.Errorf("Distance: %q -> %q%s: got %d, want %d", c.s1, c.s2, c.desc, dist, c.exp.cost)
}
sim := Similarity(c.s1, c.s2, c.p)
off := sim - c.exp.sim
if off < 0 {
off = -off
}
if off > 1e-15 {
t.Errorf("Similarity: %q -> %q%s: got %f, want %f (off %g)", c.s1, c.s2, c.desc, sim, c.exp.sim, off)
}
match := Match(c.s1, c.s2, c.p)
off = match - c.exp.match
if off < 0 {
off = -off
}
if off > 1e-15 {
t.Errorf("Match: %q -> %q%s: got %f, want %f (off %g)", c.s1, c.s2, c.desc, match, c.exp.match, off)
}
}
}

View File

@ -1,152 +0,0 @@
// Copyright 2016 ALRUX Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package levenshtein
// Params represents a set of parameter values for the various formulas involved
// in the calculation of the Levenshtein string metrics.
type Params struct {
	insCost        int     // cost charged per inserted character
	subCost        int     // cost charged per substituted character
	delCost        int     // cost charged per deleted character
	maxCost        int     // upper bound for Distance(); 0 means unlimited
	minScore       float64 // scores below this are reported as 0 by Similarity()/Match()
	bonusPrefix    int     // max length of common prefix eligible for the Match() bonus
	bonusScale     float64 // scaling factor used by Match() when computing the bonus
	bonusThreshold float64 // minimum similarity score before Match() grants a bonus
}
var (
	// defaultParams is the parameter set used when a caller passes a nil *Params.
	defaultParams = NewParams()
)
// NewParams creates a new set of parameters and initializes it with the default values.
func NewParams() *Params {
	p := new(Params)
	p.insCost = 1
	p.subCost = 1
	p.delCost = 1
	p.maxCost = 0
	p.minScore = 0
	p.bonusPrefix = 4
	p.bonusScale = .1
	p.bonusThreshold = .7
	return p
}
// Clone returns a pointer to a copy of the receiver parameter set, or of a new
// default parameter set if the receiver is nil.
func (p *Params) Clone() *Params {
	if p == nil {
		return NewParams()
	}
	// Every field is a plain value type, so a shallow copy duplicates the
	// whole parameter set.
	q := *p
	return &q
}
// InsCost overrides the default value of 1 for the cost of insertion.
// The new value must be zero or positive; negative values are ignored.
func (p *Params) InsCost(v int) *Params {
	if v < 0 {
		return p
	}
	p.insCost = v
	return p
}
// SubCost overrides the default value of 1 for the cost of substitution.
// The new value must be zero or positive; negative values are ignored.
func (p *Params) SubCost(v int) *Params {
	if v < 0 {
		return p
	}
	p.subCost = v
	return p
}
// DelCost overrides the default value of 1 for the cost of deletion.
// The new value must be zero or positive; negative values are ignored.
func (p *Params) DelCost(v int) *Params {
	if v < 0 {
		return p
	}
	p.delCost = v
	return p
}
// MaxCost overrides the default value of 0 (meaning unlimited) for the maximum cost.
// The calculation of Distance() stops when the result is guaranteed to exceed
// this maximum, returning a lower-bound rather than exact value.
// The new value must be zero or positive; negative values are ignored.
func (p *Params) MaxCost(v int) *Params {
	if v < 0 {
		return p
	}
	p.maxCost = v
	return p
}
// MinScore overrides the default value of 0 for the minimum similarity score.
// Scores below this threshold are returned as 0 by Similarity() and Match().
// The new value must be zero or positive. Note that a minimum greater than 1
// can never be satisfied, resulting in a score of 0 for any pair of strings.
func (p *Params) MinScore(v float64) *Params {
	if v < 0 {
		return p
	}
	p.minScore = v
	return p
}
// BonusPrefix overrides the default value for the maximum length of
// common prefix to be considered for bonus by Match().
// The new value must be zero or positive; negative values are ignored.
func (p *Params) BonusPrefix(v int) *Params {
	if v < 0 {
		return p
	}
	p.bonusPrefix = v
	return p
}
// BonusScale overrides the default value for the scaling factor used by Match()
// in calculating the bonus.
// The new value must be zero or positive. To guarantee that the similarity score
// remains in the interval 0..1, this scaling factor is not allowed to exceed
// 1 / BonusPrefix.
func (p *Params) BonusScale(v float64) *Params {
	if v >= 0 {
		p.bonusScale = v
	}
	// Clamp so that bonusPrefix*bonusScale <= 1; otherwise the bonus could
	// push a score above 1.
	if limit := float64(p.bonusPrefix); limit*p.bonusScale > 1 {
		p.bonusScale = 1 / limit
	}
	return p
}
// BonusThreshold overrides the default value for the minimum similarity score
// for which Match() can assign a bonus.
// The new value must be zero or positive; negative values are ignored. Note
// that a threshold greater than 1 effectively makes Match() become the
// equivalent of Similarity().
func (p *Params) BonusThreshold(v float64) *Params {
	if v < 0 {
		return p
	}
	p.bonusThreshold = v
	return p
}

View File

@ -1,145 +0,0 @@
// Copyright 2016 ALRUX Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package levenshtein
import (
"testing"
)
// Test_Params verifies the Params defaults, each fluent setter (including the
// rejection of negative values and the BonusScale clamp), and Clone semantics
// for both non-nil and nil receivers.
func Test_Params(t *testing.T) {
	// Different allocations should not be equal.
	if NewParams() == NewParams() {
		t.Errorf(`NewParams() == NewParams()`)
	}
	// Clone should not be equal to original allocation.
	p := NewParams()
	if p == p.Clone() {
		t.Errorf(`p == p.Clone()`)
	}
	// Defaults
	if p.insCost != 1 {
		t.Errorf(`NewParams().insCost == %v, want %v`, p.insCost, 1)
	}
	if p.subCost != 1 {
		t.Errorf(`NewParams().subCost == %v, want %v`, p.subCost, 1)
	}
	if p.delCost != 1 {
		t.Errorf(`NewParams().delCost == %v, want %v`, p.delCost, 1)
	}
	if p.maxCost != 0 {
		t.Errorf(`NewParams().maxCost == %v, want %v`, p.maxCost, 0)
	}
	if p.minScore != 0 {
		t.Errorf(`NewParams().minScore == %v, want %v`, p.minScore, 0)
	}
	if p.bonusPrefix != 4 {
		t.Errorf(`NewParams().bonusPrefix == %v, want %v`, p.bonusPrefix, 4)
	}
	if p.bonusScale != .1 {
		t.Errorf(`NewParams().bonusScale == %v, want %v`, p.bonusScale, .1)
	}
	if p.bonusThreshold != .7 {
		t.Errorf(`NewParams().bonusThreshold == %v, want %v`, p.bonusThreshold, .7)
	}
	// Setters: a valid value is stored, a negative value leaves the default.
	if p = NewParams().InsCost(2); p.insCost != 2 {
		t.Errorf(`NewParams().InsCost(2).insCost == %v, want %v`, p.insCost, 2)
	}
	if p = NewParams().InsCost(-2); p.insCost != 1 {
		t.Errorf(`NewParams().InsCost(-2).insCost == %v, want %v`, p.insCost, 1)
	}
	if p = NewParams().SubCost(3); p.subCost != 3 {
		t.Errorf(`NewParams().SubCost(3).subCost == %v, want %v`, p.subCost, 3)
	}
	if p = NewParams().SubCost(-3); p.subCost != 1 {
		t.Errorf(`NewParams().SubCost(-3).subCost == %v, want %v`, p.subCost, 1)
	}
	if p = NewParams().DelCost(5); p.delCost != 5 {
		t.Errorf(`NewParams().DelCost(5).delCost == %v, want %v`, p.delCost, 5)
	}
	if p = NewParams().DelCost(-1); p.delCost != 1 {
		t.Errorf(`NewParams().DelCost(-1).delCost == %v, want %v`, p.delCost, 1)
	}
	if p = NewParams().MaxCost(7); p.maxCost != 7 {
		t.Errorf(`NewParams().MaxCost(7).maxCost == %v, want %v`, p.maxCost, 7)
	}
	if p = NewParams().MaxCost(-5); p.maxCost != 0 {
		t.Errorf(`NewParams().MaxCost(-5).maxCost == %v, want %v`, p.maxCost, 0)
	}
	if p = NewParams().MinScore(.5); p.minScore != .5 {
		t.Errorf(`NewParams().MinScore(.5).minScore == %v, want %v`, p.minScore, .5)
	}
	// A minScore above 1 is accepted (it just can never be satisfied).
	if p = NewParams().MinScore(3); p.minScore != 3 {
		t.Errorf(`NewParams().MinScore(3).minScore == %v, want %v`, p.minScore, 3)
	}
	if p = NewParams().MinScore(-5); p.minScore != 0 {
		t.Errorf(`NewParams().MinScore(-5).minScore == %v, want %v`, p.minScore, 0)
	}
	if p = NewParams().BonusPrefix(7); p.bonusPrefix != 7 {
		t.Errorf(`NewParams().BonusPrefix(7).bonusPrefix == %v, want %v`, p.bonusPrefix, 7)
	}
	if p = NewParams().BonusPrefix(-5); p.bonusPrefix != 4 {
		t.Errorf(`NewParams().BonusPrefix(-5).bonusPrefix == %v, want %v`, p.bonusPrefix, 4)
	}
	if p = NewParams().BonusScale(.2); p.bonusScale != .2 {
		t.Errorf(`NewParams().BonusScale(.2).bonusScale == %v, want %v`, p.bonusScale, .2)
	}
	if p = NewParams().BonusScale(-.3); p.bonusScale != .1 {
		t.Errorf(`NewParams().BonusScale(-.3).bonusScale == %v, want %v`, p.bonusScale, .1)
	}
	// An oversized bonusScale is clamped to 1/bonusPrefix.
	if p = NewParams().BonusScale(7); p.bonusScale != 1/float64(p.bonusPrefix) {
		t.Errorf(`NewParams().BonusScale(7).bonusScale == %v, want %v`, p.bonusScale, 1/float64(p.bonusPrefix))
	}
	if p = NewParams().BonusThreshold(.3); p.bonusThreshold != .3 {
		t.Errorf(`NewParams().BonusThreshold(.3).bonusThreshold == %v, want %v`, p.bonusThreshold, .3)
	}
	if p = NewParams().BonusThreshold(7); p.bonusThreshold != 7 {
		t.Errorf(`NewParams().BonusThreshold(7).bonusThreshold == %v, want %v`, p.bonusThreshold, 7)
	}
	if p = NewParams().BonusThreshold(-7); p.bonusThreshold != .7 {
		t.Errorf(`NewParams().BonusThreshold(-7).bonusThreshold == %v, want %v`, p.bonusThreshold, .7)
	}
	// Cloning nil pointer should initiate with default values
	var p1 *Params
	p2 := p1.Clone()
	if p2.insCost != 1 {
		t.Errorf(`nil.Clone().insCost == %v, want %v`, p2.insCost, 1)
	}
	if p2.subCost != 1 {
		t.Errorf(`nil.Clone().subCost == %v, want %v`, p2.subCost, 1)
	}
	if p2.delCost != 1 {
		t.Errorf(`nil.Clone().delCost == %v, want %v`, p2.delCost, 1)
	}
	if p2.maxCost != 0 {
		t.Errorf(`nil.Clone().maxCost == %v, want %v`, p2.maxCost, 0)
	}
	if p2.minScore != 0 {
		t.Errorf(`nil.Clone().minScore == %v, want %v`, p2.minScore, 0)
	}
	if p2.bonusPrefix != 4 {
		t.Errorf(`nil.Clone().bonusPrefix == %v, want %v`, p2.bonusPrefix, 4)
	}
	if p2.bonusScale != .1 {
		t.Errorf(`nil.Clone().bonusScale == %v, want %v`, p2.bonusScale, .1)
	}
	if p2.bonusThreshold != .7 {
		t.Errorf(`nil.Clone().bonusThreshold == %v, want %v`, p2.bonusThreshold, .7)
	}
}

View File

@ -1,40 +0,0 @@
# gluaxmlpath
gluaxmlpath provides an easy way to use [xmlpath](https://github.com/go-xmlpath/xmlpath) from within [GopherLua](https://github.com/yuin/gopher-lua).
## Installation
```
go get github.com/ailncode/gluaxmlpath
```
## Usage
```go
package main
import (
"github.com/ailncode/gluaxmlpath"
"github.com/yuin/gopher-lua"
)
func main() {
L := lua.NewState()
defer L.Close()
gluaxmlpath.Preload(L)
if err := L.DoString(`
xml ="<booklist><book>x1</book><book>x2</book><book>x3</book></booklist>"
local xmlpath = require("xmlpath")
node,err = xmlpath.loadxml(xml)
path,err = xmlpath.compile("//book")
it = path:iter(node)
for k,v in pairs(it) do
print(k,v:string())
end
`); err != nil {
panic(err)
}
}
```

View File

@ -1,37 +0,0 @@
package gluaxmlpath
import (
"bytes"
"github.com/yuin/gopher-lua"
xmlpath "gopkg.in/xmlpath.v2"
)
// api maps the Lua-visible function names of the xmlpath module to their Go
// implementations; it is installed on the module table by Loader.
var api = map[string]lua.LGFunction{
	"loadxml": loadXml,
	"compile": compile,
}
// loadXml implements xmlpath.loadxml(str): it parses the string at Lua stack
// position 1 and returns a node userdata, or nil plus an error message on
// parse failure (the usual Lua two-value error convention).
// NOTE(review): despite the name, this calls xmlpath.ParseHTML rather than
// xmlpath.Parse — presumably for lenient parsing of non-well-formed input;
// confirm this is intentional.
func loadXml(L *lua.LState) int {
	xmlStr := L.CheckString(1)
	r := bytes.NewReader([]byte(xmlStr))
	node, err := xmlpath.ParseHTML(r)
	if err != nil {
		// Return nil + message instead of raising, so Lua callers can
		// check the second return value.
		L.Push(lua.LNil)
		L.Push(lua.LString(err.Error()))
		return 2
	}
	L.Push(newNode(L, node))
	return 1
}
// compile implements xmlpath.compile(expr): it compiles the XPath expression
// at Lua stack position 1 and returns a path userdata, or nil plus an error
// message if the expression does not compile.
func compile(L *lua.LState) int {
	pattern := L.CheckString(1)
	compiled, err := xmlpath.Compile(pattern)
	if err != nil {
		L.Push(lua.LNil)
		L.Push(lua.LString(err.Error()))
		return 2
	}
	L.Push(newPath(L, compiled))
	return 1
}

View File

@ -1,21 +0,0 @@
package gluaxmlpath
import (
"github.com/yuin/gopher-lua"
)
// Preload adds xmlpath to the given Lua state's package.preload table. After it
// has been preloaded, it can be loaded using require:
//
//  local xmlpath = require("xmlpath")
func Preload(L *lua.LState) {
	L.PreloadModule("xmlpath", Loader)
}
// Loader is the module loader function. It builds the module table from api,
// registers the userdata metatables on it, and pushes the table as the single
// value returned to require().
func Loader(L *lua.LState) int {
	mod := L.SetFuncs(L.NewTable(), api)
	registerType(L, mod)
	L.Push(mod)
	return 1
}

View File

@ -1,135 +0,0 @@
package gluaxmlpath
import (
"github.com/yuin/gopher-lua"
xmlpath "gopkg.in/xmlpath.v2"
)
// Node wraps an xmlpath.Node so it can be stored in a Lua userdata.
type Node struct {
	base *xmlpath.Node
}
// Path wraps a compiled xmlpath.Path so it can be stored in a Lua userdata.
type Path struct {
	base *xmlpath.Path
}
// Iter wraps an xmlpath.Iter so it can be stored in a Lua userdata.
type Iter struct {
	base *xmlpath.Iter
}
// Metatable type names used to register and look up the userdata types.
const luaNodeTypeName = "xmlpath.node"
const luaPathTypeName = "xmlpath.path"
const luaIterTypeName = "xmlpath.iter"
// registerType creates the metatables for the three userdata types exposed by
// the module (node, path, iter), attaches each to the module table, and wires
// up the __index method table of each.
func registerType(L *lua.LState, module *lua.LTable) {
	//reg node: exposes node:string()
	nodemt := L.NewTypeMetatable(luaNodeTypeName)
	L.SetField(module, "node", nodemt)
	L.SetField(nodemt, "__index", L.SetFuncs(L.NewTable(), map[string]lua.LGFunction{
		"string": nodeString,
	}))
	//reg path: exposes path:iter(node)
	pathmt := L.NewTypeMetatable(luaPathTypeName)
	L.SetField(module, "path", pathmt)
	L.SetField(pathmt, "__index", L.SetFuncs(L.NewTable(), map[string]lua.LGFunction{
		"iter": iter,
	}))
	//reg iter: exposes iter:node()
	itermt := L.NewTypeMetatable(luaIterTypeName)
	L.SetField(module, "iter", itermt)
	L.SetField(itermt, "__index", L.SetFuncs(L.NewTable(), map[string]lua.LGFunction{
		//"next": next,
		"node": node,
	}))
}
// newNode wraps an xmlpath.Node in a Lua userdata carrying the node metatable.
func newNode(L *lua.LState, n *xmlpath.Node) *lua.LUserData {
	ud := L.NewUserData()
	ud.Value = &Node{base: n}
	L.SetMetatable(ud, L.GetTypeMetatable(luaNodeTypeName))
	return ud
}
// checkNode extracts the *Node wrapped by the userdata at stack position 1,
// raising a Lua argument error when the value has a different type.
func checkNode(L *lua.LState) *Node {
	ud := L.CheckUserData(1)
	n, ok := ud.Value.(*Node)
	if !ok {
		L.ArgError(1, "node expected")
		return nil
	}
	return n
}
// newPath wraps an xmlpath.Path in a Lua userdata carrying the path metatable.
func newPath(L *lua.LState, p *xmlpath.Path) *lua.LUserData {
	ud := L.NewUserData()
	ud.Value = &Path{base: p}
	L.SetMetatable(ud, L.GetTypeMetatable(luaPathTypeName))
	return ud
}
// checkPath extracts the *Path wrapped by the userdata at stack position 1,
// raising a Lua argument error when the value has a different type.
func checkPath(L *lua.LState) *Path {
	ud := L.CheckUserData(1)
	p, ok := ud.Value.(*Path)
	if !ok {
		L.ArgError(1, "path expected")
		return nil
	}
	return p
}
// newIter wraps an xmlpath.Iter in a Lua userdata carrying the iter metatable.
func newIter(L *lua.LState, i *xmlpath.Iter) *lua.LUserData {
	ud := L.NewUserData()
	ud.Value = &Iter{base: i}
	L.SetMetatable(ud, L.GetTypeMetatable(luaIterTypeName))
	return ud
}
// checkIter extracts the *Iter wrapped by the userdata at stack position 1,
// raising a Lua argument error when the value has a different type.
func checkIter(L *lua.LState) *Iter {
	ud := L.CheckUserData(1)
	it, ok := ud.Value.(*Iter)
	if !ok {
		L.ArgError(1, "iter expected")
		return nil
	}
	return it
}
// iter implements path:iter(node) in Lua: it runs the compiled path over the
// node given as argument 2 and returns a 1-indexed Lua table containing every
// matching node.
func iter(L *lua.LState) int {
	path := checkPath(L)
	if L.GetTop() == 2 {
		ut := L.CheckUserData(2)
		if node, ok := ut.Value.(*Node); ok {
			it := path.base.Iter(node.base)
			ltab := L.NewTable()
			i := 1
			for it.Next() {
				L.RawSetInt(ltab, i, newNode(L, it.Node()))
				i++
			}
			L.Push(ltab)
			return 1
		}
	}
	// The node argument (stack position 2) was missing or of the wrong type.
	// BUG FIX: the error previously blamed argument 1, which is the path
	// receiver already validated by checkPath above.
	L.ArgError(2, "node expected")
	return 0
}
//support lua standard iterator
//hasNext := iter.next()
// func next(L *lua.LState) int {
// iter := checkIter(L)
// L.Push(lua.LBool(iter.base.Next()))
// return 1
// }
//node := iter.node()
// node implements iter:node() in Lua: it pushes the iterator's current node
// as a node userdata.
func node(L *lua.LState) int {
	iter := checkIter(L)
	L.Push(newNode(L, iter.base.Node()))
	return 1
}
//string := node.string()
// nodeString implements node:string() in Lua: it pushes the node's string
// value (via xmlpath.Node.String) as a Lua string.
func nodeString(L *lua.LState) int {
	node := checkNode(L)
	L.Push(lua.LString(node.base.String()))
	return 1
}

View File

@ -1,7 +0,0 @@
language: go
go:
- 1.x
- 1.7.x
- 1.8.x
- master

View File

@ -1,19 +0,0 @@
Copyright (c) 2015 Martin Atkins
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -1,210 +0,0 @@
// Package cidr is a collection of assorted utilities for computing
// network and host addresses within network ranges.
//
// It expects a CIDR-type address structure where addresses are divided into
// some number of prefix bits representing the network and then the remaining
// suffix bits represent the host.
//
// For example, it can help to calculate addresses for sub-networks of a
// parent network, or to calculate host addresses within a particular prefix.
//
// At present this package is prioritizing simplicity of implementation and
// de-prioritizing speed and memory usage. Thus caution is advised before
// using this package in performance-critical applications or hot codepaths.
// Patches to improve the speed and memory usage may be accepted as long as
// they do not result in a significant increase in code complexity.
package cidr
import (
"fmt"
"math/big"
"net"
)
// Subnet takes a parent CIDR range and creates a subnet within it
// with the given number of additional prefix bits and the given
// network number.
//
// For example, 10.3.0.0/16, extended by 8 bits, with a network number
// of 5, becomes 10.3.5.0/24 .
func Subnet(base *net.IPNet, newBits int, num int) (*net.IPNet, error) {
	parentLen, addrLen := base.Mask.Size()
	newPrefixLen := parentLen + newBits
	// The extended prefix must still fit inside the address.
	if newPrefixLen > addrLen {
		return nil, fmt.Errorf("insufficient address space to extend prefix of %d by %d", parentLen, newBits)
	}
	// The network number must be representable in the added bits.
	maxNetNum := uint64(1<<uint64(newBits)) - 1
	if uint64(num) > maxNetNum {
		return nil, fmt.Errorf("prefix extension of %d does not accommodate a subnet numbered %d", newBits, num)
	}
	return &net.IPNet{
		IP:   insertNumIntoIP(base.IP, num, newPrefixLen),
		Mask: net.CIDRMask(newPrefixLen, addrLen),
	}, nil
}
// Host takes a parent CIDR range and turns it into a host IP address with
// the given host number.
//
// For example, 10.3.0.0/16 with a host number of 2 gives 10.3.0.2.
func Host(base *net.IPNet, num int) (net.IP, error) {
	parentLen, addrLen := base.Mask.Size()
	hostLen := addrLen - parentLen
	maxHostNum := uint64(1<<uint64(hostLen)) - 1
	numUint64 := uint64(num)
	if num < 0 {
		// Negative numbers count back from the end of the range:
		// -1 is the last host address, -2 the one before it, and so on.
		numUint64 = uint64(-num) - 1
		num = int(maxHostNum - numUint64)
	}
	if numUint64 > maxHostNum {
		return nil, fmt.Errorf("prefix of %d does not accommodate a host numbered %d", parentLen, num)
	}
	bitlength := 128
	if base.IP.To4() != nil {
		bitlength = 32
	}
	return insertNumIntoIP(base.IP, num, bitlength), nil
}
// AddressRange returns the first and last addresses in the given CIDR range.
func AddressRange(network *net.IPNet) (net.IP, net.IP) {
	firstIP := network.IP
	prefixLen, bits := network.Mask.Size()
	// A full-length prefix contains exactly one address. Return two distinct
	// slices anyway, so callers can mutate one without affecting the other.
	if prefixLen == bits {
		lastIP := make(net.IP, len(firstIP))
		copy(lastIP, firstIP)
		return firstIP, lastIP
	}
	// Otherwise the last address is the first address with all host bits set.
	firstIPInt, bits := ipToInt(firstIP)
	hostLen := uint(bits) - uint(prefixLen)
	lastIPInt := big.NewInt(1)
	lastIPInt.Lsh(lastIPInt, hostLen)
	lastIPInt.Sub(lastIPInt, big.NewInt(1))
	lastIPInt.Or(lastIPInt, firstIPInt)
	return firstIP, intToIP(lastIPInt, bits)
}
// AddressCount returns the number of distinct host addresses within the given
// CIDR range.
//
// Since the result is a uint64, this function returns meaningful information
// only for IPv4 ranges and IPv6 ranges with a prefix size of at least 65.
func AddressCount(network *net.IPNet) uint64 {
prefixLen, bits := network.Mask.Size()
return 1 << (uint64(bits) - uint64(prefixLen))
}
// VerifyNoOverlap takes a list of subnets and a supernet (CIDRBlock) and
// verifies that none of the subnets overlap and that all subnets are inside
// the supernet. It returns an error if either condition is not satisfied.
func VerifyNoOverlap(subnets []*net.IPNet, CIDRBlock *net.IPNet) error {
	// Precompute the first/last address of every subnet.
	firstLastIP := make([][]net.IP, len(subnets))
	for i, s := range subnets {
		first, last := AddressRange(s)
		firstLastIP[i] = []net.IP{first, last}
	}
	for i, s := range subnets {
		if !CIDRBlock.Contains(firstLastIP[i][0]) || !CIDRBlock.Contains(firstLastIP[i][1]) {
			return fmt.Errorf("%s does not fully contain %s", CIDRBlock.String(), s.String())
		}
		for j := i + 1; j < len(subnets); j++ {
			first := firstLastIP[j][0]
			last := firstLastIP[j][1]
			// BUG FIX: also test containment in the opposite direction.
			// Previously, a later subnet that strictly contained an
			// earlier one (so that neither of its endpoints fell inside
			// the smaller subnet) was not reported as an overlap.
			if s.Contains(first) || s.Contains(last) ||
				subnets[j].Contains(firstLastIP[i][0]) || subnets[j].Contains(firstLastIP[i][1]) {
				return fmt.Errorf("%s overlaps with %s", subnets[j].String(), s.String())
			}
		}
	}
	return nil
}
// PreviousSubnet returns the subnet of the desired mask in the IP space
// just below the start of the provided IPNet. If the IP space rolls over,
// the second return value is true.
func PreviousSubnet(network *net.IPNet, prefixLen int) (*net.IPNet, bool) {
	startIP := checkIPv4(network.IP)
	previousIP := make(net.IP, len(startIP))
	copy(previousIP, startIP)
	cMask := net.CIDRMask(prefixLen, 8*len(previousIP))
	previousIP = Dec(previousIP)
	previous := &net.IPNet{IP: previousIP.Mask(cMask), Mask: cMask}
	// Starting from the zero address means Dec wrapped around the space.
	rolled := startIP.Equal(net.IPv4zero) || startIP.Equal(net.IPv6zero)
	return previous, rolled
}
// NextSubnet returns the next available subnet of the desired mask size,
// starting from the maximum IP of the offset subnet.
// If the IP exceeds the maximum IP, the second return value is true.
func NextSubnet(network *net.IPNet, prefixLen int) (*net.IPNet, bool) {
	_, currentLast := AddressRange(network)
	mask := net.CIDRMask(prefixLen, 8*len(currentLast))
	currentSubnet := &net.IPNet{IP: currentLast.Mask(mask), Mask: mask}
	_, last := AddressRange(currentSubnet)
	last = Inc(last)
	next := &net.IPNet{IP: last.Mask(mask), Mask: mask}
	// Landing on the zero address means Inc wrapped around the space.
	rolled := last.Equal(net.IPv4zero) || last.Equal(net.IPv6zero)
	return next, rolled
}
// Inc increases the IP by one; it returns a new []byte for the IP and does
// not modify its argument. The all-ones address wraps to the zero address.
func Inc(IP net.IP) net.IP {
	IP = checkIPv4(IP)
	out := make(net.IP, len(IP))
	copy(out, IP)
	// Propagate the carry from the least significant byte upward.
	for j := len(out) - 1; j >= 0; j-- {
		out[j]++
		if out[j] != 0 {
			break
		}
	}
	return out
}
// Dec decreases the IP by one; it returns a new []byte for the IP and does
// not modify its argument. The zero address wraps to the all-ones address.
func Dec(IP net.IP) net.IP {
	IP = checkIPv4(IP)
	decIP := make(net.IP, len(IP))
	copy(decIP, IP)
	// (A second checkIPv4 call on decIP was removed: decIP is a copy of an
	// already-normalized address, so the call was a redundant no-op.)
	// Propagate the borrow from the least significant byte upward.
	for j := len(decIP) - 1; j >= 0; j-- {
		decIP[j]--
		if decIP[j] < 255 {
			break
		}
	}
	return decIP
}
func checkIPv4(ip net.IP) net.IP {
// Go for some reason allocs IPv6len for IPv4 so we have to correct it
if v4 := ip.To4(); v4 != nil {
return v4
}
return ip
}

View File

@ -1,438 +0,0 @@
package cidr
import (
"bytes"
"fmt"
"net"
"strconv"
"testing"
)
// TestSubnet exercises Subnet with IPv4 and IPv6 parents, covering valid
// extensions as well as error cases (not enough spare bits, network number
// too large for the extension).
func TestSubnet(t *testing.T) {
	type Case struct {
		Base   string
		Bits   int
		Num    int
		Output string
		Error  bool
	}
	// gofmt -s: the redundant Case element type has been dropped from the literals.
	cases := []Case{
		{Base: "192.168.2.0/20", Bits: 4, Num: 6, Output: "192.168.6.0/24"},
		{Base: "192.168.2.0/20", Bits: 4, Num: 0, Output: "192.168.0.0/24"},
		{Base: "192.168.0.0/31", Bits: 1, Num: 1, Output: "192.168.0.1/32"},
		{Base: "192.168.0.0/21", Bits: 4, Num: 7, Output: "192.168.3.128/25"},
		{Base: "fe80::/48", Bits: 16, Num: 6, Output: "fe80:0:0:6::/64"},
		{Base: "fe80::/49", Bits: 16, Num: 7, Output: "fe80:0:0:3:8000::/65"},
		// not enough bits to expand into
		{Base: "192.168.2.0/31", Bits: 2, Num: 0, Error: true},
		// not enough bits to expand into
		{Base: "fe80::/126", Bits: 4, Num: 0, Error: true},
		// can't fit 16 into 4 bits
		{Base: "192.168.2.0/24", Bits: 4, Num: 16, Error: true},
	}
	for _, testCase := range cases {
		_, base, _ := net.ParseCIDR(testCase.Base)
		gotNet, err := Subnet(base, testCase.Bits, testCase.Num)
		desc := fmt.Sprintf("Subnet(%#v,%#v,%#v)", testCase.Base, testCase.Bits, testCase.Num)
		if err != nil {
			if !testCase.Error {
				t.Errorf("%s failed: %s", desc, err.Error())
			}
			continue
		}
		got := gotNet.String()
		if testCase.Error {
			t.Errorf("%s = %s; want error", desc, got)
		} else if got != testCase.Output {
			t.Errorf("%s = %s; want %s", desc, got, testCase.Output)
		}
	}
}
// TestHost exercises Host with positive, negative (counted back from the end
// of the range) and out-of-range host numbers, for IPv4 and IPv6 ranges.
func TestHost(t *testing.T) {
	type Case struct {
		Range  string
		Num    int
		Output string
		Error  bool
	}
	// gofmt -s: the redundant Case element type has been dropped from the literals.
	cases := []Case{
		{Range: "192.168.2.0/20", Num: 6, Output: "192.168.0.6"},
		{Range: "192.168.0.0/20", Num: 257, Output: "192.168.1.1"},
		{Range: "2001:db8::/32", Num: 1, Output: "2001:db8::1"},
		// only 0-255 will fit in 8 bits
		{Range: "192.168.1.0/24", Num: 256, Error: true},
		// 4 addresses (0-3) in 2 bits; 3rd from end = 1
		{Range: "192.168.0.0/30", Num: -3, Output: "192.168.0.1"},
		// 4 addresses (0-3) in 2 bits; 4th from end = 0
		{Range: "192.168.0.0/30", Num: -4, Output: "192.168.0.0"},
		// 4 addresses (0-3) in 2 bits; cannot accommodate 5
		{Range: "192.168.0.0/30", Num: -5, Error: true},
	}
	for _, testCase := range cases {
		_, network, _ := net.ParseCIDR(testCase.Range)
		gotIP, err := Host(network, testCase.Num)
		desc := fmt.Sprintf("Host(%#v,%#v)", testCase.Range, testCase.Num)
		if err != nil {
			if !testCase.Error {
				t.Errorf("%s failed: %s", desc, err.Error())
			}
			continue
		}
		got := gotIP.String()
		if testCase.Error {
			t.Errorf("%s = %s; want error", desc, got)
		} else if got != testCase.Output {
			t.Errorf("%s = %s; want %s", desc, got, testCase.Output)
		}
	}
}
// TestAddressRange checks the first and last addresses reported for IPv4 and
// IPv6 ranges.
func TestAddressRange(t *testing.T) {
	type Case struct {
		Range string
		First string
		Last  string
	}
	// gofmt -s: the redundant Case element type has been dropped from the literals.
	cases := []Case{
		{Range: "192.168.0.0/16", First: "192.168.0.0", Last: "192.168.255.255"},
		{Range: "192.168.0.0/17", First: "192.168.0.0", Last: "192.168.127.255"},
		{Range: "fe80::/64", First: "fe80::", Last: "fe80::ffff:ffff:ffff:ffff"},
	}
	for _, testCase := range cases {
		_, network, _ := net.ParseCIDR(testCase.Range)
		firstIP, lastIP := AddressRange(network)
		desc := fmt.Sprintf("AddressRange(%#v)", testCase.Range)
		if got := firstIP.String(); got != testCase.First {
			t.Errorf("%s first is %s; want %s", desc, got, testCase.First)
		}
		if got := lastIP.String(); got != testCase.Last {
			t.Errorf("%s last is %s; want %s", desc, got, testCase.Last)
		}
	}
}
// TestAddressCount checks the number of addresses reported for a variety of
// IPv4 and IPv6 prefix lengths, including whole-space ranges.
func TestAddressCount(t *testing.T) {
	type Case struct {
		Range string
		Count uint64
	}
	// gofmt -s: the redundant Case element type has been dropped from the literals.
	cases := []Case{
		{Range: "192.168.0.0/16", Count: 65536},
		{Range: "192.168.0.0/17", Count: 32768},
		{Range: "192.168.0.0/32", Count: 1},
		{Range: "192.168.0.0/31", Count: 2},
		{Range: "0.0.0.0/0", Count: 4294967296},
		{Range: "0.0.0.0/1", Count: 2147483648},
		{Range: "::/65", Count: 9223372036854775808},
		{Range: "::/128", Count: 1},
		{Range: "::/127", Count: 2},
	}
	for _, testCase := range cases {
		_, network, _ := net.ParseCIDR(testCase.Range)
		gotCount := AddressCount(network)
		desc := fmt.Sprintf("AddressCount(%#v)", testCase.Range)
		if gotCount != testCase.Count {
			t.Errorf("%s = %d; want %d", desc, gotCount, testCase.Count)
		}
	}
}
// TestIncDec verifies that Inc and Dec are inverses of each other, wrap around
// at the edges of the address space, and never modify their arguments.
func TestIncDec(t *testing.T) {
	// Each pair is {address, address + 1}.
	// gofmt -s: the redundant []string element type has been dropped.
	testCase := [][]string{
		{"0.0.0.0", "0.0.0.1"},
		{"10.0.0.0", "10.0.0.1"},
		{"9.255.255.255", "10.0.0.0"},
		{"255.255.255.255", "0.0.0.0"},
		{"::", "::1"},
		{"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", "::"},
		{"2001:db8:c001:ba00::", "2001:db8:c001:ba00::1"},
	}
	for _, tc := range testCase {
		ip1 := net.ParseIP(tc[0])
		ip2 := net.ParseIP(tc[1])
		iIP := Inc(ip1)
		if !iIP.Equal(ip2) {
			t.Logf("%s should inc to equal %s\n", tc[0], tc[1])
			t.Errorf("%v should equal %v\n", iIP, ip2)
		}
		if ip1.Equal(ip2) {
			t.Errorf("[%v] should not have been modified to [%v]", ip2, iIP)
		}
	}
	for _, tc := range testCase {
		ip1 := net.ParseIP(tc[0])
		ip2 := net.ParseIP(tc[1])
		dIP := Dec(ip2)
		if !ip1.Equal(dIP) {
			t.Logf("%s should dec equal %s\n", tc[0], tc[1])
			t.Errorf("%v should equal %v\n", ip1, dIP)
		}
		if ip2.Equal(dIP) {
			t.Errorf("[%v] should not have been modified to [%v]", ip2, dIP)
		}
	}
}
// TestPreviousSubnet walks each pair in both directions: PreviousSubnet from
// the higher block must yield the lower one, and NextSubnet from the lower
// block must yield the higher one. The third column says whether the address
// space rolls over.
func TestPreviousSubnet(t *testing.T) {
	// gofmt -s: the redundant []string element type has been dropped.
	testCases := [][]string{
		{"10.0.0.0/24", "9.255.255.0/24", "false"},
		{"100.0.0.0/26", "99.255.255.192/26", "false"},
		{"0.0.0.0/26", "255.255.255.192/26", "true"},
		{"2001:db8:e000::/36", "2001:db8:d000::/36", "false"},
		{"::/64", "ffff:ffff:ffff:ffff::/64", "true"},
	}
	for _, tc := range testCases {
		_, c1, _ := net.ParseCIDR(tc[0])
		_, c2, _ := net.ParseCIDR(tc[1])
		mask, _ := c1.Mask.Size()
		p1, rollback := PreviousSubnet(c1, mask)
		if !p1.IP.Equal(c2.IP) {
			t.Errorf("IP expected %v, got %v\n", c2.IP, p1.IP)
		}
		if !bytes.Equal(p1.Mask, c2.Mask) {
			t.Errorf("Mask expected %v, got %v\n", c2.Mask, p1.Mask)
		}
		if p1.String() != c2.String() {
			t.Errorf("%s should have been equal %s\n", p1.String(), c2.String())
		}
		if check, _ := strconv.ParseBool(tc[2]); rollback != check {
			t.Errorf("%s to %s should have rolled\n", tc[0], tc[1])
		}
	}
	for _, tc := range testCases {
		_, c1, _ := net.ParseCIDR(tc[0])
		_, c2, _ := net.ParseCIDR(tc[1])
		mask, _ := c1.Mask.Size()
		n1, rollover := NextSubnet(c2, mask)
		if !n1.IP.Equal(c1.IP) {
			t.Errorf("IP expected %v, got %v\n", c1.IP, n1.IP)
		}
		if !bytes.Equal(n1.Mask, c1.Mask) {
			t.Errorf("Mask expected %v, got %v\n", c1.Mask, n1.Mask)
		}
		if n1.String() != c1.String() {
			t.Errorf("%s should have been equal %s\n", n1.String(), c1.String())
		}
		if check, _ := strconv.ParseBool(tc[2]); rollover != check {
			t.Errorf("%s to %s should have rolled\n", tc[0], tc[1])
		}
	}
}
// TestVerifyNetowrk exercises VerifyNoOverlap with one non-overlapping
// subnet plan that must pass, and two plans that must be rejected: an
// overlapping split and a subnet outside the parent block.
// NOTE(review): "Netowrk" is a pre-existing typo in the test name; it is
// kept because this rewrite must not change identifiers.
func TestVerifyNetowrk(t *testing.T) {
	type plan struct {
		block   string
		subnets []string
	}
	// parse converts a plan's CIDR strings into net.IPNet values, flagging
	// any malformed test data.
	parse := func(p *plan) ([]*net.IPNet, *net.IPNet) {
		nets := make([]*net.IPNet, len(p.subnets))
		for i, s := range p.subnets {
			_, n, err := net.ParseCIDR(s)
			if err != nil {
				t.Errorf("Bad test data %s\n", s)
			}
			nets[i] = n
		}
		_, parent, err := net.ParseCIDR(p.block)
		if err != nil {
			t.Errorf("Bad test data %s\n", p.block)
		}
		return nets, parent
	}
	passing := []*plan{
		{
			block: "192.168.8.0/21",
			subnets: []string{
				"192.168.8.0/24",
				"192.168.9.0/24",
				"192.168.10.0/24",
				"192.168.11.0/25",
				"192.168.11.128/25",
				"192.168.12.0/25",
				"192.168.12.128/26",
				"192.168.12.192/26",
				"192.168.13.0/26",
				"192.168.13.64/27",
				"192.168.13.96/27",
				"192.168.13.128/27",
			},
		},
	}
	failing := []*plan{
		{
			// 192.168.12.64/26 overlaps 192.168.12.0/25.
			block: "192.168.8.0/21",
			subnets: []string{
				"192.168.8.0/24",
				"192.168.9.0/24",
				"192.168.10.0/24",
				"192.168.11.0/25",
				"192.168.11.128/25",
				"192.168.12.0/25",
				"192.168.12.64/26",
				"192.168.12.128/26",
			},
		},
		{
			// 192.168.7.0/24 lies outside the 192.168.8.0/21 parent.
			block: "192.168.8.0/21",
			subnets: []string{
				"192.168.7.0/24",
				"192.168.9.0/24",
				"192.168.10.0/24",
				"192.168.11.0/25",
				"192.168.11.128/25",
				"192.168.12.0/25",
				"192.168.12.64/26",
				"192.168.12.128/26",
			},
		},
	}
	for _, p := range passing {
		nets, parent := parse(p)
		if err := VerifyNoOverlap(nets, parent); err != nil {
			t.Errorf("Failed test with %v\n", err)
		}
	}
	for _, p := range failing {
		nets, parent := parse(p)
		if err := VerifyNoOverlap(nets, parent); err == nil {
			t.Errorf("Test should have failed with CIDR %s\n", p.block)
		}
	}
}

View File

@ -1,38 +0,0 @@
package cidr
import (
"fmt"
"math/big"
"net"
)
func ipToInt(ip net.IP) (*big.Int, int) {
val := &big.Int{}
val.SetBytes([]byte(ip))
if len(ip) == net.IPv4len {
return val, 32
} else if len(ip) == net.IPv6len {
return val, 128
} else {
panic(fmt.Errorf("Unsupported address length %d", len(ip)))
}
}
func intToIP(ipInt *big.Int, bits int) net.IP {
ipBytes := ipInt.Bytes()
ret := make([]byte, bits/8)
// Pack our IP bytes into the end of the return array,
// since big.Int.Bytes() removes front zero padding.
for i := 1; i <= len(ipBytes); i++ {
ret[len(ret)-i] = ipBytes[len(ipBytes)-i]
}
return net.IP(ret)
}
// insertNumIntoIP ORs num into ip's host portion: num is shifted left so it
// sits immediately after the first prefixLen bits of the address.
func insertNumIntoIP(ip net.IP, num int, prefixLen int) net.IP {
	val, totalBits := ipToInt(ip)
	shifted := new(big.Int).Lsh(big.NewInt(int64(num)), uint(totalBits-prefixLen))
	return intToIP(val.Or(val, shifted), totalBits)
}

View File

@ -1,19 +0,0 @@
language: go
go:
- 1.8.x
- tip
matrix:
fast_finish: true
allow_failures:
- go: tip
before_install:
- go get -t -v ./...
script:
- go test -coverprofile=coverage.txt -covermode=atomic ./textseg
after_success:
- bash <(curl -s https://codecov.io/bash)

View File

@ -1,95 +0,0 @@
Copyright (c) 2017 Martin Atkins
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
---------
Unicode table generation programs are under a separate copyright and license:
Copyright (c) 2014 Couchbase, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
---------
Grapheme break data is provided as part of the Unicode character database,
copright 2016 Unicode, Inc, which is provided with the following license:
Unicode Data Files include all data files under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
Unicode Data Files do not include PDF online code charts under the
directory http://www.unicode.org/Public/.
Software includes any source code published in the Unicode Standard
or under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
NOTICE TO USER: Carefully read the following legal agreement.
BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
TERMS AND CONDITIONS OF THIS AGREEMENT.
IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
THE DATA FILES OR SOFTWARE.
COPYRIGHT AND PERMISSION NOTICE
Copyright © 1991-2017 Unicode, Inc. All rights reserved.
Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Unicode data files and any associated documentation
(the "Data Files") or Unicode software and any associated documentation
(the "Software") to deal in the Data Files or Software
without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, and/or sell copies of
the Data Files or Software, and to permit persons to whom the Data Files
or Software are furnished to do so, provided that either
(a) this copyright and permission notice appear with all copies
of the Data Files or Software, or
(b) this copyright and permission notice appear in associated
Documentation.
THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT OF THIRD PARTY RIGHTS.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THE DATA FILES OR SOFTWARE.
Except as contained in this notice, the name of a copyright holder
shall not be used in advertising or otherwise to promote the sale,
use or other dealings in these Data Files or Software without prior
written authorization of the copyright holder.

View File

@ -1,30 +0,0 @@
package textseg
import (
"bufio"
"bytes"
)
// AllTokens is a utility that uses a bufio.SplitFunc to produce a slice of
// all of the recognized tokens in the given buffer.
func AllTokens(buf []byte, splitFunc bufio.SplitFunc) ([][]byte, error) {
scanner := bufio.NewScanner(bytes.NewReader(buf))
scanner.Split(splitFunc)
var ret [][]byte
for scanner.Scan() {
ret = append(ret, scanner.Bytes())
}
return ret, scanner.Err()
}
// TokenCount is a utility that uses a bufio.SplitFunc to count the number of
// recognized tokens in the given buffer.
func TokenCount(buf []byte, splitFunc bufio.SplitFunc) (int, error) {
scanner := bufio.NewScanner(bytes.NewReader(buf))
scanner.Split(splitFunc)
var ret int
for scanner.Scan() {
ret++
}
return ret, scanner.Err()
}

View File

@ -1,68 +0,0 @@
package textseg
import (
"bufio"
"reflect"
"testing"
)
// TestAllTokens feeds word-split inputs through AllTokens and compares the
// resulting tokens against expected word lists, dumping byte-level detail
// on mismatch.
func TestAllTokens(t *testing.T) {
	cases := []struct {
		input string
		want  []string
	}{
		{``, []string{}},
		{`hello`, []string{`hello`}},
		{`hello world`, []string{`hello`, `world`}},
		{`hello worldly world`, []string{`hello`, `worldly`, `world`}},
	}
	for _, tc := range cases {
		t.Run(tc.input, func(t *testing.T) {
			rawToks, err := AllTokens([]byte(tc.input), bufio.ScanWords)
			if err != nil {
				t.Fatalf("unexpected error: %s", err)
			}
			got := make([]string, len(rawToks))
			for i, tok := range rawToks {
				got[i] = string(tok)
			}
			if reflect.DeepEqual(got, tc.want) {
				return
			}
			wantRaw := make([][]byte, len(tc.want))
			for i, s := range tc.want {
				wantRaw[i] = []byte(s)
			}
			t.Errorf(
				"wrong result\ninput: %s\ngot: %s\nwant: %s",
				formatBytes([]byte(tc.input)),
				formatByteRanges(rawToks),
				formatByteRanges(wantRaw),
			)
		})
	}
}

View File

@ -1,7 +0,0 @@
// Code-generation entry points for package textseg. Running `go generate`
// regenerates the Unicode property tables and the Ragel-built grapheme
// cluster scanner.
package textseg

// Order matters: the tables are generated first, then the Ragel grammar is
// produced from the Unicode data, compiled to Go, and finally gofmt'd.
//go:generate go run make_tables.go -output tables.go
//go:generate go run make_test_tables.go -output tables_test.go
//go:generate ruby unicode2ragel.rb --url=http://www.unicode.org/Public/9.0.0/ucd/auxiliary/GraphemeBreakProperty.txt -m GraphemeCluster -p "Prepend,CR,LF,Control,Extend,Regional_Indicator,SpacingMark,L,V,T,LV,LVT,E_Base,E_Modifier,ZWJ,Glue_After_Zwj,E_Base_GAZ" -o grapheme_clusters_table.rl
//go:generate ragel -Z grapheme_clusters.rl
//go:generate gofmt -w grapheme_clusters.go

File diff suppressed because it is too large Load Diff

View File

@ -1,132 +0,0 @@
package textseg
import (
"errors"
"unicode/utf8"
)
// Generated from grapheme_clusters.rl. DO NOT EDIT
%%{
# (except you are actually in grapheme_clusters.rl here, so edit away!)
machine graphclust;
write data;
}%%
var Error = errors.New("invalid UTF8 text")
// ScanGraphemeClusters is a split function for bufio.Scanner that splits
// on grapheme cluster boundaries.
func ScanGraphemeClusters(data []byte, atEOF bool) (int, []byte, error) {
if len(data) == 0 {
return 0, nil, nil
}
// Ragel state
cs := 0 // Current State
p := 0 // "Pointer" into data
pe := len(data) // End-of-data "pointer"
ts := 0
te := 0
act := 0
eof := pe
// Make Go compiler happy
_ = ts
_ = te
_ = act
_ = eof
startPos := 0
endPos := 0
%%{
include GraphemeCluster "grapheme_clusters_table.rl";
action start {
startPos = p
}
action end {
endPos = p
}
action emit {
return endPos+1, data[startPos:endPos+1], nil
}
ZWJGlue = ZWJ (Glue_After_Zwj | E_Base_GAZ Extend* E_Modifier?)?;
AnyExtender = Extend | ZWJGlue | SpacingMark;
Extension = AnyExtender*;
ReplacementChar = (0xEF 0xBF 0xBD);
CRLFSeq = CR LF;
ControlSeq = Control | ReplacementChar;
HangulSeq = (
L+ (((LV? V+ | LVT) T*)?|LV?) |
LV V* T* |
V+ T* |
LVT T* |
T+
) Extension;
EmojiSeq = (E_Base | E_Base_GAZ) Extend* E_Modifier? Extension;
ZWJSeq = ZWJGlue Extension;
EmojiFlagSeq = Regional_Indicator Regional_Indicator? Extension;
UTF8Cont = 0x80 .. 0xBF;
AnyUTF8 = (
0x00..0x7F |
0xC0..0xDF . UTF8Cont |
0xE0..0xEF . UTF8Cont . UTF8Cont |
0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
);
# OtherSeq is any character that isn't at the start of one of the extended sequences above, followed by extension
OtherSeq = (AnyUTF8 - (CR|LF|Control|ReplacementChar|L|LV|V|LVT|T|E_Base|E_Base_GAZ|ZWJ|Regional_Indicator|Prepend)) Extension;
# PrependSeq is prepend followed by any of the other patterns above, except control characters which explicitly break
PrependSeq = Prepend+ (HangulSeq|EmojiSeq|ZWJSeq|EmojiFlagSeq|OtherSeq)?;
CRLFTok = CRLFSeq >start @end;
ControlTok = ControlSeq >start @end;
HangulTok = HangulSeq >start @end;
EmojiTok = EmojiSeq >start @end;
ZWJTok = ZWJSeq >start @end;
EmojiFlagTok = EmojiFlagSeq >start @end;
OtherTok = OtherSeq >start @end;
PrependTok = PrependSeq >start @end;
main := |*
CRLFTok => emit;
ControlTok => emit;
HangulTok => emit;
EmojiTok => emit;
ZWJTok => emit;
EmojiFlagTok => emit;
PrependTok => emit;
OtherTok => emit;
# any single valid UTF-8 character would also be valid per spec,
# but we'll handle that separately after the loop so we can deal
# with requesting more bytes if we're not at EOF.
*|;
write init;
write exec;
}%%
// If we fall out here then we were unable to complete a sequence.
// If we weren't able to complete a sequence then either we've
// reached the end of a partial buffer (so there's more data to come)
// or we have an isolated symbol that would normally be part of a
// grapheme cluster but has appeared in isolation here.
if !atEOF {
// Request more
return 0, nil, nil
}
// Just take the first UTF-8 sequence and return that.
_, seqLen := utf8.DecodeRune(data)
return seqLen, data[:seqLen], nil
}

File diff suppressed because it is too large Load Diff

View File

@ -1,66 +0,0 @@
package textseg
import (
"fmt"
"reflect"
"strings"
"testing"
"unicode/utf8"
)
func TestScanGraphemeClusters(t *testing.T) {
tests := unicodeGraphemeTests
for i, test := range tests {
t.Run(fmt.Sprintf("%03d-%x", i, test.input), func(t *testing.T) {
got, err := AllTokens(test.input, ScanGraphemeClusters)
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
if !reflect.DeepEqual(got, test.output) {
// Also get the rune values resulting from decoding utf8,
// since they are generally easier to look up to figure out
// what's failing.
runes := make([]string, 0, len(test.input))
seqs := make([][]byte, 0, len(test.input))
categories := make([]string, 0, len(test.input))
buf := test.input
for len(buf) > 0 {
r, size := utf8.DecodeRune(buf)
runes = append(runes, fmt.Sprintf("0x%04x", r))
seqs = append(seqs, buf[:size])
categories = append(categories, _GraphemeRuneType(r).String())
buf = buf[size:]
}
t.Errorf(
"wrong result\ninput: %s\nutf8s: %s\nrunes: %s\ncats: %s\ngot: %s\nwant: %s",
formatBytes(test.input),
formatByteRanges(seqs),
strings.Join(runes, " "),
strings.Join(categories, " "),
formatByteRanges(got),
formatByteRanges(test.output),
)
}
})
}
}
// formatBytes renders buf as space-separated "0xNN" hex bytes.
func formatBytes(buf []byte) string {
	var b strings.Builder
	for i, c := range buf {
		if i > 0 {
			b.WriteByte(' ')
		}
		fmt.Fprintf(&b, "0x%02x", c)
	}
	return b.String()
}
// formatByteRanges renders each byte slice with formatBytes, joining the
// pieces with " | " so token boundaries are visible.
func formatByteRanges(bufs [][]byte) string {
	parts := make([]string, len(bufs))
	for i, buf := range bufs {
		parts[i] = formatBytes(buf)
	}
	return strings.Join(parts, " | ")
}

View File

@ -1,307 +0,0 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
// Modified by Martin Atkins to serve the needs of package textseg.
// +build ignore
package main
import (
"bufio"
"flag"
"fmt"
"io"
"log"
"net/http"
"os"
"os/exec"
"sort"
"strconv"
"strings"
"unicode"
)
var url = flag.String("url",
"http://www.unicode.org/Public/"+unicode.Version+"/ucd/auxiliary/",
"URL of Unicode database directory")
var verbose = flag.Bool("verbose",
false,
"write data to stdout as it is parsed")
var localFiles = flag.Bool("local",
false,
"data files have been copied to the current directory; for debugging only")
var outputFile = flag.String("output",
"",
"output file for generated tables; default stdout")
var output *bufio.Writer
func main() {
flag.Parse()
setupOutput()
graphemePropertyRanges := make(map[string]*unicode.RangeTable)
loadUnicodeData("GraphemeBreakProperty.txt", graphemePropertyRanges)
wordPropertyRanges := make(map[string]*unicode.RangeTable)
loadUnicodeData("WordBreakProperty.txt", wordPropertyRanges)
sentencePropertyRanges := make(map[string]*unicode.RangeTable)
loadUnicodeData("SentenceBreakProperty.txt", sentencePropertyRanges)
fmt.Fprintf(output, fileHeader, *url)
generateTables("Grapheme", graphemePropertyRanges)
generateTables("Word", wordPropertyRanges)
generateTables("Sentence", sentencePropertyRanges)
flushOutput()
}
// WordBreakProperty.txt has the form:
// 05F0..05F2 ; Hebrew_Letter # Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV..HEBREW LIGATURE YIDDISH DOUBLE YOD
// FB1D ; Hebrew_Letter # Lo HEBREW LETTER YOD WITH HIRIQ
func openReader(file string) (input io.ReadCloser) {
if *localFiles {
f, err := os.Open(file)
if err != nil {
log.Fatal(err)
}
input = f
} else {
path := *url + file
resp, err := http.Get(path)
if err != nil {
log.Fatal(err)
}
if resp.StatusCode != 200 {
log.Fatal("bad GET status for "+file, resp.Status)
}
input = resp.Body
}
return
}
func loadUnicodeData(filename string, propertyRanges map[string]*unicode.RangeTable) {
f := openReader(filename)
defer f.Close()
bufioReader := bufio.NewReader(f)
line, err := bufioReader.ReadString('\n')
for err == nil {
parseLine(line, propertyRanges)
line, err = bufioReader.ReadString('\n')
}
// if the err was EOF still need to process last value
if err == io.EOF {
parseLine(line, propertyRanges)
}
}
const comment = "#"
const sep = ";"
const rnge = ".."
// parseLine parses one line of a Unicode property data file and records the
// codepoint range it describes into propertyRanges, keyed by property name.
// Comment-only and blank lines are ignored; malformed lines are logged and
// skipped. Codepoints below 0x10000 go into R16 tables, the rest into R32.
func parseLine(line string, propertyRanges map[string]*unicode.RangeTable) {
	if strings.HasPrefix(line, comment) {
		return
	}
	line = strings.TrimSpace(line)
	if line == "" {
		return
	}
	// Strip a trailing comment, if present.
	if idx := strings.Index(line, comment); idx > 0 {
		line = line[:idx]
	}
	pieces := strings.Split(line, sep)
	if len(pieces) != 2 {
		log.Printf("unexpected %d pieces in %s", len(pieces), line)
		return
	}
	propertyName := strings.TrimSpace(pieces[1])
	rangeTable := propertyRanges[propertyName]
	if rangeTable == nil {
		rangeTable = &unicode.RangeTable{
			LatinOffset: 0,
		}
		propertyRanges[propertyName] = rangeTable
	}
	codepointRange := strings.TrimSpace(pieces[0])
	rngeIndex := strings.Index(codepointRange, rnge)
	if rngeIndex < 0 {
		// Single codepoint rather than a lo..hi range.
		cp, err := strconv.ParseUint(codepointRange, 16, 64)
		if err != nil {
			log.Printf("error parsing int: %v", err)
			return
		}
		if cp < 0x10000 {
			addR16ToTable(rangeTable, unicode.Range16{
				Lo:     uint16(cp),
				Hi:     uint16(cp),
				Stride: 1,
			})
		} else {
			addR32ToTable(rangeTable, unicode.Range32{
				Lo:     uint32(cp),
				Hi:     uint32(cp),
				Stride: 1,
			})
		}
		return
	}
	lo, err := strconv.ParseUint(codepointRange[:rngeIndex], 16, 64)
	if err != nil {
		log.Printf("error parsing int: %v", err)
		return
	}
	hi, err := strconv.ParseUint(codepointRange[rngeIndex+2:], 16, 64)
	if err != nil {
		log.Printf("error parsing int: %v", err)
		return
	}
	switch {
	case lo < 0x10000 && hi < 0x10000:
		addR16ToTable(rangeTable, unicode.Range16{
			Lo:     uint16(lo),
			Hi:     uint16(hi),
			Stride: 1,
		})
	case lo >= 0x10000 && hi >= 0x10000:
		addR32ToTable(rangeTable, unicode.Range32{
			Lo:     uint32(lo),
			Hi:     uint32(hi),
			Stride: 1,
		})
	default:
		// A range straddling the 16/32-bit boundary is not representable
		// in a single RangeTable entry here.
		log.Printf("unexpected range")
	}
}
func addR16ToTable(r *unicode.RangeTable, r16 unicode.Range16) {
if r.R16 == nil {
r.R16 = make([]unicode.Range16, 0, 1)
}
r.R16 = append(r.R16, r16)
if r16.Hi <= unicode.MaxLatin1 {
r.LatinOffset++
}
}
func addR32ToTable(r *unicode.RangeTable, r32 unicode.Range32) {
if r.R32 == nil {
r.R32 = make([]unicode.Range32, 0, 1)
}
r.R32 = append(r.R32, r32)
}
func generateTables(prefix string, propertyRanges map[string]*unicode.RangeTable) {
prNames := make([]string, 0, len(propertyRanges))
for k := range propertyRanges {
prNames = append(prNames, k)
}
sort.Strings(prNames)
for _, key := range prNames {
rt := propertyRanges[key]
fmt.Fprintf(output, "var _%s%s = %s\n", prefix, key, generateRangeTable(rt))
}
fmt.Fprintf(output, "type _%sRuneRange unicode.RangeTable\n", prefix)
fmt.Fprintf(output, "func _%sRuneType(r rune) *_%sRuneRange {\n", prefix, prefix)
fmt.Fprintf(output, "\tswitch {\n")
for _, key := range prNames {
fmt.Fprintf(output, "\tcase unicode.Is(_%s%s, r):\n\t\treturn (*_%sRuneRange)(_%s%s)\n", prefix, key, prefix, prefix, key)
}
fmt.Fprintf(output, "\tdefault:\n\t\treturn nil\n")
fmt.Fprintf(output, "\t}\n")
fmt.Fprintf(output, "}\n")
fmt.Fprintf(output, "func (rng *_%sRuneRange) String() string {\n", prefix)
fmt.Fprintf(output, "\tswitch (*unicode.RangeTable)(rng) {\n")
for _, key := range prNames {
fmt.Fprintf(output, "\tcase _%s%s:\n\t\treturn %q\n", prefix, key, key)
}
fmt.Fprintf(output, "\tdefault:\n\t\treturn \"Other\"\n")
fmt.Fprintf(output, "\t}\n")
fmt.Fprintf(output, "}\n")
}
func generateRangeTable(rt *unicode.RangeTable) string {
rv := "&unicode.RangeTable{\n"
if rt.R16 != nil {
rv += "\tR16: []unicode.Range16{\n"
for _, r16 := range rt.R16 {
rv += fmt.Sprintf("\t\t%#v,\n", r16)
}
rv += "\t},\n"
}
if rt.R32 != nil {
rv += "\tR32: []unicode.Range32{\n"
for _, r32 := range rt.R32 {
rv += fmt.Sprintf("\t\t%#v,\n", r32)
}
rv += "\t},\n"
}
rv += fmt.Sprintf("\t\tLatinOffset: %d,\n", rt.LatinOffset)
rv += "}\n"
return rv
}
const fileHeader = `// Generated by running
// maketables --url=%s
// DO NOT EDIT
package textseg
import(
"unicode"
)
`
func setupOutput() {
output = bufio.NewWriter(startGofmt())
}
// startGofmt connects output to a gofmt process if -output is set.
func startGofmt() io.Writer {
if *outputFile == "" {
return os.Stdout
}
stdout, err := os.Create(*outputFile)
if err != nil {
log.Fatal(err)
}
// Pipe output to gofmt.
gofmt := exec.Command("gofmt")
fd, err := gofmt.StdinPipe()
if err != nil {
log.Fatal(err)
}
gofmt.Stdout = stdout
gofmt.Stderr = os.Stderr
err = gofmt.Start()
if err != nil {
log.Fatal(err)
}
return fd
}
// flushOutput flushes the buffered generator output, aborting on error.
func flushOutput() {
	if err := output.Flush(); err != nil {
		log.Fatal(err)
	}
}

View File

@ -1,212 +0,0 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
// +build ignore
package main
import (
"bufio"
"bytes"
"flag"
"fmt"
"io"
"log"
"net/http"
"os"
"os/exec"
"strconv"
"strings"
"unicode"
)
var url = flag.String("url",
"http://www.unicode.org/Public/"+unicode.Version+"/ucd/auxiliary/",
"URL of Unicode database directory")
var verbose = flag.Bool("verbose",
false,
"write data to stdout as it is parsed")
var localFiles = flag.Bool("local",
false,
"data files have been copied to the current directory; for debugging only")
var outputFile = flag.String("output",
"",
"output file for generated tables; default stdout")
var output *bufio.Writer
func main() {
flag.Parse()
setupOutput()
graphemeTests := make([]test, 0)
graphemeTests = loadUnicodeData("GraphemeBreakTest.txt", graphemeTests)
wordTests := make([]test, 0)
wordTests = loadUnicodeData("WordBreakTest.txt", wordTests)
sentenceTests := make([]test, 0)
sentenceTests = loadUnicodeData("SentenceBreakTest.txt", sentenceTests)
fmt.Fprintf(output, fileHeader, *url)
generateTestTables("Grapheme", graphemeTests)
generateTestTables("Word", wordTests)
generateTestTables("Sentence", sentenceTests)
flushOutput()
}
// WordBreakProperty.txt has the form:
// 05F0..05F2 ; Hebrew_Letter # Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV..HEBREW LIGATURE YIDDISH DOUBLE YOD
// FB1D ; Hebrew_Letter # Lo HEBREW LETTER YOD WITH HIRIQ
func openReader(file string) (input io.ReadCloser) {
if *localFiles {
f, err := os.Open(file)
if err != nil {
log.Fatal(err)
}
input = f
} else {
path := *url + file
resp, err := http.Get(path)
if err != nil {
log.Fatal(err)
}
if resp.StatusCode != 200 {
log.Fatal("bad GET status for "+file, resp.Status)
}
input = resp.Body
}
return
}
func loadUnicodeData(filename string, tests []test) []test {
f := openReader(filename)
defer f.Close()
bufioReader := bufio.NewReader(f)
line, err := bufioReader.ReadString('\n')
for err == nil {
tests = parseLine(line, tests)
line, err = bufioReader.ReadString('\n')
}
// if the err was EOF still need to process last value
if err == io.EOF {
tests = parseLine(line, tests)
}
return tests
}
const comment = "#"
const brk = "÷"
const nbrk = "×"
type test [][]byte
func parseLine(line string, tests []test) []test {
if strings.HasPrefix(line, comment) {
return tests
}
line = strings.TrimSpace(line)
if len(line) == 0 {
return tests
}
commentStart := strings.Index(line, comment)
if commentStart > 0 {
line = line[0:commentStart]
}
pieces := strings.Split(line, brk)
t := make(test, 0)
for _, piece := range pieces {
piece = strings.TrimSpace(piece)
if len(piece) > 0 {
codePoints := strings.Split(piece, nbrk)
word := ""
for _, codePoint := range codePoints {
codePoint = strings.TrimSpace(codePoint)
r, err := strconv.ParseInt(codePoint, 16, 64)
if err != nil {
log.Printf("err: %v for '%s'", err, string(r))
return tests
}
word += string(r)
}
t = append(t, []byte(word))
}
}
tests = append(tests, t)
return tests
}
func generateTestTables(prefix string, tests []test) {
fmt.Fprintf(output, testHeader, prefix)
for _, t := range tests {
fmt.Fprintf(output, "\t\t{\n")
fmt.Fprintf(output, "\t\t\tinput: %#v,\n", bytes.Join(t, []byte{}))
fmt.Fprintf(output, "\t\t\toutput: %s,\n", generateTest(t))
fmt.Fprintf(output, "\t\t},\n")
}
fmt.Fprintf(output, "}\n")
}
// generateTest renders t as a [][]byte composite-literal in Go source form.
func generateTest(t test) string {
	var b strings.Builder
	b.WriteString("[][]byte{")
	for _, te := range t {
		fmt.Fprintf(&b, "%#v,", te)
	}
	b.WriteString("}")
	return b.String()
}
const fileHeader = `// Generated by running
// maketesttables --url=%s
// DO NOT EDIT
package textseg
`
const testHeader = `var unicode%sTests = []struct {
input []byte
output [][]byte
}{
`
func setupOutput() {
output = bufio.NewWriter(startGofmt())
}
// startGofmt connects output to a gofmt process if -output is set.
func startGofmt() io.Writer {
if *outputFile == "" {
return os.Stdout
}
stdout, err := os.Create(*outputFile)
if err != nil {
log.Fatal(err)
}
// Pipe output to gofmt.
gofmt := exec.Command("gofmt")
fd, err := gofmt.StdinPipe()
if err != nil {
log.Fatal(err)
}
gofmt.Stdout = stdout
gofmt.Stderr = os.Stderr
err = gofmt.Start()
if err != nil {
log.Fatal(err)
}
return fd
}
// flushOutput flushes the buffered generator output, aborting on error.
func flushOutput() {
	if err := output.Flush(); err != nil {
		log.Fatal(err)
	}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,335 +0,0 @@
#!/usr/bin/env ruby
#
# This scripted has been updated to accept more command-line arguments:
#
# -u, --url URL to process
# -m, --machine Machine name
# -p, --properties Properties to add to the machine
# -o, --output Write output to file
#
# Updated by: Marty Schoch <marty.schoch@gmail.com>
#
# This script uses the unicode spec to generate a Ragel state machine
# that recognizes unicode alphanumeric characters. It generates 5
# character classes: uupper, ulower, ualpha, udigit, and ualnum.
# Currently supported encodings are UTF-8 [default] and UCS-4.
#
# Usage: unicode2ragel.rb [options]
# -e, --encoding [ucs4 | utf8] Data encoding
# -h, --help Show this message
#
# This script was originally written as part of the Ferret search
# engine library.
#
# Author: Rakan El-Khalil <rakan@well.com>
require 'optparse'
require 'open-uri'
ENCODINGS = [ :utf8, :ucs4 ]
ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" }
DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt"
DEFAULT_MACHINE_NAME= "WChar"
###
# Display vars & default option
TOTAL_WIDTH = 80
RANGE_WIDTH = 23
@encoding = :utf8
@chart_url = DEFAULT_CHART_URL
machine_name = DEFAULT_MACHINE_NAME
properties = []
@output = $stdout
###
# Option parsing
cli_opts = OptionParser.new do |opts|
opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o|
@encoding = o.downcase.to_sym
end
opts.on("-h", "--help", "Show this message") do
puts opts
exit
end
opts.on("-u", "--url URL", "URL to process") do |o|
@chart_url = o
end
opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o|
machine_name = o
end
opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o|
properties = o
end
opts.on("-o", "--output FILE", "output file") do |o|
@output = File.new(o, "w+")
end
end
cli_opts.parse(ARGV)
unless ENCODINGS.member? @encoding
puts "Invalid encoding: #{@encoding}"
puts cli_opts
exit
end
##
# Downloads the document at url and yields every alpha line's hex
# range and description.
def each_alpha( url, property )
open( url ) do |file|
file.each_line do |line|
next if line =~ /^#/;
next if line !~ /; #{property} #/;
range, description = line.split(/;/)
range.strip!
description.gsub!(/.*#/, '').strip!
if range =~ /\.\./
start, stop = range.split '..'
else start = stop = range
end
yield start.hex .. stop.hex, description
end
end
end
###
# Formats to hex at minimum width
# Format n as uppercase hex, left-padded with a single zero when the digit
# count is odd so the result always has an even width (whole bytes).
def to_hex( n )
  hex = "%0X" % n
  hex.length.odd? ? "0#{hex}" : hex
end
###
# UCS4 is just a straight hex conversion of the unicode codepoint.
def to_ucs4( range )
rangestr = "0x" + to_hex(range.begin)
rangestr << "..0x" + to_hex(range.end) if range.begin != range.end
[ rangestr ]
end
##
# 0x00 - 0x7f -> 0zzzzzzz[7]
# 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6]
# 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6]
# 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6]
UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff]
def to_utf8_enc( n )
r = 0
if n <= 0x7f
r = n
elsif n <= 0x7ff
y = 0xc0 | (n >> 6)
z = 0x80 | (n & 0x3f)
r = y << 8 | z
elsif n <= 0xffff
x = 0xe0 | (n >> 12)
y = 0x80 | (n >> 6) & 0x3f
z = 0x80 | n & 0x3f
r = x << 16 | y << 8 | z
elsif n <= 0x10ffff
w = 0xf0 | (n >> 18)
x = 0x80 | (n >> 12) & 0x3f
y = 0x80 | (n >> 6) & 0x3f
z = 0x80 | n & 0x3f
r = w << 24 | x << 16 | y << 8 | z
end
to_hex(r)
end
def from_utf8_enc( n )
n = n.hex
r = 0
if n <= 0x7f
r = n
elsif n <= 0xdfff
y = (n >> 8) & 0x1f
z = n & 0x3f
r = y << 6 | z
elsif n <= 0xefffff
x = (n >> 16) & 0x0f
y = (n >> 8) & 0x3f
z = n & 0x3f
r = x << 10 | y << 6 | z
elsif n <= 0xf7ffffff
w = (n >> 24) & 0x07
x = (n >> 16) & 0x3f
y = (n >> 8) & 0x3f
z = n & 0x3f
r = w << 18 | x << 12 | y << 6 | z
end
r
end
###
# Given a range, splits it up into ranges that can be continuously
# encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff]
# This is not strictly needed since the current [5.1] unicode standard
# doesn't have ranges that straddle utf8 boundaries. This is included
# for completeness as there is no telling if that will ever change.
def utf8_ranges( range )
ranges = []
UTF8_BOUNDARIES.each do |max|
if range.begin <= max
if range.end <= max
ranges << range
return ranges
end
ranges << (range.begin .. max)
range = (max + 1) .. range.end
end
end
ranges
end
def build_range( start, stop )
size = start.size/2
left = size - 1
return [""] if size < 1
a = start[0..1]
b = stop[0..1]
###
# Shared prefix
if a == b
return build_range(start[2..-1], stop[2..-1]).map do |elt|
"0x#{a} " + elt
end
end
###
# Unshared prefix, end of run
return ["0x#{a}..0x#{b} "] if left.zero?
###
# Unshared prefix, not end of run
# Range can be 0x123456..0x56789A
# Which is equivalent to:
# 0x123456 .. 0x12FFFF
# 0x130000 .. 0x55FFFF
# 0x560000 .. 0x56789A
ret = []
ret << build_range(start, a + "FF" * left)
###
# Only generate middle range if need be.
if a.hex+1 != b.hex
max = to_hex(b.hex - 1)
max = "FF" if b == "FF"
ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left
end
###
# Don't generate last range if it is covered by first range
ret << build_range(b + "00" * left, stop) unless b == "FF"
ret.flatten!
end
# Converts a codepoint range into a flat list of Ragel-formatted UTF-8
# byte-range strings, splitting at UTF-8 encoding-length boundaries first.
def to_utf8( range )
  utf8_ranges( range ).map do |r|
    begin_enc = to_utf8_enc(r.begin)
    end_enc = to_utf8_enc(r.end)
    build_range begin_enc, end_enc
    # BUG FIX: flatten, not flatten! -- flatten! returns nil when the
    # mapped array is already flat, which would make to_utf8 return nil.
  end.flatten
end
##
# Perform a 3-way comparison of the number of codepoints advertised by
# the unicode spec for the given range, the originally parsed range,
# and the resulting utf8 encoded range.
def count_codepoints( code )
  total = 1
  code.split(' ').each do |elt|
    # Only "0xAA..0xBB" segments contribute; single bytes multiply by 1.
    md = elt.match(/0x(.+)\.\.0x(.+)/)
    next unless md
    if @encoding == :utf8
      total *= from_utf8_enc(md[2]) - from_utf8_enc(md[1]) + 1
    else
      total *= md[2].hex - md[1].hex + 1
    end
  end
  total
end
# Checks that the spec-advertised codepoint count (the "[N]" annotation
# in desc, defaulting to 1), the parsed range size, and the total count
# across the encoded ranges all agree.
def is_valid?( range, desc, codes )
  md = desc.match(/\[(\d+)\]/)
  spec_count = md ? md[1].to_i : 1
  range_count = range.end - range.begin + 1
  sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) }
  sum == spec_count and sum == range_count
end
##
# Generate the state machine for one property, writing Ragel source to
# @output. `name` is the machine fragment identifier to emit; `property`
# selects which ranges to fetch from @chart_url via each_alpha.
def generate_machine( name, property )
  # First alternative is preceded by a blank; subsequent ones by "|".
  pipe = " "
  @output.puts " #{name} = "
  each_alpha( @chart_url, property ) do |range, desc|
    codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range)

    #raise "Invalid encoding of range #{range}: #{codes.inspect}" unless
    #  is_valid? range, desc, codes

    # Widen the range column if any encoded range exceeds the default
    # width, stealing the extra space from the description column.
    range_width = codes.map { |a| a.size }.max
    range_width = RANGE_WIDTH if range_width < RANGE_WIDTH
    desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11
    desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH
    if desc.size > desc_width
      # Truncate over-long descriptions with an ellipsis.
      desc = desc[0..desc_width - 4] + "..."
    end
    codes.each_with_index do |r, idx|
      # Only the first emitted line of a range carries the description.
      desc = "" unless idx.zero?
      code = "%-#{range_width}s" % r
      @output.puts " #{pipe} #{code} ##{desc}"
      pipe = "|"
    end
  end
  @output.puts " ;"
  @output.puts ""
end
@output.puts <<EOF
# The following Ragel file was autogenerated with #{$0}
# from: #{@chart_url}
#
# It defines #{properties}.
#
# To use this, make sure that your alphtype is set to #{ALPHTYPES[@encoding]},
# and that your input is in #{@encoding}.
%%{
machine #{machine_name};
EOF
properties.each { |x| generate_machine( x, x ) }
@output.puts <<EOF
}%%
EOF

View File

@ -1,19 +0,0 @@
package textseg
import "unicode/utf8"
// ScanUTF8Sequences is a split function for bufio.Scanner that splits
// on UTF8 sequence boundaries, emitting one encoded rune (or one
// invalid byte) per token.
//
// This is included largely for completeness, since this behavior is already
// built in to Go when ranging over a string.
//
// (The doc comment previously named the wrong function,
// "ScanGraphemeClusters".)
func ScanUTF8Sequences(data []byte, atEOF bool) (int, []byte, error) {
	if len(data) == 0 {
		return 0, nil, nil
	}
	// Only stall for more input when the buffer ends in a *truncated*
	// sequence. utf8.FullRune distinguishes that case from a genuinely
	// invalid byte, which DecodeRune also reports as RuneError; stalling
	// on an invalid byte mid-stream would make the Scanner grow its
	// buffer needlessly before finally emitting the byte at EOF.
	if !atEOF && !utf8.FullRune(data) {
		return 0, nil, nil
	}
	_, seqLen := utf8.DecodeRune(data)
	return seqLen, data[:seqLen], nil
}

View File

@ -1,22 +0,0 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe

View File

@ -1,3 +0,0 @@
language: go
go:
- tip

View File

@ -1,20 +0,0 @@
The MIT License (MIT)
Copyright (c) 2014 Armon Dadgar
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,38 +0,0 @@
go-radix [![Build Status](https://travis-ci.org/armon/go-radix.png)](https://travis-ci.org/armon/go-radix)
=========
Provides the `radix` package that implements a [radix tree](http://en.wikipedia.org/wiki/Radix_tree).
The package only provides a single `Tree` implementation, optimized for sparse nodes.
As a radix tree, it provides the following:
* O(k) operations. In many cases, this can be faster than a hash table since
the hash function is an O(k) operation, and hash tables have very poor cache locality.
* Minimum / Maximum value lookups
* Ordered iteration
For an immutable variant, see [go-immutable-radix](https://github.com/hashicorp/go-immutable-radix).
Documentation
=============
The full documentation is available on [Godoc](http://godoc.org/github.com/armon/go-radix).
Example
=======
Below is a simple example of usage
```go
// Create a tree
r := radix.New()
r.Insert("foo", 1)
r.Insert("bar", 2)
r.Insert("foobar", 2)
// Find the longest prefix match
m, _, _ := r.LongestPrefix("foozip")
if m != "foo" {
panic("should be foo")
}
```

View File

@ -1,543 +0,0 @@
package radix
import (
"sort"
"strings"
)
// WalkFn is used when walking the tree. Takes a
// key and value, returning if iteration should
// be terminated.
type WalkFn func(s string, v interface{}) bool

// leafNode is used to represent a value
type leafNode struct {
	key string      // full key for this entry, retained for walks
	val interface{} // caller-supplied value
}

// edge is used to represent an edge node
type edge struct {
	label byte  // first byte of the child's prefix, used for ordering
	node  *node // child reached by this edge
}

// node is a single node of the radix tree.
type node struct {
	// leaf is used to store possible leaf
	leaf *leafNode

	// prefix is the common prefix we ignore
	prefix string

	// Edges should be stored in-order for iteration.
	// We avoid a fully materialized slice to save memory,
	// since in most cases we expect to be sparse
	edges edges
}
// isLeaf reports whether this node stores a value.
func (n *node) isLeaf() bool {
	return n.leaf != nil
}
// addEdge inserts e into n.edges, keeping the slice sorted by label.
// Binary-searching for the insertion point and shifting once is O(n)
// per insert, instead of appending and re-sorting the whole slice
// (O(n log n)) on every call.
func (n *node) addEdge(e edge) {
	num := len(n.edges)
	idx := sort.Search(num, func(i int) bool {
		return n.edges[i].label >= e.label
	})
	// Grow by one, shift the tail right, and drop e into place.
	n.edges = append(n.edges, edge{})
	copy(n.edges[idx+1:], n.edges[idx:])
	n.edges[idx] = e
}
// replaceEdge swaps the node pointer of the existing edge whose label
// matches e.label. It panics if the edge is missing: callers only use
// it while splitting a node, where the edge is known to exist.
func (n *node) replaceEdge(e edge) {
	num := len(n.edges)
	// Edges are kept sorted by label, so binary search locates it.
	idx := sort.Search(num, func(i int) bool {
		return n.edges[i].label >= e.label
	})
	if idx < num && n.edges[idx].label == e.label {
		n.edges[idx].node = e.node
		return
	}
	panic("replacing missing edge")
}
// getEdge returns the child reached by the edge with the given label,
// or nil when no such edge exists. Edges are sorted by label, so a
// binary search suffices.
func (n *node) getEdge(label byte) *node {
	idx := sort.Search(len(n.edges), func(i int) bool {
		return n.edges[i].label >= label
	})
	if idx == len(n.edges) || n.edges[idx].label != label {
		return nil
	}
	return n.edges[idx].node
}
// delEdge removes the edge with the given label, if present, preserving
// the sorted order of the remaining edges. Missing labels are a no-op.
func (n *node) delEdge(label byte) {
	num := len(n.edges)
	idx := sort.Search(num, func(i int) bool {
		return n.edges[i].label >= label
	})
	if idx < num && n.edges[idx].label == label {
		copy(n.edges[idx:], n.edges[idx+1:])
		// Zero the duplicated tail element so the dropped *node is not
		// kept alive by the slice's backing array.
		n.edges[len(n.edges)-1] = edge{}
		n.edges = n.edges[:len(n.edges)-1]
	}
}
// edges is a label-sorted slice of edge implementing sort.Interface.
type edges []edge

func (e edges) Len() int {
	return len(e)
}

func (e edges) Less(i, j int) bool {
	return e[i].label < e[j].label
}

func (e edges) Swap(i, j int) {
	e[i], e[j] = e[j], e[i]
}

// Sort orders the edges by label.
func (e edges) Sort() {
	sort.Sort(e)
}
// Tree implements a radix tree. This can be treated as a
// Dictionary abstract data type. The main advantage over
// a standard hash map is prefix-based lookups and
// ordered iteration.
type Tree struct {
	root *node // always non-nil; its prefix is the empty string
	size int   // number of values stored in the tree
}

// New returns an empty Tree
func New() *Tree {
	return NewFromMap(nil)
}

// NewFromMap returns a new tree containing the keys
// from an existing map
func NewFromMap(m map[string]interface{}) *Tree {
	t := &Tree{root: &node{}}
	for k, v := range m {
		t.Insert(k, v)
	}
	return t
}

// Len is used to return the number of elements in the tree
func (t *Tree) Len() int {
	return t.size
}
// longestPrefix reports the number of leading bytes shared by k1 and k2.
func longestPrefix(k1, k2 string) int {
	limit := len(k2)
	if len(k1) < limit {
		limit = len(k1)
	}
	i := 0
	for i < limit && k1[i] == k2[i] {
		i++
	}
	return i
}
// Insert is used to add a new entry or update
// an existing entry. Returns the old value and whether an existing
// entry was updated.
func (t *Tree) Insert(s string, v interface{}) (interface{}, bool) {
	var parent *node
	n := t.root
	search := s
	for {
		// Handle key exhaustion: the whole key has been consumed, so
		// the value belongs on the current node.
		if len(search) == 0 {
			if n.isLeaf() {
				old := n.leaf.val
				n.leaf.val = v
				return old, true
			}

			n.leaf = &leafNode{
				key: s,
				val: v,
			}
			t.size++
			return nil, false
		}

		// Look for the edge
		parent = n
		n = n.getEdge(search[0])

		// No edge, create one
		if n == nil {
			e := edge{
				label: search[0],
				node: &node{
					leaf: &leafNode{
						key: s,
						val: v,
					},
					prefix: search,
				},
			}
			parent.addEdge(e)
			t.size++
			return nil, false
		}

		// Determine longest prefix of the search key on match
		commonPrefix := longestPrefix(search, n.prefix)
		if commonPrefix == len(n.prefix) {
			// Edge prefix fully matched: descend and keep consuming.
			search = search[commonPrefix:]
			continue
		}

		// Partial match: split the node so the shared prefix gets its
		// own intermediate node.
		t.size++
		child := &node{
			prefix: search[:commonPrefix],
		}
		parent.replaceEdge(edge{
			label: search[0],
			node:  child,
		})

		// Restore the existing node
		child.addEdge(edge{
			label: n.prefix[commonPrefix],
			node:  n,
		})
		n.prefix = n.prefix[commonPrefix:]

		// Create a new leaf node
		leaf := &leafNode{
			key: s,
			val: v,
		}

		// If the new key is a subset, add to this node
		search = search[commonPrefix:]
		if len(search) == 0 {
			child.leaf = leaf
			return nil, false
		}

		// Create a new edge for the node
		child.addEdge(edge{
			label: search[0],
			node: &node{
				leaf:   leaf,
				prefix: search,
			},
		})
		return nil, false
	}
}
// Delete is used to delete a key, returning the previous
// value and if it was deleted
func (t *Tree) Delete(s string) (interface{}, bool) {
	var parent *node
	var label byte
	n := t.root
	search := s
	for {
		// Check for key exhaustion: the full key matched this node.
		if len(search) == 0 {
			if !n.isLeaf() {
				break
			}
			goto DELETE
		}

		// Look for an edge
		parent = n
		label = search[0]
		n = n.getEdge(label)
		if n == nil {
			break
		}

		// Consume the search prefix
		if strings.HasPrefix(search, n.prefix) {
			search = search[len(n.prefix):]
		} else {
			break
		}
	}
	return nil, false

DELETE:
	// Delete the leaf
	leaf := n.leaf
	n.leaf = nil
	t.size--

	// Check if we should delete this node from the parent
	// (only safe when the node has no children of its own).
	if parent != nil && len(n.edges) == 0 {
		parent.delEdge(label)
	}

	// Check if we should merge this node with its only child to keep
	// chains compressed.
	if n != t.root && len(n.edges) == 1 {
		n.mergeChild()
	}

	// Check if we should merge the parent's other child
	if parent != nil && parent != t.root && len(parent.edges) == 1 && !parent.isLeaf() {
		parent.mergeChild()
	}

	return leaf.val, true
}
// DeletePrefix is used to delete the subtree under a prefix.
// Returns how many values were deleted.
// Use this to delete large subtrees efficiently.
func (t *Tree) DeletePrefix(s string) int {
	return t.deletePrefix(nil, t.root, s)
}
// deletePrefix recursively deletes the subtree rooted at the node whose
// path matches prefix, returning the number of values removed.
func (t *Tree) deletePrefix(parent, n *node, prefix string) int {
	// Check for key exhaustion: n is the root of the doomed subtree.
	if len(prefix) == 0 {
		// Remove the leaf node
		subTreeSize := 0
		// recursively walk from all edges of the node to be deleted to
		// count how many values are about to disappear.
		recursiveWalk(n, func(s string, v interface{}) bool {
			subTreeSize++
			return false
		})
		if n.isLeaf() {
			n.leaf = nil
		}
		n.edges = nil // deletes the entire subtree

		// Check if we should merge the parent's other child
		if parent != nil && parent != t.root && len(parent.edges) == 1 && !parent.isLeaf() {
			parent.mergeChild()
		}
		t.size -= subTreeSize
		return subTreeSize
	}

	// Look for an edge
	label := prefix[0]
	child := n.getEdge(label)
	// Nothing to do unless the child's prefix and the remaining search
	// prefix overlap in one direction or the other.
	if child == nil || (!strings.HasPrefix(child.prefix, prefix) && !strings.HasPrefix(prefix, child.prefix)) {
		return 0
	}

	// Consume the search prefix (clamped to empty when the child's
	// prefix extends past it).
	if len(child.prefix) > len(prefix) {
		prefix = prefix[len(prefix):]
	} else {
		prefix = prefix[len(child.prefix):]
	}
	return t.deletePrefix(n, child, prefix)
}
// mergeChild collapses n with its single child: the child's prefix is
// appended to n's, and the child's leaf and edges are adopted. The
// caller must ensure len(n.edges) == 1.
func (n *node) mergeChild() {
	e := n.edges[0]
	child := e.node
	n.prefix = n.prefix + child.prefix
	n.leaf = child.leaf
	n.edges = child.edges
}
// Get is used to lookup a specific key, returning
// the value and if it was found
func (t *Tree) Get(s string) (interface{}, bool) {
	n, search := t.root, s
	for {
		// Key fully consumed: succeed only if this node holds a value.
		if search == "" {
			if n.isLeaf() {
				return n.leaf.val, true
			}
			return nil, false
		}

		// Descend along the edge for the next byte; the child's whole
		// prefix must match for the key to be present.
		child := n.getEdge(search[0])
		if child == nil || !strings.HasPrefix(search, child.prefix) {
			return nil, false
		}
		n, search = child, search[len(child.prefix):]
	}
}
// LongestPrefix is like Get, but instead of an
// exact match, it will return the longest prefix match.
func (t *Tree) LongestPrefix(s string) (string, interface{}, bool) {
	// last remembers the deepest value-bearing node seen on the path.
	var last *leafNode
	n := t.root
	search := s
	for {
		// Look for a leaf node
		if n.isLeaf() {
			last = n.leaf
		}

		// Check for key exhaustion
		if len(search) == 0 {
			break
		}

		// Look for an edge
		n = n.getEdge(search[0])
		if n == nil {
			break
		}

		// Consume the search prefix
		if strings.HasPrefix(search, n.prefix) {
			search = search[len(n.prefix):]
		} else {
			break
		}
	}
	if last != nil {
		return last.key, last.val, true
	}
	return "", nil, false
}
// Minimum is used to return the minimum value in the tree
func (t *Tree) Minimum() (string, interface{}, bool) {
	// Follow the left-most (lowest-label) edges; the first node that
	// carries a value is lexicographically smallest.
	for n := t.root; ; {
		if n.isLeaf() {
			return n.leaf.key, n.leaf.val, true
		}
		if len(n.edges) == 0 {
			return "", nil, false
		}
		n = n.edges[0].node
	}
}
// Maximum is used to return the maximum value in the tree
func (t *Tree) Maximum() (string, interface{}, bool) {
	// Descend along the right-most (highest-label) edge until a node
	// with no children is reached; its leaf, if any, is the maximum.
	n := t.root
	for len(n.edges) > 0 {
		n = n.edges[len(n.edges)-1].node
	}
	if n.isLeaf() {
		return n.leaf.key, n.leaf.val, true
	}
	return "", nil, false
}
// Walk is used to walk the tree in key order (pre-order over nodes),
// stopping early if fn returns true.
func (t *Tree) Walk(fn WalkFn) {
	recursiveWalk(t.root, fn)
}
// WalkPrefix is used to walk the tree under a prefix,
// visiting every entry whose key starts with prefix.
func (t *Tree) WalkPrefix(prefix string, fn WalkFn) {
	n := t.root
	search := prefix
	for {
		// Check for key exhaustion: everything below n matches.
		if len(search) == 0 {
			recursiveWalk(n, fn)
			return
		}

		// Look for an edge
		n = n.getEdge(search[0])
		if n == nil {
			break
		}

		// Consume the search prefix
		if strings.HasPrefix(search, n.prefix) {
			search = search[len(n.prefix):]
		} else if strings.HasPrefix(n.prefix, search) {
			// Child may be under our search prefix
			recursiveWalk(n, fn)
			return
		} else {
			break
		}
	}
}
// WalkPath is used to walk the tree, but only visiting nodes
// from the root down to a given leaf. Where WalkPrefix walks
// all the entries *under* the given prefix, this walks the
// entries *above* the given prefix.
func (t *Tree) WalkPath(path string, fn WalkFn) {
	n := t.root
	search := path
	for {
		// Visit the leaf values if any; a true return aborts the walk.
		if n.leaf != nil && fn(n.leaf.key, n.leaf.val) {
			return
		}

		// Check for key exhaustion
		if len(search) == 0 {
			return
		}

		// Look for an edge
		n = n.getEdge(search[0])
		if n == nil {
			return
		}

		// Consume the search prefix
		if strings.HasPrefix(search, n.prefix) {
			search = search[len(n.prefix):]
		} else {
			// Edge diverges from the path: nothing further to visit.
			break
		}
	}
}
// recursiveWalk is used to do a pre-order walk of a node
// recursively. Returns true if the walk should be aborted
func recursiveWalk(n *node, fn WalkFn) bool {
	// Visit this node's value first (pre-order).
	if n.leaf != nil {
		if aborted := fn(n.leaf.key, n.leaf.val); aborted {
			return true
		}
	}

	// Then recurse into each child, in label order.
	for _, e := range n.edges {
		if aborted := recursiveWalk(e.node, fn); aborted {
			return true
		}
	}
	return false
}
// ToMap is used to walk the tree and convert it into a map
func (t *Tree) ToMap() map[string]interface{} {
	// Pre-size to the element count to avoid rehashing while filling.
	out := make(map[string]interface{}, t.size)
	t.Walk(func(k string, v interface{}) bool {
		out[k] = v
		return false
	})
	return out
}

View File

@ -1,359 +0,0 @@
package radix
import (
crand "crypto/rand"
"fmt"
"reflect"
"sort"
"testing"
)
// TestRadix inserts 1000 random UUID keys and verifies Len, full Walk
// coverage, Get, Minimum/Maximum, and Delete of every key.
func TestRadix(t *testing.T) {
	var min, max string
	inp := make(map[string]interface{})
	for i := 0; i < 1000; i++ {
		gen := generateUUID()
		inp[gen] = i
		// Track the lexicographically smallest and largest keys.
		if gen < min || i == 0 {
			min = gen
		}
		if gen > max || i == 0 {
			max = gen
		}
	}

	r := NewFromMap(inp)
	if r.Len() != len(inp) {
		t.Fatalf("bad length: %v %v", r.Len(), len(inp))
	}

	// Walk should visit every key exactly once. (Previously this
	// println'd each key to stdout without asserting anything.)
	visited := 0
	r.Walk(func(k string, v interface{}) bool {
		visited++
		return false
	})
	if visited != len(inp) {
		t.Fatalf("walk visited %v keys, want %v", visited, len(inp))
	}

	for k, v := range inp {
		out, ok := r.Get(k)
		if !ok {
			t.Fatalf("missing key: %v", k)
		}
		if out != v {
			t.Fatalf("value mis-match: %v %v", out, v)
		}
	}

	// Check min and max
	outMin, _, _ := r.Minimum()
	if outMin != min {
		t.Fatalf("bad minimum: %v %v", outMin, min)
	}
	outMax, _, _ := r.Maximum()
	if outMax != max {
		t.Fatalf("bad maximum: %v %v", outMax, max)
	}

	for k, v := range inp {
		out, ok := r.Delete(k)
		if !ok {
			t.Fatalf("missing key: %v", k)
		}
		if out != v {
			t.Fatalf("value mis-match: %v %v", out, v)
		}
	}
	if r.Len() != 0 {
		t.Fatalf("bad length: %v", r.Len())
	}
}
// TestRoot exercises the empty-string key, which lives on the root node
// and follows a different code path than ordinary keys.
func TestRoot(t *testing.T) {
	r := New()
	// Deleting from an empty tree must report not-found.
	_, ok := r.Delete("")
	if ok {
		t.Fatalf("bad")
	}
	// First insert of "" is a create, not an update.
	_, ok = r.Insert("", true)
	if ok {
		t.Fatalf("bad")
	}
	val, ok := r.Get("")
	if !ok || val != true {
		t.Fatalf("bad: %v", val)
	}
	val, ok = r.Delete("")
	if !ok || val != true {
		t.Fatalf("bad: %v", val)
	}
}
// TestDelete verifies that nested keys ("" < "A" < "AB") can each be
// deleted, exercising the node-merge logic on the way out.
func TestDelete(t *testing.T) {
	r := New()

	s := []string{"", "A", "AB"}

	for _, ss := range s {
		r.Insert(ss, true)
	}

	for _, ss := range s {
		_, ok := r.Delete(ss)
		if !ok {
			t.Fatalf("bad %q", ss)
		}
	}
}
// TestDeletePrefix verifies DeletePrefix removes exactly the keys under
// the given prefix and reports the number of values deleted.
func TestDeletePrefix(t *testing.T) {
	// NOTE: the field declarations previously read "inp[] string" /
	// "out[] string", which is not valid Go syntax ("[]" belongs to the
	// type, not the identifier).
	type exp struct {
		inp        []string // keys to insert
		prefix     string   // prefix handed to DeletePrefix
		out        []string // keys expected to remain
		numDeleted int      // expected return value of DeletePrefix
	}

	cases := []exp{
		{[]string{"", "A", "AB", "ABC", "R", "S"}, "A", []string{"", "R", "S"}, 3},
		{[]string{"", "A", "AB", "ABC", "R", "S"}, "ABC", []string{"", "A", "AB", "R", "S"}, 1},
		{[]string{"", "A", "AB", "ABC", "R", "S"}, "", []string{}, 6},
		{[]string{"", "A", "AB", "ABC", "R", "S"}, "S", []string{"", "A", "AB", "ABC", "R"}, 1},
		{[]string{"", "A", "AB", "ABC", "R", "S"}, "SS", []string{"", "A", "AB", "ABC", "R", "S"}, 0},
	}

	for _, test := range cases {
		r := New()
		for _, ss := range test.inp {
			r.Insert(ss, true)
		}

		deleted := r.DeletePrefix(test.prefix)
		if deleted != test.numDeleted {
			t.Fatalf("Bad delete, expected %v to be deleted but got %v", test.numDeleted, deleted)
		}

		out := []string{}
		fn := func(s string, v interface{}) bool {
			out = append(out, s)
			return false
		}
		r.Walk(fn)

		if !reflect.DeepEqual(out, test.out) {
			t.Fatalf("mis-match: %v %v", out, test.out)
		}
	}
}
// TestLongestPrefix checks that LongestPrefix returns the longest stored
// key that is a prefix of the query (including the empty key fallback).
func TestLongestPrefix(t *testing.T) {
	r := New()

	keys := []string{
		"",
		"foo",
		"foobar",
		"foobarbaz",
		"foobarbazzip",
		"foozip",
	}
	for _, k := range keys {
		r.Insert(k, nil)
	}
	if r.Len() != len(keys) {
		t.Fatalf("bad len: %v %v", r.Len(), len(keys))
	}

	type exp struct {
		inp string // query
		out string // expected longest stored prefix
	}
	cases := []exp{
		{"a", ""},
		{"abc", ""},
		{"fo", ""},
		{"foo", "foo"},
		{"foob", "foo"},
		{"foobar", "foobar"},
		{"foobarba", "foobar"},
		{"foobarbaz", "foobarbaz"},
		{"foobarbazzi", "foobarbaz"},
		{"foobarbazzip", "foobarbazzip"},
		{"foozi", "foo"},
		{"foozip", "foozip"},
		{"foozipzap", "foozip"},
	}
	for _, test := range cases {
		m, _, ok := r.LongestPrefix(test.inp)
		if !ok {
			t.Fatalf("no match: %v", test)
		}
		if m != test.out {
			t.Fatalf("mis-match: %v %v", m, test)
		}
	}
}
// TestWalkPrefix checks that WalkPrefix visits exactly the keys *under*
// each prefix, including prefixes that end mid-edge.
func TestWalkPrefix(t *testing.T) {
	r := New()

	keys := []string{
		"foobar",
		"foo/bar/baz",
		"foo/baz/bar",
		"foo/zip/zap",
		"zipzap",
	}
	for _, k := range keys {
		r.Insert(k, nil)
	}
	if r.Len() != len(keys) {
		t.Fatalf("bad len: %v %v", r.Len(), len(keys))
	}

	type exp struct {
		inp string   // prefix to walk
		out []string // keys expected to be visited (order-insensitive)
	}
	cases := []exp{
		{
			"f",
			[]string{"foobar", "foo/bar/baz", "foo/baz/bar", "foo/zip/zap"},
		},
		{
			"foo",
			[]string{"foobar", "foo/bar/baz", "foo/baz/bar", "foo/zip/zap"},
		},
		{
			"foob",
			[]string{"foobar"},
		},
		{
			"foo/",
			[]string{"foo/bar/baz", "foo/baz/bar", "foo/zip/zap"},
		},
		{
			"foo/b",
			[]string{"foo/bar/baz", "foo/baz/bar"},
		},
		{
			"foo/ba",
			[]string{"foo/bar/baz", "foo/baz/bar"},
		},
		{
			"foo/bar",
			[]string{"foo/bar/baz"},
		},
		{
			"foo/bar/baz",
			[]string{"foo/bar/baz"},
		},
		{
			"foo/bar/bazoo",
			[]string{},
		},
		{
			"z",
			[]string{"zipzap"},
		},
	}

	for _, test := range cases {
		out := []string{}
		fn := func(s string, v interface{}) bool {
			out = append(out, s)
			return false
		}
		r.WalkPrefix(test.inp, fn)
		// Sort both sides so the comparison ignores visit order.
		sort.Strings(out)
		sort.Strings(test.out)
		if !reflect.DeepEqual(out, test.out) {
			t.Fatalf("mis-match: %v %v", out, test.out)
		}
	}
}
// TestWalkPath checks that WalkPath visits exactly the stored keys that
// are prefixes of (i.e. lie *above*) each query path.
func TestWalkPath(t *testing.T) {
	r := New()

	keys := []string{
		"foo",
		"foo/bar",
		"foo/bar/baz",
		"foo/baz/bar",
		"foo/zip/zap",
		"zipzap",
	}
	for _, k := range keys {
		r.Insert(k, nil)
	}
	if r.Len() != len(keys) {
		t.Fatalf("bad len: %v %v", r.Len(), len(keys))
	}

	type exp struct {
		inp string   // path to walk down to
		out []string // ancestor keys expected to be visited
	}
	cases := []exp{
		{
			"f",
			[]string{},
		},
		{
			"foo",
			[]string{"foo"},
		},
		{
			"foo/",
			[]string{"foo"},
		},
		{
			"foo/ba",
			[]string{"foo"},
		},
		{
			"foo/bar",
			[]string{"foo", "foo/bar"},
		},
		{
			"foo/bar/baz",
			[]string{"foo", "foo/bar", "foo/bar/baz"},
		},
		{
			"foo/bar/bazoo",
			[]string{"foo", "foo/bar", "foo/bar/baz"},
		},
		{
			"z",
			[]string{},
		},
	}

	for _, test := range cases {
		out := []string{}
		fn := func(s string, v interface{}) bool {
			out = append(out, s)
			return false
		}
		r.WalkPath(test.inp, fn)
		// Sort both sides so the comparison ignores visit order.
		sort.Strings(out)
		sort.Strings(test.out)
		if !reflect.DeepEqual(out, test.out) {
			t.Fatalf("mis-match: %v %v", out, test.out)
		}
	}
}
// generateUUID is used to generate a random UUID-formatted string from
// 16 bytes of crypto/rand entropy. It panics if the system's secure
// random source cannot be read.
func generateUUID() string {
	var raw [16]byte
	if _, err := crand.Read(raw[:]); err != nil {
		panic(fmt.Errorf("failed to read random bytes: %v", err))
	}

	return fmt.Sprintf("%08x-%04x-%04x-%04x-%12x",
		raw[0:4],
		raw[4:6],
		raw[6:8],
		raw[8:10],
		raw[10:16])
}

View File

@ -1,11 +0,0 @@
dist
/doc
/doc-staging
.yardoc
Gemfile.lock
awstesting/integration/smoke/**/importmarker__.go
awstesting/integration/smoke/_test/
/vendor/bin/
/vendor/pkg/
/vendor/src/
/private/model/cli/gen-api/gen-api

View File

@ -1,14 +0,0 @@
{
"PkgHandler": {
"Pattern": "/sdk-for-go/api/",
"StripPrefix": "/sdk-for-go/api",
"Include": ["/src/github.com/aws/aws-sdk-go/aws", "/src/github.com/aws/aws-sdk-go/service"],
"Exclude": ["/src/cmd", "/src/github.com/aws/aws-sdk-go/awstesting", "/src/github.com/aws/aws-sdk-go/awsmigrate"],
"IgnoredSuffixes": ["iface"]
},
"Github": {
"Tag": "master",
"Repo": "/aws/aws-sdk-go",
"UseGithub": true
}
}

Some files were not shown because too many files have changed in this diff Show More