This commit is contained in:
parent 967e033b7e
commit 8825a26ae6
593 changed files with 88072 additions and 28 deletions
3  .gitignore  vendored
@@ -5,3 +5,6 @@ log
 *.iml
 traefik
 traefik.toml
+
+Godeps/_workspace/bin
+Godeps/_workspace/pkg
Dockerfile
@@ -1,3 +1,3 @@
 FROM scratch
-COPY dist/traefik_linux-386 /traefik
+COPY dist/traefik /
 ENTRYPOINT ["/traefik"]
135  Godeps/Godeps.json  generated  Normal file
@@ -0,0 +1,135 @@
{
	"ImportPath": "github.com/emilevauge/traefik",
	"GoVersion": "go1.5",
	"Packages": [
		"./..."
	],
	"Deps": [
		{
			"ImportPath": "github.com/BurntSushi/toml",
			"Rev": "bd2bdf7f18f849530ef7a1c29a4290217cab32a1"
		},
		{
			"ImportPath": "github.com/BurntSushi/ty",
			"Rev": "6add9cd6ad42d389d6ead1dde60b4ad71e46fd74"
		},
		{
			"ImportPath": "github.com/alecthomas/template",
			"Rev": "b867cc6ab45cece8143cfcc6fc9c77cf3f2c23c0"
		},
		{
			"ImportPath": "github.com/alecthomas/units",
			"Rev": "6b4e7dc5e3143b85ea77909c72caf89416fc2915"
		},
		{
			"ImportPath": "github.com/cenkalti/backoff",
			"Rev": "4dc77674aceaabba2c7e3da25d4c823edfb73f99"
		},
		{
			"ImportPath": "github.com/codahale/hdrhistogram",
			"Rev": "954f16e8b9ef0e5d5189456aa4c1202758e04f17"
		},
		{
			"ImportPath": "github.com/codegangsta/negroni",
			"Comment": "v0.1-70-gc7477ad",
			"Rev": "c7477ad8e330bef55bf1ebe300cf8aa67c492d1b"
		},
		{
			"ImportPath": "github.com/elazarl/go-bindata-assetfs",
			"Rev": "d5cac425555ca5cf00694df246e04f05e6a55150"
		},
		{
			"ImportPath": "github.com/fsouza/go-dockerclient",
			"Rev": "0239034d42f665efa17fd77c39f891c2f9f32922"
		},
		{
			"ImportPath": "github.com/gambol99/go-marathon",
			"Rev": "0ba31bcb0d7633ba1888d744c42990eb15281cf1"
		},
		{
			"ImportPath": "github.com/gorilla/context",
			"Rev": "215affda49addc4c8ef7e2534915df2c8c35c6cd"
		},
		{
			"ImportPath": "github.com/gorilla/handlers",
			"Rev": "40694b40f4a928c062f56849989d3e9cd0570e5f"
		},
		{
			"ImportPath": "github.com/gorilla/mux",
			"Rev": "f15e0c49460fd49eebe2bcc8486b05d1bef68d3a"
		},
		{
			"ImportPath": "github.com/hashicorp/consul/api",
			"Comment": "v0.5.2-313-gde08067",
			"Rev": "de080672fee9e6104572eeea89eccdca135bb918"
		},
		{
			"ImportPath": "github.com/mailgun/log",
			"Rev": "44874009257d4d47ba9806f1b7f72a32a015e4d8"
		},
		{
			"ImportPath": "github.com/mailgun/oxy/cbreaker",
			"Rev": "547c334d658398c05b346c0b79d8f47ba2e1473b"
		},
		{
			"ImportPath": "github.com/mailgun/oxy/forward",
			"Rev": "547c334d658398c05b346c0b79d8f47ba2e1473b"
		},
		{
			"ImportPath": "github.com/mailgun/oxy/memmetrics",
			"Rev": "547c334d658398c05b346c0b79d8f47ba2e1473b"
		},
		{
			"ImportPath": "github.com/mailgun/oxy/roundrobin",
			"Rev": "547c334d658398c05b346c0b79d8f47ba2e1473b"
		},
		{
			"ImportPath": "github.com/mailgun/oxy/utils",
			"Rev": "547c334d658398c05b346c0b79d8f47ba2e1473b"
		},
		{
			"ImportPath": "github.com/mailgun/predicate",
			"Rev": "cb0bff91a7ab7cf7571e661ff883fc997bc554a3"
		},
		{
			"ImportPath": "github.com/mailgun/timetools",
			"Rev": "fd192d755b00c968d312d23f521eb0cdc6f66bd0"
		},
		{
			"ImportPath": "github.com/op/go-logging",
			"Rev": "e8d5414f0947014548c2334044a0fac13187dfee"
		},
		{
			"ImportPath": "github.com/thoas/stats",
			"Rev": "54ed61c2b47e263ae2f01b86837b0c4bd1da28e8"
		},
		{
			"ImportPath": "github.com/tylerb/graceful",
			"Comment": "v1.2.1",
			"Rev": "ac9ebe4f1ee151ac1eeeaef32957085cba64d508"
		},
		{
			"ImportPath": "github.com/unrolled/render",
			"Rev": "26b4e3aac686940fe29521545afad9966ddfc80c"
		},
		{
			"ImportPath": "golang.org/x/net/netutil",
			"Rev": "d9558e5c97f85372afee28cf2b6059d7d3818919"
		},
		{
			"ImportPath": "gopkg.in/alecthomas/kingpin.v2",
			"Comment": "v2.0.12",
			"Rev": "639879d6110b1b0409410c7b737ef0bb18325038"
		},
		{
			"ImportPath": "gopkg.in/fsnotify.v1",
			"Comment": "v1.2.0",
			"Rev": "96c060f6a6b7e0d6f75fddd10efeaca3e5d1bcb0"
		},
		{
			"ImportPath": "gopkg.in/mgo.v2/bson",
			"Comment": "r2015.06.03-5-g22287ba",
			"Rev": "22287bab4379e1fbf6002fb4eb769888f3fb224c"
		}
	]
}
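For readers unfamiliar with godep-style vendoring, here is a minimal sketch (not part of the commit; the config struct and values are hypothetical) of how code in this repository consumes a dependency pinned in Godeps.json: imports keep their canonical paths, and the classic godep tool resolves them from `Godeps/_workspace/src` when building with `godep go build`.

```go
// Hypothetical illustration: imports stay canonical; godep supplies the
// pinned copy from Godeps/_workspace/src at build time.
package main

import (
	"fmt"

	"github.com/BurntSushi/toml" // pinned in Godeps.json at rev bd2bdf7f18f8...
)

func main() {
	var conf struct {
		Port int
	}
	// toml.Decode is the API vendored by this commit (see decode.go below).
	if _, err := toml.Decode(`port = 8080`, &conf); err != nil {
		fmt.Println("decode error:", err)
		return
	}
	fmt.Println("port =", conf.Port)
}
```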
5  Godeps/Readme  generated  Normal file
@@ -0,0 +1,5 @@
This directory tree is generated automatically by godep.

Please do not edit.

See https://github.com/tools/godep for more information.
2  Godeps/_workspace/.gitignore  generated  vendored  Normal file
@@ -0,0 +1,2 @@
/pkg
/bin
5  Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore  generated  vendored  Normal file
@@ -0,0 +1,5 @@
TAGS
tags
.*.swp
tomlcheck/tomlcheck
toml.test
12  Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml  generated  vendored  Normal file
@@ -0,0 +1,12 @@
language: go
go:
  - 1.1
  - 1.2
  - tip
install:
  - go install ./...
  - go get github.com/BurntSushi/toml-test
script:
  - export PATH="$PATH:$HOME/gopath/bin"
  - make test
3  Godeps/_workspace/src/github.com/BurntSushi/toml/COMPATIBLE  generated  vendored  Normal file
@@ -0,0 +1,3 @@
Compatible with TOML version
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)
14  Godeps/_workspace/src/github.com/BurntSushi/toml/COPYING  generated  vendored  Normal file
@@ -0,0 +1,14 @@
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004

Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>

Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. You just DO WHAT THE FUCK YOU WANT TO.
19  Godeps/_workspace/src/github.com/BurntSushi/toml/Makefile  generated  vendored  Normal file
@@ -0,0 +1,19 @@
install:
	go install ./...

test: install
	go test -v
	toml-test toml-test-decoder
	toml-test -encoder toml-test-encoder

fmt:
	gofmt -w *.go */*.go
	colcheck *.go */*.go

tags:
	find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS

push:
	git push origin master
	git push github master
220  Godeps/_workspace/src/github.com/BurntSushi/toml/README.md  generated  vendored  Normal file
@@ -0,0 +1,220 @@
## TOML parser and encoder for Go with reflection

TOML stands for Tom's Obvious, Minimal Language. This Go package provides a
reflection interface similar to Go's standard library `json` and `xml`
packages. This package also supports the `encoding.TextUnmarshaler` and
`encoding.TextMarshaler` interfaces so that you can define custom data
representations. (There is an example of this below.)

Spec: https://github.com/mojombo/toml

Compatible with TOML version
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)

Documentation: http://godoc.org/github.com/BurntSushi/toml

Installation:

```bash
go get github.com/BurntSushi/toml
```

Try the toml validator:

```bash
go get github.com/BurntSushi/toml/cmd/tomlv
tomlv some-toml-file.toml
```

[![Build status](https://api.travis-ci.org/BurntSushi/toml.png)](https://travis-ci.org/BurntSushi/toml)


### Testing

This package passes all tests in
[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder
and the encoder.

### Examples

This package works similarly to how the Go standard library handles `XML`
and `JSON`. Namely, data is loaded into Go values via reflection.

For the simplest example, consider some TOML file as just a list of keys
and values:

```toml
Age = 25
Cats = [ "Cauchy", "Plato" ]
Pi = 3.14
Perfection = [ 6, 28, 496, 8128 ]
DOB = 1987-07-05T05:45:00Z
```

Which could be defined in Go as:

```go
type Config struct {
	Age        int
	Cats       []string
	Pi         float64
	Perfection []int
	DOB        time.Time // requires `import time`
}
```

And then decoded with:

```go
var conf Config
if _, err := toml.Decode(tomlData, &conf); err != nil {
	// handle error
}
```

You can also use struct tags if your struct field name doesn't map to a TOML
key value directly:

```toml
some_key_NAME = "wat"
```

```go
type TOML struct {
	ObscureKey string `toml:"some_key_NAME"`
}
```

### Using the `encoding.TextUnmarshaler` interface

Here's an example that automatically parses duration strings into
`time.Duration` values:

```toml
[[song]]
name = "Thunder Road"
duration = "4m49s"

[[song]]
name = "Stairway to Heaven"
duration = "8m03s"
```

Which can be decoded with:

```go
type song struct {
	Name     string
	Duration duration
}
type songs struct {
	Song []song
}
var favorites songs
if _, err := Decode(blob, &favorites); err != nil {
	log.Fatal(err)
}

for _, s := range favorites.Song {
	fmt.Printf("%s (%s)\n", s.Name, s.Duration)
}
```

And you'll also need a `duration` type that satisfies the
`encoding.TextUnmarshaler` interface:

```go
type duration struct {
	time.Duration
}

func (d *duration) UnmarshalText(text []byte) error {
	var err error
	d.Duration, err = time.ParseDuration(string(text))
	return err
}
```

### More complex usage

Here's an example of how to load the example from the official spec page:

```toml
# This is a TOML document. Boom.

title = "TOML Example"

[owner]
name = "Tom Preston-Werner"
organization = "GitHub"
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
dob = 1979-05-27T07:32:00Z # First class dates? Why not?

[database]
server = "192.168.1.1"
ports = [ 8001, 8001, 8002 ]
connection_max = 5000
enabled = true

[servers]

  # You can indent as you please. Tabs or spaces. TOML don't care.
  [servers.alpha]
  ip = "10.0.0.1"
  dc = "eqdc10"

  [servers.beta]
  ip = "10.0.0.2"
  dc = "eqdc10"

[clients]
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it

# Line breaks are OK when inside arrays
hosts = [
  "alpha",
  "omega"
]
```

And the corresponding Go types are:

```go
type tomlConfig struct {
	Title   string
	Owner   ownerInfo
	DB      database `toml:"database"`
	Servers map[string]server
	Clients clients
}

type ownerInfo struct {
	Name string
	Org  string `toml:"organization"`
	Bio  string
	DOB  time.Time
}

type database struct {
	Server  string
	Ports   []int
	ConnMax int `toml:"connection_max"`
	Enabled bool
}

type server struct {
	IP string
	DC string
}

type clients struct {
	Data  [][]interface{}
	Hosts []string
}
```

Note that a case insensitive match will be tried if an exact match can't be
found.

A working example of the above can be found in `_examples/example.{go,toml}`.
14  Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING  generated  vendored  Normal file
@@ -0,0 +1,14 @@
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004

Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>

Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. You just DO WHAT THE FUCK YOU WANT TO.
14  Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md  generated  vendored  Normal file
@@ -0,0 +1,14 @@
# Implements the TOML test suite interface

This is an implementation of the interface expected by
[toml-test](https://github.com/BurntSushi/toml-test) for my
[toml parser written in Go](https://github.com/BurntSushi/toml).
In particular, it maps TOML data on `stdin` to a JSON format on `stdout`.


Compatible with TOML version
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)

Compatible with `toml-test` version
[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0)
90  Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go  generated  vendored  Normal file
@@ -0,0 +1,90 @@
// Command toml-test-decoder satisfies the toml-test interface for testing
// TOML decoders. Namely, it accepts TOML on stdin and outputs JSON on stdout.
package main

import (
	"encoding/json"
	"flag"
	"fmt"
	"log"
	"os"
	"path"
	"time"

	"github.com/BurntSushi/toml"
)

func init() {
	log.SetFlags(0)

	flag.Usage = usage
	flag.Parse()
}

func usage() {
	log.Printf("Usage: %s < toml-file\n", path.Base(os.Args[0]))
	flag.PrintDefaults()

	os.Exit(1)
}

func main() {
	if flag.NArg() != 0 {
		flag.Usage()
	}

	var tmp interface{}
	if _, err := toml.DecodeReader(os.Stdin, &tmp); err != nil {
		log.Fatalf("Error decoding TOML: %s", err)
	}

	typedTmp := translate(tmp)
	if err := json.NewEncoder(os.Stdout).Encode(typedTmp); err != nil {
		log.Fatalf("Error encoding JSON: %s", err)
	}
}

func translate(tomlData interface{}) interface{} {
	switch orig := tomlData.(type) {
	case map[string]interface{}:
		typed := make(map[string]interface{}, len(orig))
		for k, v := range orig {
			typed[k] = translate(v)
		}
		return typed
	case []map[string]interface{}:
		typed := make([]map[string]interface{}, len(orig))
		for i, v := range orig {
			typed[i] = translate(v).(map[string]interface{})
		}
		return typed
	case []interface{}:
		typed := make([]interface{}, len(orig))
		for i, v := range orig {
			typed[i] = translate(v)
		}

		// We don't really need to tag arrays, but let's be future proof.
		// (If TOML ever supports tuples, we'll need this.)
		return tag("array", typed)
	case time.Time:
		return tag("datetime", orig.Format("2006-01-02T15:04:05Z"))
	case bool:
		return tag("bool", fmt.Sprintf("%v", orig))
	case int64:
		return tag("integer", fmt.Sprintf("%d", orig))
	case float64:
		return tag("float", fmt.Sprintf("%v", orig))
	case string:
		return tag("string", orig)
	}

	panic(fmt.Sprintf("Unknown type: %T", tomlData))
}

func tag(typeName string, data interface{}) map[string]interface{} {
	return map[string]interface{}{
		"type":  typeName,
		"value": data,
	}
}
14  Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING  generated  vendored  Normal file
@@ -0,0 +1,14 @@
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004

Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>

Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. You just DO WHAT THE FUCK YOU WANT TO.
14  Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md  generated  vendored  Normal file
@@ -0,0 +1,14 @@
# Implements the TOML test suite interface for TOML encoders

This is an implementation of the interface expected by
[toml-test](https://github.com/BurntSushi/toml-test) for the
[TOML encoder](https://github.com/BurntSushi/toml).
In particular, it maps JSON data on `stdin` to a TOML format on `stdout`.


Compatible with TOML version
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)

Compatible with `toml-test` version
[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0)
131  Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go  generated  vendored  Normal file
@@ -0,0 +1,131 @@
// Command toml-test-encoder satisfies the toml-test interface for testing
// TOML encoders. Namely, it accepts JSON on stdin and outputs TOML on stdout.
package main

import (
	"encoding/json"
	"flag"
	"log"
	"os"
	"path"
	"strconv"
	"time"

	"github.com/BurntSushi/toml"
)

func init() {
	log.SetFlags(0)

	flag.Usage = usage
	flag.Parse()
}

func usage() {
	log.Printf("Usage: %s < json-file\n", path.Base(os.Args[0]))
	flag.PrintDefaults()

	os.Exit(1)
}

func main() {
	if flag.NArg() != 0 {
		flag.Usage()
	}

	var tmp interface{}
	if err := json.NewDecoder(os.Stdin).Decode(&tmp); err != nil {
		log.Fatalf("Error decoding JSON: %s", err)
	}

	tomlData := translate(tmp)
	if err := toml.NewEncoder(os.Stdout).Encode(tomlData); err != nil {
		log.Fatalf("Error encoding TOML: %s", err)
	}
}

func translate(typedJson interface{}) interface{} {
	switch v := typedJson.(type) {
	case map[string]interface{}:
		if len(v) == 2 && in("type", v) && in("value", v) {
			return untag(v)
		}
		m := make(map[string]interface{}, len(v))
		for k, v2 := range v {
			m[k] = translate(v2)
		}
		return m
	case []interface{}:
		tabArray := make([]map[string]interface{}, len(v))
		for i := range v {
			if m, ok := translate(v[i]).(map[string]interface{}); ok {
				tabArray[i] = m
			} else {
				log.Fatalf("JSON arrays may only contain objects. This " +
					"corresponds to only tables being allowed in " +
					"TOML table arrays.")
			}
		}
		return tabArray
	}
	log.Fatalf("Unrecognized JSON format '%T'.", typedJson)
	panic("unreachable")
}

func untag(typed map[string]interface{}) interface{} {
	t := typed["type"].(string)
	v := typed["value"]
	switch t {
	case "string":
		return v.(string)
	case "integer":
		v := v.(string)
		n, err := strconv.Atoi(v)
		if err != nil {
			log.Fatalf("Could not parse '%s' as integer: %s", v, err)
		}
		return n
	case "float":
		v := v.(string)
		f, err := strconv.ParseFloat(v, 64)
		if err != nil {
			log.Fatalf("Could not parse '%s' as float64: %s", v, err)
		}
		return f
	case "datetime":
		v := v.(string)
		t, err := time.Parse("2006-01-02T15:04:05Z", v)
		if err != nil {
			log.Fatalf("Could not parse '%s' as a datetime: %s", v, err)
		}
		return t
	case "bool":
		v := v.(string)
		switch v {
		case "true":
			return true
		case "false":
			return false
		}
		log.Fatalf("Could not parse '%s' as a boolean.", v)
	case "array":
		v := v.([]interface{})
		array := make([]interface{}, len(v))
		for i := range v {
			if m, ok := v[i].(map[string]interface{}); ok {
				array[i] = untag(m)
			} else {
				log.Fatalf("Arrays may only contain other arrays or "+
					"primitive values, but found a '%T'.", m)
			}
		}
		return array
	}
	log.Fatalf("Unrecognized tag type '%s'.", t)
	panic("unreachable")
}

func in(key string, m map[string]interface{}) bool {
	_, ok := m[key]
	return ok
}
14  Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING  generated  vendored  Normal file
@@ -0,0 +1,14 @@
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004

Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>

Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. You just DO WHAT THE FUCK YOU WANT TO.
22  Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md  generated  vendored  Normal file
@@ -0,0 +1,22 @@
# TOML Validator

If Go is installed, it's simple to try it out:

```bash
go get github.com/BurntSushi/toml/cmd/tomlv
tomlv some-toml-file.toml
```

You can see the types of every key in a TOML file with:

```bash
tomlv -types some-toml-file.toml
```

At the moment, only one error message is reported at a time. Error messages
include line numbers. No output means that the files given are valid TOML, or
there is a bug in `tomlv`.

Compatible with TOML version
[v0.1.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.1.0.md)
61  Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go  generated  vendored  Normal file
@@ -0,0 +1,61 @@
// Command tomlv validates TOML documents and prints each key's type.
package main

import (
	"flag"
	"fmt"
	"log"
	"os"
	"path"
	"strings"
	"text/tabwriter"

	"github.com/BurntSushi/toml"
)

var (
	flagTypes = false
)

func init() {
	log.SetFlags(0)

	flag.BoolVar(&flagTypes, "types", flagTypes,
		"When set, the types of every defined key will be shown.")

	flag.Usage = usage
	flag.Parse()
}

func usage() {
	log.Printf("Usage: %s toml-file [ toml-file ... ]\n",
		path.Base(os.Args[0]))
	flag.PrintDefaults()

	os.Exit(1)
}

func main() {
	if flag.NArg() < 1 {
		flag.Usage()
	}
	for _, f := range flag.Args() {
		var tmp interface{}
		md, err := toml.DecodeFile(f, &tmp)
		if err != nil {
			log.Fatalf("Error in '%s': %s", f, err)
		}
		if flagTypes {
			printTypes(md)
		}
	}
}

func printTypes(md toml.MetaData) {
	tabw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
	for _, key := range md.Keys() {
		fmt.Fprintf(tabw, "%s%s\t%s\n",
			strings.Repeat(" ", len(key)-1), key, md.Type(key...))
	}
	tabw.Flush()
}
472  Godeps/_workspace/src/github.com/BurntSushi/toml/decode.go  generated  vendored  Normal file
@@ -0,0 +1,472 @@
|
|||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var e = fmt.Errorf
|
||||
|
||||
// Primitive is a TOML value that hasn't been decoded into a Go value.
|
||||
// When using the various `Decode*` functions, the type `Primitive` may
|
||||
// be given to any value, and its decoding will be delayed.
|
||||
//
|
||||
// A `Primitive` value can be decoded using the `PrimitiveDecode` function.
|
||||
//
|
||||
// The underlying representation of a `Primitive` value is subject to change.
|
||||
// Do not rely on it.
|
||||
//
|
||||
// N.B. Primitive values are still parsed, so using them will only avoid
|
||||
// the overhead of reflection. They can be useful when you don't know the
|
||||
// exact type of TOML data until run time.
|
||||
type Primitive struct {
|
||||
undecoded interface{}
|
||||
context Key
|
||||
}
|
||||
|
||||
// DEPRECATED!
|
||||
//
|
||||
// Use MetaData.PrimitiveDecode instead.
|
||||
func PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md := MetaData{decoded: make(map[string]bool)}
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// PrimitiveDecode is just like the other `Decode*` functions, except it
|
||||
// decodes a TOML value that has already been parsed. Valid primitive values
|
||||
// can *only* be obtained from values filled by the decoder functions,
|
||||
// including this method. (i.e., `v` may contain more `Primitive`
|
||||
// values.)
|
||||
//
|
||||
// Meta data for primitive values is included in the meta data returned by
|
||||
// the `Decode*` functions with one exception: keys returned by the Undecoded
|
||||
// method will only reflect keys that were decoded. Namely, any keys hidden
|
||||
// behind a Primitive will be considered undecoded. Executing this method will
|
||||
// update the undecoded keys in the meta data. (See the example.)
|
||||
func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md.context = primValue.context
|
||||
defer func() { md.context = nil }()
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// Decode will decode the contents of `data` in TOML format into a pointer
|
||||
// `v`.
|
||||
//
|
||||
// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be
|
||||
// used interchangeably.)
|
||||
//
|
||||
// TOML arrays of tables correspond to either a slice of structs or a slice
|
||||
// of maps.
|
||||
//
|
||||
// TOML datetimes correspond to Go `time.Time` values.
|
||||
//
|
||||
// All other TOML types (float, string, int, bool and array) correspond
|
||||
// to the obvious Go types.
|
||||
//
|
||||
// An exception to the above rules is if a type implements the
|
||||
// encoding.TextUnmarshaler interface. In this case, any primitive TOML value
|
||||
// (floats, strings, integers, booleans and datetimes) will be converted to
|
||||
// a byte string and given to the value's UnmarshalText method. See the
|
||||
// Unmarshaler example for a demonstration with time duration strings.
|
||||
//
|
||||
// Key mapping
|
||||
//
|
||||
// TOML keys can map to either keys in a Go map or field names in a Go
|
||||
// struct. The special `toml` struct tag may be used to map TOML keys to
|
||||
// struct fields that don't match the key name exactly. (See the example.)
|
||||
// A case insensitive match to struct names will be tried if an exact match
|
||||
// can't be found.
|
||||
//
|
||||
// The mapping between TOML values and Go values is loose. That is, there
|
||||
// may exist TOML values that cannot be placed into your representation, and
|
||||
// there may be parts of your representation that do not correspond to
|
||||
// TOML values. This loose mapping can be made stricter by using the IsDefined
|
||||
// and/or Undecoded methods on the MetaData returned.
|
||||
//
|
||||
// This decoder will not handle cyclic types. If a cyclic type is passed,
|
||||
// `Decode` will not terminate.
|
||||
func Decode(data string, v interface{}) (MetaData, error) {
|
||||
p, err := parse(data)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
md := MetaData{
|
||||
p.mapping, p.types, p.ordered,
|
||||
make(map[string]bool, len(p.ordered)), nil,
|
||||
}
|
||||
return md, md.unify(p.mapping, rvalue(v))
|
||||
}
|
||||
|
||||
// DecodeFile is just like Decode, except it will automatically read the
|
||||
// contents of the file at `fpath` and decode it for you.
|
||||
func DecodeFile(fpath string, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadFile(fpath)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// DecodeReader is just like Decode, except it will consume all bytes
|
||||
// from the reader and decode it for you.
|
||||
func DecodeReader(r io.Reader, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// unify performs a sort of type unification based on the structure of `rv`,
|
||||
// which is the client representation.
|
||||
//
|
||||
// Any type mismatch produces an error. Finding a type that we don't know
|
||||
// how to handle produces an unsupported type error.
|
||||
func (md *MetaData) unify(data interface{}, rv reflect.Value) error {
|
||||
// Special case. Look for a `Primitive` value.
|
||||
if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() {
|
||||
// Save the undecoded data and the key context into the primitive
|
||||
// value.
|
||||
context := make(Key, len(md.context))
|
||||
copy(context, md.context)
|
||||
rv.Set(reflect.ValueOf(Primitive{
|
||||
undecoded: data,
|
||||
context: context,
|
||||
}))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Special case. Handle time.Time values specifically.
|
||||
// TODO: Remove this code when we decide to drop support for Go 1.1.
|
||||
// This isn't necessary in Go 1.2 because time.Time satisfies the encoding
|
||||
// interfaces.
|
||||
if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) {
|
||||
return md.unifyDatetime(data, rv)
|
||||
}
|
||||
|
||||
// Special case. Look for a value satisfying the TextUnmarshaler interface.
|
||||
if v, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return md.unifyText(data, v)
|
||||
}
|
||||
// BUG(burntsushi)
|
||||
// The behavior here is incorrect whenever a Go type satisfies the
|
||||
// encoding.TextUnmarshaler interface but also corresponds to a TOML
|
||||
// hash or array. In particular, the unmarshaler should only be applied
|
||||
// to primitive TOML values. But at this point, it will be applied to
|
||||
// all kinds of values and produce an incorrect error whenever those values
|
||||
// are hashes or arrays (including arrays of tables).
|
||||
|
||||
k := rv.Kind()
|
||||
|
||||
// laziness
|
||||
if k >= reflect.Int && k <= reflect.Uint64 {
|
||||
return md.unifyInt(data, rv)
|
||||
}
|
||||
switch k {
|
||||
case reflect.Ptr:
|
||||
elem := reflect.New(rv.Type().Elem())
|
||||
err := md.unify(data, reflect.Indirect(elem))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rv.Set(elem)
|
||||
return nil
|
||||
case reflect.Struct:
|
||||
return md.unifyStruct(data, rv)
|
||||
case reflect.Map:
|
||||
return md.unifyMap(data, rv)
|
||||
case reflect.Array:
|
||||
return md.unifyArray(data, rv)
|
||||
case reflect.Slice:
|
||||
return md.unifySlice(data, rv)
|
||||
case reflect.String:
|
||||
return md.unifyString(data, rv)
|
||||
case reflect.Bool:
|
||||
return md.unifyBool(data, rv)
|
||||
case reflect.Interface:
|
||||
// we only support empty interfaces.
|
||||
if rv.NumMethod() > 0 {
|
||||
return e("Unsupported type '%s'.", rv.Kind())
|
||||
}
|
||||
return md.unifyAnything(data, rv)
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
return md.unifyFloat64(data, rv)
|
||||
}
|
||||
return e("Unsupported type '%s'.", rv.Kind())
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
return mismatch(rv, "map", mapping)
|
||||
}
|
||||
|
||||
for key, datum := range tmap {
|
||||
var f *field
|
||||
fields := cachedTypeFields(rv.Type())
|
||||
for i := range fields {
|
||||
ff := &fields[i]
|
||||
if ff.name == key {
|
||||
f = ff
|
||||
break
|
||||
}
|
||||
if f == nil && strings.EqualFold(ff.name, key) {
|
||||
f = ff
|
||||
}
|
||||
}
|
||||
if f != nil {
|
||||
subv := rv
|
||||
for _, i := range f.index {
|
||||
subv = indirect(subv.Field(i))
|
||||
}
|
||||
if isUnifiable(subv) {
|
||||
md.decoded[md.context.add(key).String()] = true
|
||||
md.context = append(md.context, key)
|
||||
if err := md.unify(datum, subv); err != nil {
|
||||
return e("Type mismatch for '%s.%s': %s",
|
||||
rv.Type().String(), f.name, err)
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
} else if f.name != "" {
|
||||
// Bad user! No soup for you!
|
||||
return e("Field '%s.%s' is unexported, and therefore cannot "+
|
||||
"be loaded with reflection.", rv.Type().String(), f.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
return badtype("map", mapping)
|
||||
}
|
||||
if rv.IsNil() {
|
||||
rv.Set(reflect.MakeMap(rv.Type()))
|
||||
}
|
||||
for k, v := range tmap {
|
||||
md.decoded[md.context.add(k).String()] = true
|
||||
md.context = append(md.context, k)
|
||||
|
||||
rvkey := indirect(reflect.New(rv.Type().Key()))
|
||||
rvval := reflect.Indirect(reflect.New(rv.Type().Elem()))
|
||||
if err := md.unify(v, rvval); err != nil {
|
||||
return err
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
|
||||
rvkey.SetString(k)
|
||||
rv.SetMapIndex(rvkey, rvval)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
return badtype("slice", data)
|
||||
}
|
||||
sliceLen := datav.Len()
|
||||
if sliceLen != rv.Len() {
|
||||
return e("expected array length %d; got TOML array of length %d",
|
||||
rv.Len(), sliceLen)
|
||||
}
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
return badtype("slice", data)
|
||||
}
|
||||
sliceLen := datav.Len()
|
||||
if rv.IsNil() {
|
||||
rv.Set(reflect.MakeSlice(rv.Type(), sliceLen, sliceLen))
|
||||
}
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySliceArray(data, rv reflect.Value) error {
|
||||
sliceLen := data.Len()
|
||||
for i := 0; i < sliceLen; i++ {
|
||||
v := data.Index(i).Interface()
|
||||
sliceval := indirect(rv.Index(i))
|
||||
if err := md.unify(v, sliceval); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error {
|
||||
if _, ok := data.(time.Time); ok {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
return badtype("time.Time", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error {
|
||||
if s, ok := data.(string); ok {
|
||||
rv.SetString(s)
|
||||
return nil
|
||||
}
|
||||
return badtype("string", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(float64); ok {
|
||||
switch rv.Kind() {
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
rv.SetFloat(num)
|
||||
default:
|
||||
panic("bug")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("float", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(int64); ok {
|
||||
if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Int8:
|
||||
if num < math.MinInt8 || num > math.MaxInt8 {
|
||||
return e("Value '%d' is out of range for int8.", num)
|
||||
}
|
||||
case reflect.Int16:
|
||||
if num < math.MinInt16 || num > math.MaxInt16 {
|
||||
return e("Value '%d' is out of range for int16.", num)
|
||||
}
|
||||
case reflect.Int32:
|
||||
if num < math.MinInt32 || num > math.MaxInt32 {
|
||||
return e("Value '%d' is out of range for int32.", num)
|
||||
}
|
||||
}
|
||||
rv.SetInt(num)
|
||||
} else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 {
|
||||
unum := uint64(num)
|
||||
switch rv.Kind() {
|
||||
case reflect.Uint, reflect.Uint64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Uint8:
|
||||
if num < 0 || unum > math.MaxUint8 {
|
||||
return e("Value '%d' is out of range for uint8.", num)
|
||||
}
|
||||
case reflect.Uint16:
|
||||
if num < 0 || unum > math.MaxUint16 {
|
||||
return e("Value '%d' is out of range for uint16.", num)
|
||||
}
|
||||
case reflect.Uint32:
|
||||
if num < 0 || unum > math.MaxUint32 {
|
||||
return e("Value '%d' is out of range for uint32.", num)
|
||||
}
|
||||
}
|
||||
rv.SetUint(unum)
|
||||
} else {
|
||||
panic("unreachable")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("integer", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error {
|
||||
if b, ok := data.(bool); ok {
|
||||
rv.SetBool(b)
|
||||
return nil
|
||||
}
|
||||
return badtype("boolean", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error {
|
||||
var s string
|
||||
switch sdata := data.(type) {
|
||||
case TextMarshaler:
|
||||
text, err := sdata.MarshalText()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s = string(text)
|
||||
case fmt.Stringer:
|
||||
s = sdata.String()
|
||||
case string:
|
||||
s = sdata
|
||||
case bool:
|
||||
s = fmt.Sprintf("%v", sdata)
|
||||
case int64:
|
||||
s = fmt.Sprintf("%d", sdata)
|
||||
case float64:
|
||||
s = fmt.Sprintf("%f", sdata)
|
||||
default:
|
||||
return badtype("primitive (string-like)", data)
|
||||
}
|
||||
if err := v.UnmarshalText([]byte(s)); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// rvalue returns a reflect.Value of `v`. All pointers are resolved.
|
||||
func rvalue(v interface{}) reflect.Value {
|
||||
return indirect(reflect.ValueOf(v))
|
||||
}
|
||||
|
||||
// indirect returns the value pointed to by a pointer.
|
||||
// Pointers are followed until the value is not a pointer.
|
||||
// New values are allocated for each nil pointer.
|
||||
//
|
||||
// An exception to this rule is if the value satisfies an interface of
|
||||
// interest to us (like encoding.TextUnmarshaler).
|
||||
func indirect(v reflect.Value) reflect.Value {
|
||||
if v.Kind() != reflect.Ptr {
|
||||
if v.CanAddr() {
|
||||
pv := v.Addr()
|
||||
if _, ok := pv.Interface().(TextUnmarshaler); ok {
|
||||
return pv
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
||||
if v.IsNil() {
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
}
|
||||
return indirect(reflect.Indirect(v))
|
||||
}
|
||||
|
||||
func isUnifiable(rv reflect.Value) bool {
|
||||
if rv.CanSet() {
|
||||
return true
|
||||
}
|
||||
if _, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func badtype(expected string, data interface{}) error {
|
||||
return e("Expected %s but found '%T'.", expected, data)
|
||||
}
|
||||
|
||||
func mismatch(user reflect.Value, expected string, data interface{}) error {
|
||||
return e("Type mismatch for %s. Expected %s but found '%T'.",
|
||||
user.Type().String(), expected, data)
|
||||
}
|
99  Godeps/_workspace/src/github.com/BurntSushi/toml/decode_meta.go  generated  vendored  Normal file
@@ -0,0 +1,99 @@
|
|||
package toml
|
||||
|
||||
import "strings"
|
||||
|
||||
// MetaData allows access to meta information about TOML data that may not
|
||||
// be inferrable via reflection. In particular, whether a key has been defined
|
||||
// and the TOML type of a key.
|
||||
type MetaData struct {
|
||||
mapping map[string]interface{}
|
||||
types map[string]tomlType
|
||||
keys []Key
|
||||
decoded map[string]bool
|
||||
context Key // Used only during decoding.
|
||||
}
|
||||
|
||||
// IsDefined returns true if the key given exists in the TOML data. The key
|
||||
// should be specified hierarchially. e.g.,
|
||||
//
|
||||
// // access the TOML key 'a.b.c'
|
||||
// IsDefined("a", "b", "c")
|
||||
//
|
||||
// IsDefined will return false if an empty key given. Keys are case sensitive.
|
||||
func (md *MetaData) IsDefined(key ...string) bool {
|
||||
if len(key) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
var hash map[string]interface{}
|
||||
var ok bool
|
||||
var hashOrVal interface{} = md.mapping
|
||||
for _, k := range key {
|
||||
if hash, ok = hashOrVal.(map[string]interface{}); !ok {
|
||||
return false
|
||||
}
|
||||
if hashOrVal, ok = hash[k]; !ok {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Type returns a string representation of the type of the key specified.
|
||||
//
|
||||
// Type will return the empty string if given an empty key or a key that
|
||||
// does not exist. Keys are case sensitive.
|
||||
func (md *MetaData) Type(key ...string) string {
|
||||
fullkey := strings.Join(key, ".")
|
||||
if typ, ok := md.types[fullkey]; ok {
|
||||
return typ.typeString()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// Key is the type of any TOML key, including key groups. Use (MetaData).Keys
|
||||
// to get values of this type.
|
||||
type Key []string
|
||||
|
||||
func (k Key) String() string {
|
||||
return strings.Join(k, ".")
|
||||
}
|
||||
|
||||
func (k Key) add(piece string) Key {
|
||||
newKey := make(Key, len(k)+1)
|
||||
copy(newKey, k)
|
||||
newKey[len(k)] = piece
|
||||
return newKey
|
||||
}
|
||||
|
||||
// Keys returns a slice of every key in the TOML data, including key groups.
|
||||
// Each key is itself a slice, where the first element is the top of the
|
||||
// hierarchy and the last is the most specific.
|
||||
//
|
||||
// The list will have the same order as the keys appeared in the TOML data.
|
||||
//
|
||||
// All keys returned are non-empty.
|
||||
func (md *MetaData) Keys() []Key {
|
||||
return md.keys
|
||||
}
|
||||
|
||||
// Undecoded returns all keys that have not been decoded in the order in which
|
||||
// they appear in the original TOML document.
|
||||
//
|
||||
// This includes keys that haven't been decoded because of a Primitive value.
|
||||
// Once the Primitive value is decoded, the keys will be considered decoded.
|
||||
//
|
||||
// Also note that decoding into an empty interface will result in no decoding,
|
||||
// and so no keys will be considered decoded.
|
||||
//
|
||||
// In this sense, the Undecoded keys correspond to keys in the TOML document
|
||||
// that do not have a concrete type in your representation.
|
||||
func (md *MetaData) Undecoded() []Key {
|
||||
undecoded := make([]Key, 0, len(md.keys))
|
||||
for _, key := range md.keys {
|
||||
if !md.decoded[key.String()] {
|
||||
undecoded = append(undecoded, key)
|
||||
}
|
||||
}
|
||||
return undecoded
|
||||
}
|
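The MetaData helpers added in decode_meta.go above (IsDefined, Type, Keys, Undecoded) are typically used together after a Decode call. Here is a minimal sketch (the config struct, keys, and values are hypothetical, not taken from the commit) of how they fit together:

```go
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

func main() {
	var conf struct {
		Name string
		Port int
	}
	// "extra" is deliberately absent from conf so it shows up in Undecoded().
	blob := "name = \"traefik\"\nport = 80\nextra = true\n"
	md, err := toml.Decode(blob, &conf)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(md.IsDefined("port")) // true: the key exists in the parsed TOML
	fmt.Println(md.Type("port"))      // the TOML type name for the key (an integer here)
	fmt.Println(md.Undecoded())       // keys present in the TOML but not in conf, e.g. "extra"
}
```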
540  Godeps/_workspace/src/github.com/BurntSushi/toml/decode_test.go  generated  vendored  Normal file
@@ -0,0 +1,540 @@
|
|||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetFlags(0)
|
||||
}
|
||||
|
||||
func TestDecodeSimple(t *testing.T) {
|
||||
var testSimple = `
|
||||
age = 250
|
||||
andrew = "gallant"
|
||||
kait = "brady"
|
||||
now = 1987-07-05T05:45:00Z
|
||||
yesOrNo = true
|
||||
pi = 3.14
|
||||
colors = [
|
||||
["red", "green", "blue"],
|
||||
["cyan", "magenta", "yellow", "black"],
|
||||
]
|
||||
|
||||
[My.Cats]
|
||||
plato = "cat 1"
|
||||
cauchy = "cat 2"
|
||||
`
|
||||
|
||||
type cats struct {
|
||||
Plato string
|
||||
Cauchy string
|
||||
}
|
||||
type simple struct {
|
||||
Age int
|
||||
Colors [][]string
|
||||
Pi float64
|
||||
YesOrNo bool
|
||||
Now time.Time
|
||||
Andrew string
|
||||
Kait string
|
||||
My map[string]cats
|
||||
}
|
||||
|
||||
var val simple
|
||||
_, err := Decode(testSimple, &val)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
now, err := time.Parse("2006-01-02T15:04:05", "1987-07-05T05:45:00")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
var answer = simple{
|
||||
Age: 250,
|
||||
Andrew: "gallant",
|
||||
Kait: "brady",
|
||||
Now: now,
|
||||
YesOrNo: true,
|
||||
Pi: 3.14,
|
||||
Colors: [][]string{
|
||||
{"red", "green", "blue"},
|
||||
{"cyan", "magenta", "yellow", "black"},
|
||||
},
|
||||
My: map[string]cats{
|
||||
"Cats": cats{Plato: "cat 1", Cauchy: "cat 2"},
|
||||
},
|
||||
}
|
||||
if !reflect.DeepEqual(val, answer) {
|
||||
t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n",
|
||||
answer, val)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeEmbedded(t *testing.T) {
|
||||
type Dog struct{ Name string }
|
||||
type Age int
|
||||
|
||||
tests := map[string]struct {
|
||||
input string
|
||||
decodeInto interface{}
|
||||
wantDecoded interface{}
|
||||
}{
|
||||
"embedded struct": {
|
||||
input: `Name = "milton"`,
|
||||
decodeInto: &struct{ Dog }{},
|
||||
wantDecoded: &struct{ Dog }{Dog{"milton"}},
|
||||
},
|
||||
"embedded non-nil pointer to struct": {
|
||||
input: `Name = "milton"`,
|
||||
decodeInto: &struct{ *Dog }{},
|
||||
wantDecoded: &struct{ *Dog }{&Dog{"milton"}},
|
||||
},
|
||||
"embedded nil pointer to struct": {
|
||||
input: ``,
|
||||
decodeInto: &struct{ *Dog }{},
|
||||
wantDecoded: &struct{ *Dog }{nil},
|
||||
},
|
||||
"embedded int": {
|
||||
input: `Age = -5`,
|
||||
decodeInto: &struct{ Age }{},
|
||||
wantDecoded: &struct{ Age }{-5},
|
||||
},
|
||||
}
|
||||
|
||||
for label, test := range tests {
|
||||
_, err := Decode(test.input, test.decodeInto)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(test.wantDecoded, test.decodeInto) {
|
||||
t.Errorf("%s: want decoded == %+v, got %+v",
|
||||
label, test.wantDecoded, test.decodeInto)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestTableArrays(t *testing.T) {
|
||||
var tomlTableArrays = `
|
||||
[[albums]]
|
||||
name = "Born to Run"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Jungleland"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Meeting Across the River"
|
||||
|
||||
[[albums]]
|
||||
name = "Born in the USA"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Glory Days"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Dancing in the Dark"
|
||||
`
|
||||
|
||||
type Song struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
type Album struct {
|
||||
Name string
|
||||
Songs []Song
|
||||
}
|
||||
|
||||
type Music struct {
|
||||
Albums []Album
|
||||
}
|
||||
|
||||
expected := Music{[]Album{
|
||||
{"Born to Run", []Song{{"Jungleland"}, {"Meeting Across the River"}}},
|
||||
{"Born in the USA", []Song{{"Glory Days"}, {"Dancing in the Dark"}}},
|
||||
}}
|
||||
var got Music
|
||||
if _, err := Decode(tomlTableArrays, &got); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected, got) {
|
||||
t.Fatalf("\n%#v\n!=\n%#v\n", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
// Case insensitive matching tests.
|
||||
// A bit more comprehensive than needed given the current implementation,
|
||||
// but implementations change.
|
||||
// Probably still missing demonstrations of some ugly corner cases regarding
|
||||
// case insensitive matching and multiple fields.
|
||||
func TestCase(t *testing.T) {
|
||||
var caseToml = `
|
||||
tOpString = "string"
|
||||
tOpInt = 1
|
||||
tOpFloat = 1.1
|
||||
tOpBool = true
|
||||
tOpdate = 2006-01-02T15:04:05Z
|
||||
tOparray = [ "array" ]
|
||||
Match = "i should be in Match only"
|
||||
MatcH = "i should be in MatcH only"
|
||||
once = "just once"
|
||||
[nEst.eD]
|
||||
nEstedString = "another string"
|
||||
`
|
||||
|
||||
type InsensitiveEd struct {
|
||||
NestedString string
|
||||
}
|
||||
|
||||
type InsensitiveNest struct {
|
||||
Ed InsensitiveEd
|
||||
}
|
||||
|
||||
type Insensitive struct {
|
||||
TopString string
|
||||
TopInt int
|
||||
TopFloat float64
|
||||
TopBool bool
|
||||
TopDate time.Time
|
||||
TopArray []string
|
||||
Match string
|
||||
MatcH string
|
||||
Once string
|
||||
OncE string
|
||||
Nest InsensitiveNest
|
||||
}
|
||||
|
||||
tme, err := time.Parse(time.RFC3339, time.RFC3339[:len(time.RFC3339)-5])
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
expected := Insensitive{
|
||||
TopString: "string",
|
||||
TopInt: 1,
|
||||
TopFloat: 1.1,
|
||||
TopBool: true,
|
||||
TopDate: tme,
|
||||
TopArray: []string{"array"},
|
||||
MatcH: "i should be in MatcH only",
|
||||
Match: "i should be in Match only",
|
||||
Once: "just once",
|
||||
OncE: "",
|
||||
Nest: InsensitiveNest{
|
||||
Ed: InsensitiveEd{NestedString: "another string"},
|
||||
},
|
||||
}
|
||||
var got Insensitive
|
||||
if _, err := Decode(caseToml, &got); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected, got) {
|
||||
t.Fatalf("\n%#v\n!=\n%#v\n", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPointers(t *testing.T) {
|
||||
type Object struct {
|
||||
Type string
|
||||
Description string
|
||||
}
|
||||
|
||||
type Dict struct {
|
||||
NamedObject map[string]*Object
|
||||
BaseObject *Object
|
||||
Strptr *string
|
||||
Strptrs []*string
|
||||
}
|
||||
s1, s2, s3 := "blah", "abc", "def"
|
||||
expected := &Dict{
|
||||
Strptr: &s1,
|
||||
Strptrs: []*string{&s2, &s3},
|
||||
NamedObject: map[string]*Object{
|
||||
"foo": {"FOO", "fooooo!!!"},
|
||||
"bar": {"BAR", "ba-ba-ba-ba-barrrr!!!"},
|
||||
},
|
||||
BaseObject: &Object{"BASE", "da base"},
|
||||
}
|
||||
|
||||
ex1 := `
|
||||
Strptr = "blah"
|
||||
Strptrs = ["abc", "def"]
|
||||
|
||||
[NamedObject.foo]
|
||||
Type = "FOO"
|
||||
Description = "fooooo!!!"
|
||||
|
||||
[NamedObject.bar]
|
||||
Type = "BAR"
|
||||
Description = "ba-ba-ba-ba-barrrr!!!"
|
||||
|
||||
[BaseObject]
|
||||
Type = "BASE"
|
||||
Description = "da base"
|
||||
`
|
||||
dict := new(Dict)
|
||||
_, err := Decode(ex1, dict)
|
||||
if err != nil {
|
||||
t.Errorf("Decode error: %v", err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected, dict) {
|
||||
t.Fatalf("\n%#v\n!=\n%#v\n", expected, dict)
|
||||
}
|
||||
}
|
||||
|
||||
type sphere struct {
|
||||
Center [3]float64
|
||||
Radius float64
|
||||
}
|
||||
|
||||
func TestDecodeSimpleArray(t *testing.T) {
|
||||
var s1 sphere
|
||||
if _, err := Decode(`center = [0.0, 1.5, 0.0]`, &s1); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeArrayWrongSize(t *testing.T) {
|
||||
var s1 sphere
|
||||
if _, err := Decode(`center = [0.1, 2.3]`, &s1); err == nil {
|
||||
t.Fatal("Expected array type mismatch error")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeLargeIntoSmallInt(t *testing.T) {
|
||||
type table struct {
|
||||
Value int8
|
||||
}
|
||||
var tab table
|
||||
if _, err := Decode(`value = 500`, &tab); err == nil {
|
||||
t.Fatal("Expected integer out-of-bounds error.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeSizedInts(t *testing.T) {
|
||||
type table struct {
|
||||
U8 uint8
|
||||
U16 uint16
|
||||
U32 uint32
|
||||
U64 uint64
|
||||
U uint
|
||||
I8 int8
|
||||
I16 int16
|
||||
I32 int32
|
||||
I64 int64
|
||||
I int
|
||||
}
|
||||
answer := table{1, 1, 1, 1, 1, -1, -1, -1, -1, -1}
|
||||
toml := `
|
||||
u8 = 1
|
||||
u16 = 1
|
||||
u32 = 1
|
||||
u64 = 1
|
||||
u = 1
|
||||
i8 = -1
|
||||
i16 = -1
|
||||
i32 = -1
|
||||
i64 = -1
|
||||
i = -1
|
||||
`
|
||||
var tab table
|
||||
if _, err := Decode(toml, &tab); err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
if answer != tab {
|
||||
t.Fatalf("Expected %#v but got %#v", answer, tab)
|
||||
}
|
||||
}
|
||||
|
||||
func ExampleMetaData_PrimitiveDecode() {
|
||||
var md MetaData
|
||||
var err error
|
||||
|
||||
var tomlBlob = `
|
||||
ranking = ["Springsteen", "J Geils"]
|
||||
|
||||
[bands.Springsteen]
|
||||
started = 1973
|
||||
albums = ["Greetings", "WIESS", "Born to Run", "Darkness"]
|
||||
|
||||
[bands.J Geils]
|
||||
started = 1970
|
||||
albums = ["The J. Geils Band", "Full House", "Blow Your Face Out"]
|
||||
`
|
||||
|
||||
type band struct {
|
||||
Started int
|
||||
Albums []string
|
||||
}
|
||||
type classics struct {
|
||||
Ranking []string
|
||||
Bands map[string]Primitive
|
||||
}
|
||||
|
||||
// Do the initial decode. Reflection is delayed on Primitive values.
|
||||
var music classics
|
||||
if md, err = Decode(tomlBlob, &music); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// MetaData still includes information on Primitive values.
|
||||
fmt.Printf("Is `bands.Springsteen` defined? %v\n",
|
||||
md.IsDefined("bands", "Springsteen"))
|
||||
|
||||
// Decode primitive data into Go values.
|
||||
for _, artist := range music.Ranking {
|
||||
// A band is a primitive value, so we need to decode it to get a
|
||||
// real `band` value.
|
||||
primValue := music.Bands[artist]
|
||||
|
||||
var aBand band
|
||||
if err = md.PrimitiveDecode(primValue, &aBand); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("%s started in %d.\n", artist, aBand.Started)
|
||||
}
|
||||
// Check to see if there were any fields left undecoded.
|
||||
// Note that this won't be empty before decoding the Primitive value!
|
||||
fmt.Printf("Undecoded: %q\n", md.Undecoded())
|
||||
|
||||
// Output:
|
||||
// Is `bands.Springsteen` defined? true
|
||||
// Springsteen started in 1973.
|
||||
// J Geils started in 1970.
|
||||
// Undecoded: []
|
||||
}
|
||||
|
||||
func ExampleDecode() {
|
||||
var tomlBlob = `
|
||||
# Some comments.
|
||||
[alpha]
|
||||
ip = "10.0.0.1"
|
||||
|
||||
[alpha.config]
|
||||
Ports = [ 8001, 8002 ]
|
||||
Location = "Toronto"
|
||||
Created = 1987-07-05T05:45:00Z
|
||||
|
||||
[beta]
|
||||
ip = "10.0.0.2"
|
||||
|
||||
[beta.config]
|
||||
Ports = [ 9001, 9002 ]
|
||||
Location = "New Jersey"
|
||||
Created = 1887-01-05T05:55:00Z
|
||||
`
|
||||
|
||||
type serverConfig struct {
|
||||
Ports []int
|
||||
Location string
|
||||
Created time.Time
|
||||
}
|
||||
|
||||
type server struct {
|
||||
IP string `toml:"ip"`
|
||||
Config serverConfig `toml:"config"`
|
||||
}
|
||||
|
||||
type servers map[string]server
|
||||
|
||||
var config servers
|
||||
if _, err := Decode(tomlBlob, &config); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
for _, name := range []string{"alpha", "beta"} {
|
||||
s := config[name]
|
||||
fmt.Printf("Server: %s (ip: %s) in %s created on %s\n",
|
||||
name, s.IP, s.Config.Location,
|
||||
s.Config.Created.Format("2006-01-02"))
|
||||
fmt.Printf("Ports: %v\n", s.Config.Ports)
|
||||
}
|
||||
|
||||
// Output:
|
||||
// Server: alpha (ip: 10.0.0.1) in Toronto created on 1987-07-05
|
||||
// Ports: [8001 8002]
|
||||
// Server: beta (ip: 10.0.0.2) in New Jersey created on 1887-01-05
|
||||
// Ports: [9001 9002]
|
||||
}
|
||||
|
||||
type duration struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func (d *duration) UnmarshalText(text []byte) error {
|
||||
var err error
|
||||
d.Duration, err = time.ParseDuration(string(text))
|
||||
return err
|
||||
}
|
||||
|
||||
// Example Unmarshaler shows how to decode TOML strings into your own
|
||||
// custom data type.
|
||||
func Example_unmarshaler() {
|
||||
blob := `
|
||||
[[song]]
|
||||
name = "Thunder Road"
|
||||
duration = "4m49s"
|
||||
|
||||
[[song]]
|
||||
name = "Stairway to Heaven"
|
||||
duration = "8m03s"
|
||||
`
|
||||
type song struct {
|
||||
Name string
|
||||
Duration duration
|
||||
}
|
||||
type songs struct {
|
||||
Song []song
|
||||
}
|
||||
var favorites songs
|
||||
if _, err := Decode(blob, &favorites); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Code to implement the TextUnmarshaler interface for `duration`:
|
||||
//
|
||||
// type duration struct {
|
||||
// time.Duration
|
||||
// }
|
||||
//
|
||||
// func (d *duration) UnmarshalText(text []byte) error {
|
||||
// var err error
|
||||
// d.Duration, err = time.ParseDuration(string(text))
|
||||
// return err
|
||||
// }
|
||||
|
||||
for _, s := range favorites.Song {
|
||||
fmt.Printf("%s (%s)\n", s.Name, s.Duration)
|
||||
}
|
||||
// Output:
|
||||
// Thunder Road (4m49s)
|
||||
// Stairway to Heaven (8m3s)
|
||||
}
|
||||
|
||||
// Example StrictDecoding shows how to detect whether there are keys in the
|
||||
// TOML document that weren't decoded into the value given. This is useful
|
||||
// for returning an error to the user if they've included extraneous fields
|
||||
// in their configuration.
|
||||
func Example_strictDecoding() {
|
||||
var blob = `
|
||||
key1 = "value1"
|
||||
key2 = "value2"
|
||||
key3 = "value3"
|
||||
`
|
||||
type config struct {
|
||||
Key1 string
|
||||
Key3 string
|
||||
}
|
||||
|
||||
var conf config
|
||||
md, err := Decode(blob, &conf)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("Undecoded keys: %q\n", md.Undecoded())
|
||||
// Output:
|
||||
// Undecoded keys: ["key2"]
|
||||
}
|
27 Godeps/_workspace/src/github.com/BurntSushi/toml/doc.go generated vendored Normal file
@@ -0,0 +1,27 @@
/*
|
||||
Package toml provides facilities for decoding and encoding TOML configuration
|
||||
files via reflection. There is also support for delaying decoding with
|
||||
the Primitive type, and querying the set of keys in a TOML document with the
|
||||
MetaData type.
|
||||
|
||||
The specification implemented: https://github.com/mojombo/toml
|
||||
|
||||
The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify
|
||||
whether a file is a valid TOML document. It can also be used to print the
|
||||
type of each key in a TOML document.
|
||||
|
||||
Testing
|
||||
|
||||
There are two important types of tests used for this package. The first is
|
||||
contained inside '*_test.go' files and uses the standard Go unit testing
|
||||
framework. These tests are primarily devoted to holistically testing the
|
||||
decoder and encoder.
|
||||
|
||||
The second type of testing is used to verify the implementation's adherence
|
||||
to the TOML specification. These tests have been factored into their own
|
||||
project: https://github.com/BurntSushi/toml-test
|
||||
|
||||
The reason the tests are in a separate project is so that they can be used by
|
||||
any implementation of TOML. Namely, it is language agnostic.
|
||||
*/
|
||||
package toml
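// Illustrative sketch, not part of the vendored file: the package comment above
// mentions querying the set of keys in a document through the MetaData type. A
// minimal use, assuming MetaData also exposes Keys() and Type(...) as in
// upstream BurntSushi/toml; the function name exampleMetaDataKeys is hypothetical.
func exampleMetaDataKeys() {
	var v map[string]interface{}
	md, err := Decode("[server]\nport = 8080\n", &v)
	if err != nil {
		return
	}
	_ = md.IsDefined("server", "port") // true; IsDefined is shown in the examples above
	for _, key := range md.Keys() {    // assumed: every key seen while decoding, in order
		_ = md.Type(key...) // assumed: TOML type name, e.g. "Integer" for server.port
	}
}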
|
521 Godeps/_workspace/src/github.com/BurntSushi/toml/encode.go generated vendored Normal file
@@ -0,0 +1,521 @@
package toml
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
type tomlEncodeError struct{ error }
|
||||
|
||||
var (
|
||||
errArrayMixedElementTypes = errors.New(
|
||||
"can't encode array with mixed element types")
|
||||
errArrayNilElement = errors.New(
|
||||
"can't encode array with nil element")
|
||||
errNonString = errors.New(
|
||||
"can't encode a map with non-string key type")
|
||||
errAnonNonStruct = errors.New(
|
||||
"can't encode an anonymous field that is not a struct")
|
||||
errArrayNoTable = errors.New(
|
||||
"TOML array element can't contain a table")
|
||||
errNoKey = errors.New(
|
||||
"top-level values must be a Go map or struct")
|
||||
errAnything = errors.New("") // used in testing
|
||||
)
|
||||
|
||||
var quotedReplacer = strings.NewReplacer(
|
||||
"\t", "\\t",
|
||||
"\n", "\\n",
|
||||
"\r", "\\r",
|
||||
"\"", "\\\"",
|
||||
"\\", "\\\\",
|
||||
)
|
||||
|
||||
// Encoder controls the encoding of Go values to a TOML document to some
|
||||
// io.Writer.
|
||||
//
|
||||
// The indentation level can be controlled with the Indent field.
|
||||
type Encoder struct {
|
||||
// A single indentation level. By default it is two spaces.
|
||||
Indent string
|
||||
|
||||
// hasWritten is whether we have written any output to w yet.
|
||||
hasWritten bool
|
||||
w *bufio.Writer
|
||||
}
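// Illustrative sketch, not part of the vendored file: the Indent field above
// replaces the default two-space indentation, e.g. with a tab. Assumes the
// "bytes" package is imported by the caller; the function name exampleIndent
// is hypothetical.
func exampleIndent() string {
	var buf bytes.Buffer
	enc := NewEncoder(&buf)
	enc.Indent = "\t" // one tab per nesting level instead of two spaces
	if err := enc.Encode(map[string]map[string]int{"a": {"b": 1}}); err != nil {
		return ""
	}
	return buf.String() // roughly "[a]\n\tb = 1\n"
}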
|
||||
|
||||
// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer
|
||||
// given. By default, a single indentation level is 2 spaces.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{
|
||||
w: bufio.NewWriter(w),
|
||||
Indent: " ",
|
||||
}
|
||||
}
|
||||
|
||||
// Encode writes a TOML representation of the Go value to the underlying
|
||||
// io.Writer. If the value given cannot be encoded to a valid TOML document,
|
||||
// then an error is returned.
|
||||
//
|
||||
// The mapping between Go values and TOML values should be precisely the same
|
||||
// as for the Decode* functions. Similarly, the TextMarshaler interface is
|
||||
// supported by encoding the resulting bytes as strings. (If you want to write
|
||||
// arbitrary binary data then you will need to use something like base64 since
|
||||
// TOML does not have any binary types.)
|
||||
//
|
||||
// When encoding TOML hashes (i.e., Go maps or structs), keys without any
|
||||
// sub-hashes are encoded first.
|
||||
//
|
||||
// If a Go map is encoded, then its keys are sorted alphabetically for
|
||||
// deterministic output. More control over this behavior may be provided if
|
||||
// there is demand for it.
|
||||
//
|
||||
// Encoding Go values without a corresponding TOML representation---like map
|
||||
// types with non-string keys---will cause an error to be returned. Similarly
|
||||
// for mixed arrays/slices, arrays/slices with nil elements, embedded
|
||||
// non-struct types and nested slices containing maps or structs.
|
||||
// (e.g., [][]map[string]string is not allowed but []map[string]string is OK
|
||||
// and so is []map[string][]string.)
|
||||
func (enc *Encoder) Encode(v interface{}) error {
|
||||
rv := eindirect(reflect.ValueOf(v))
|
||||
if err := enc.safeEncode(Key([]string{}), rv); err != nil {
|
||||
return err
|
||||
}
|
||||
return enc.w.Flush()
|
||||
}
|
||||
|
||||
func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if terr, ok := r.(tomlEncodeError); ok {
|
||||
err = terr.error
|
||||
return
|
||||
}
|
||||
panic(r)
|
||||
}
|
||||
}()
|
||||
enc.encode(key, rv)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (enc *Encoder) encode(key Key, rv reflect.Value) {
|
||||
// Special case. Time needs to be in ISO8601 format.
|
||||
// Special case. If we can marshal the type to text, then we use that.
// Basically, this prevents the encoder from handling these types as
// generic structs (or whatever the underlying type of a TextMarshaler is).
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time, TextMarshaler:
|
||||
enc.keyEqElement(key, rv)
|
||||
return
|
||||
}
|
||||
|
||||
k := rv.Kind()
|
||||
switch k {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64,
|
||||
reflect.Float32, reflect.Float64, reflect.String, reflect.Bool:
|
||||
enc.keyEqElement(key, rv)
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) {
|
||||
enc.eArrayOfTables(key, rv)
|
||||
} else {
|
||||
enc.keyEqElement(key, rv)
|
||||
}
|
||||
case reflect.Interface:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Map:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.eTable(key, rv)
|
||||
case reflect.Ptr:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Struct:
|
||||
enc.eTable(key, rv)
|
||||
default:
|
||||
panic(e("Unsupported type for key '%s': %s", key, k))
|
||||
}
|
||||
}
|
||||
|
||||
// eElement encodes any value that can be an array element (primitives and
|
||||
// arrays).
|
||||
func (enc *Encoder) eElement(rv reflect.Value) {
|
||||
switch v := rv.Interface().(type) {
|
||||
case time.Time:
|
||||
// Special case time.Time as a primitive. Has to come before
|
||||
// TextMarshaler below because time.Time implements
|
||||
// encoding.TextMarshaler, but we need to always use UTC.
|
||||
enc.wf(v.In(time.FixedZone("UTC", 0)).Format("2006-01-02T15:04:05Z"))
|
||||
return
|
||||
case TextMarshaler:
|
||||
// Special case. Use text marshaler if it's available for this value.
|
||||
if s, err := v.MarshalText(); err != nil {
|
||||
encPanic(err)
|
||||
} else {
|
||||
enc.writeQuoted(string(s))
|
||||
}
|
||||
return
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
enc.wf(strconv.FormatBool(rv.Bool()))
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
enc.wf(strconv.FormatInt(rv.Int(), 10))
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16,
|
||||
reflect.Uint32, reflect.Uint64:
|
||||
enc.wf(strconv.FormatUint(rv.Uint(), 10))
|
||||
case reflect.Float32:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32)))
|
||||
case reflect.Float64:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64)))
|
||||
case reflect.Array, reflect.Slice:
|
||||
enc.eArrayOrSliceElement(rv)
|
||||
case reflect.Interface:
|
||||
enc.eElement(rv.Elem())
|
||||
case reflect.String:
|
||||
enc.writeQuoted(rv.String())
|
||||
default:
|
||||
panic(e("Unexpected primitive type: %s", rv.Kind()))
|
||||
}
|
||||
}
|
||||
|
||||
// By the TOML spec, all floats must have a decimal with at least one
|
||||
// number on either side.
|
||||
func floatAddDecimal(fstr string) string {
|
||||
if !strings.Contains(fstr, ".") {
|
||||
return fstr + ".0"
|
||||
}
|
||||
return fstr
|
||||
}
|
||||
|
||||
func (enc *Encoder) writeQuoted(s string) {
|
||||
enc.wf("\"%s\"", quotedReplacer.Replace(s))
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) {
|
||||
length := rv.Len()
|
||||
enc.wf("[")
|
||||
for i := 0; i < length; i++ {
|
||||
elem := rv.Index(i)
|
||||
enc.eElement(elem)
|
||||
if i != length-1 {
|
||||
enc.wf(", ")
|
||||
}
|
||||
}
|
||||
enc.wf("]")
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
panicIfInvalidKey(key, true)
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
trv := rv.Index(i)
|
||||
if isNil(trv) {
|
||||
continue
|
||||
}
|
||||
enc.newline()
|
||||
enc.wf("%s[[%s]]", enc.indentStr(key), key.String())
|
||||
enc.newline()
|
||||
enc.eMapOrStruct(key, trv)
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eTable(key Key, rv reflect.Value) {
|
||||
if len(key) == 1 {
|
||||
// Output an extra new line between top-level tables.
|
||||
// (The newline isn't written if nothing else has been written though.)
|
||||
enc.newline()
|
||||
}
|
||||
if len(key) > 0 {
|
||||
panicIfInvalidKey(key, true)
|
||||
enc.wf("%s[%s]", enc.indentStr(key), key.String())
|
||||
enc.newline()
|
||||
}
|
||||
enc.eMapOrStruct(key, rv)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) {
|
||||
switch rv := eindirect(rv); rv.Kind() {
|
||||
case reflect.Map:
|
||||
enc.eMap(key, rv)
|
||||
case reflect.Struct:
|
||||
enc.eStruct(key, rv)
|
||||
default:
|
||||
panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMap(key Key, rv reflect.Value) {
|
||||
rt := rv.Type()
|
||||
if rt.Key().Kind() != reflect.String {
|
||||
encPanic(errNonString)
|
||||
}
|
||||
|
||||
// Sort keys so that we have deterministic output. And write keys directly
|
||||
// underneath this key first, before writing sub-structs or sub-maps.
|
||||
var mapKeysDirect, mapKeysSub []string
|
||||
for _, mapKey := range rv.MapKeys() {
|
||||
k := mapKey.String()
|
||||
if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) {
|
||||
mapKeysSub = append(mapKeysSub, k)
|
||||
} else {
|
||||
mapKeysDirect = append(mapKeysDirect, k)
|
||||
}
|
||||
}
|
||||
|
||||
var writeMapKeys = func(mapKeys []string) {
|
||||
sort.Strings(mapKeys)
|
||||
for _, mapKey := range mapKeys {
|
||||
mrv := rv.MapIndex(reflect.ValueOf(mapKey))
|
||||
if isNil(mrv) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
enc.encode(key.add(mapKey), mrv)
|
||||
}
|
||||
}
|
||||
writeMapKeys(mapKeysDirect)
|
||||
writeMapKeys(mapKeysSub)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eStruct(key Key, rv reflect.Value) {
|
||||
// Write keys for fields directly under this key first, because if we write
|
||||
// a field that creates a new table, then all keys under it will be in that
|
||||
// table (not the one we're writing here).
|
||||
rt := rv.Type()
|
||||
var fieldsDirect, fieldsSub [][]int
|
||||
var addFields func(rt reflect.Type, rv reflect.Value, start []int)
|
||||
addFields = func(rt reflect.Type, rv reflect.Value, start []int) {
|
||||
for i := 0; i < rt.NumField(); i++ {
|
||||
f := rt.Field(i)
|
||||
// skip unexported fields
|
||||
if f.PkgPath != "" {
|
||||
continue
|
||||
}
|
||||
frv := rv.Field(i)
|
||||
if f.Anonymous {
|
||||
frv := eindirect(frv)
|
||||
t := frv.Type()
|
||||
if t.Kind() != reflect.Struct {
|
||||
encPanic(errAnonNonStruct)
|
||||
}
|
||||
addFields(t, frv, f.Index)
|
||||
} else if typeIsHash(tomlTypeOfGo(frv)) {
|
||||
fieldsSub = append(fieldsSub, append(start, f.Index...))
|
||||
} else {
|
||||
fieldsDirect = append(fieldsDirect, append(start, f.Index...))
|
||||
}
|
||||
}
|
||||
}
|
||||
addFields(rt, rv, nil)
|
||||
|
||||
var writeFields = func(fields [][]int) {
|
||||
for _, fieldIndex := range fields {
|
||||
sft := rt.FieldByIndex(fieldIndex)
|
||||
sf := rv.FieldByIndex(fieldIndex)
|
||||
if isNil(sf) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
|
||||
keyName := sft.Tag.Get("toml")
|
||||
if keyName == "-" {
|
||||
continue
|
||||
}
|
||||
if keyName == "" {
|
||||
keyName = sft.Name
|
||||
}
|
||||
enc.encode(key.add(keyName), sf)
|
||||
}
|
||||
}
|
||||
writeFields(fieldsDirect)
|
||||
writeFields(fieldsSub)
|
||||
}
|
||||
|
||||
// tomlTypeOfGo returns the TOML type of a Go value. The type may be `nil`,
// which means no concrete TOML type could be found (e.g., for a nil value).
// It is used to determine whether the types of array elements are mixed
// (which is forbidden).
|
||||
func tomlTypeOfGo(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() {
|
||||
return nil
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
return tomlBool
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64:
|
||||
return tomlInteger
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return tomlFloat
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlHash, tomlArrayType(rv)) {
|
||||
return tomlArrayHash
|
||||
} else {
|
||||
return tomlArray
|
||||
}
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return tomlTypeOfGo(rv.Elem())
|
||||
case reflect.String:
|
||||
return tomlString
|
||||
case reflect.Map:
|
||||
return tomlHash
|
||||
case reflect.Struct:
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time:
|
||||
return tomlDatetime
|
||||
case TextMarshaler:
|
||||
return tomlString
|
||||
default:
|
||||
return tomlHash
|
||||
}
|
||||
default:
|
||||
panic("unexpected reflect.Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
// tomlArrayType returns the element type of a TOML array. The type returned
|
||||
// may be nil if it cannot be determined (e.g., a nil slice or a zero length
|
||||
// slice). This function may also panic if it finds a type that cannot be
|
||||
// expressed in TOML (such as nil elements, heterogeneous arrays or directly
|
||||
// nested arrays of tables).
|
||||
func tomlArrayType(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() || rv.Len() == 0 {
|
||||
return nil
|
||||
}
|
||||
firstType := tomlTypeOfGo(rv.Index(0))
|
||||
if firstType == nil {
|
||||
encPanic(errArrayNilElement)
|
||||
}
|
||||
|
||||
rvlen := rv.Len()
|
||||
for i := 1; i < rvlen; i++ {
|
||||
elem := rv.Index(i)
|
||||
switch elemType := tomlTypeOfGo(elem); {
|
||||
case elemType == nil:
|
||||
encPanic(errArrayNilElement)
|
||||
case !typeEqual(firstType, elemType):
|
||||
encPanic(errArrayMixedElementTypes)
|
||||
}
|
||||
}
|
||||
// If we have a nested array, then we must make sure that the nested
|
||||
// array contains ONLY primitives.
|
||||
// This checks arbitrarily nested arrays.
|
||||
if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) {
|
||||
nest := tomlArrayType(eindirect(rv.Index(0)))
|
||||
if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) {
|
||||
encPanic(errArrayNoTable)
|
||||
}
|
||||
}
|
||||
return firstType
|
||||
}
|
||||
|
||||
func (enc *Encoder) newline() {
|
||||
if enc.hasWritten {
|
||||
enc.wf("\n")
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) keyEqElement(key Key, val reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
panicIfInvalidKey(key, false)
|
||||
enc.wf("%s%s = ", enc.indentStr(key), key[len(key)-1])
|
||||
enc.eElement(val)
|
||||
enc.newline()
|
||||
}
|
||||
|
||||
func (enc *Encoder) wf(format string, v ...interface{}) {
|
||||
if _, err := fmt.Fprintf(enc.w, format, v...); err != nil {
|
||||
encPanic(err)
|
||||
}
|
||||
enc.hasWritten = true
|
||||
}
|
||||
|
||||
func (enc *Encoder) indentStr(key Key) string {
|
||||
return strings.Repeat(enc.Indent, len(key)-1)
|
||||
}
|
||||
|
||||
func encPanic(err error) {
|
||||
panic(tomlEncodeError{err})
|
||||
}
|
||||
|
||||
func eindirect(v reflect.Value) reflect.Value {
|
||||
switch v.Kind() {
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return eindirect(v.Elem())
|
||||
default:
|
||||
return v
|
||||
}
|
||||
}
|
||||
|
||||
func isNil(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
||||
return rv.IsNil()
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func panicIfInvalidKey(key Key, hash bool) {
|
||||
if hash {
|
||||
for _, k := range key {
|
||||
if !isValidTableName(k) {
|
||||
encPanic(e("Key '%s' is not a valid table name. Table names "+
|
||||
"cannot contain '[', ']' or '.'.", key.String()))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if !isValidKeyName(key[len(key)-1]) {
|
||||
encPanic(e("Key '%s' is not a name. Key names "+
|
||||
"cannot contain whitespace.", key.String()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func isValidTableName(s string) bool {
|
||||
if len(s) == 0 {
|
||||
return false
|
||||
}
|
||||
for _, r := range s {
|
||||
if r == '[' || r == ']' || r == '.' {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func isValidKeyName(s string) bool {
|
||||
if len(s) == 0 {
|
||||
return false
|
||||
}
|
||||
for _, r := range s {
|
||||
if unicode.IsSpace(r) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
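// Illustrative sketch, not part of the vendored file: per isValidKeyName above,
// a bare key containing whitespace is rejected; keyEqElement panics internally
// and Encode returns the recovered error. Assumes "bytes" is imported by the
// caller; the function name exampleInvalidKey is hypothetical.
func exampleInvalidKey() error {
	var buf bytes.Buffer
	return NewEncoder(&buf).Encode(map[string]int{"bad key": 1}) // non-nil error
}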
|
506 Godeps/_workspace/src/github.com/BurntSushi/toml/encode_test.go generated vendored Normal file
@@ -0,0 +1,506 @@
package toml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"net"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestEncodeRoundTrip(t *testing.T) {
|
||||
type Config struct {
|
||||
Age int
|
||||
Cats []string
|
||||
Pi float64
|
||||
Perfection []int
|
||||
DOB time.Time
|
||||
Ipaddress net.IP
|
||||
}
|
||||
|
||||
var inputs = Config{
|
||||
13,
|
||||
[]string{"one", "two", "three"},
|
||||
3.145,
|
||||
[]int{11, 2, 3, 4},
|
||||
time.Now(),
|
||||
net.ParseIP("192.168.59.254"),
|
||||
}
|
||||
|
||||
var firstBuffer bytes.Buffer
|
||||
e := NewEncoder(&firstBuffer)
|
||||
err := e.Encode(inputs)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
var outputs Config
|
||||
if _, err := Decode(firstBuffer.String(), &outputs); err != nil {
|
||||
log.Printf("Could not decode:\n-----\n%s\n-----\n",
|
||||
firstBuffer.String())
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// could test each value individually, but I'm lazy
|
||||
var secondBuffer bytes.Buffer
|
||||
e2 := NewEncoder(&secondBuffer)
|
||||
err = e2.Encode(outputs)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if firstBuffer.String() != secondBuffer.String() {
|
||||
t.Error(
|
||||
firstBuffer.String(),
|
||||
"\n\n is not identical to\n\n",
|
||||
secondBuffer.String())
|
||||
}
|
||||
}
|
||||
|
||||
// XXX(burntsushi)
|
||||
// I think these tests probably should be removed. They are good, but they
|
||||
// ought to be obsolete by toml-test.
|
||||
func TestEncode(t *testing.T) {
|
||||
type Embedded struct {
|
||||
Int int `toml:"_int"`
|
||||
}
|
||||
type NonStruct int
|
||||
|
||||
date := time.Date(2014, 5, 11, 20, 30, 40, 0, time.FixedZone("IST", 3600))
|
||||
dateStr := "2014-05-11T19:30:40Z"
|
||||
|
||||
tests := map[string]struct {
|
||||
input interface{}
|
||||
wantOutput string
|
||||
wantError error
|
||||
}{
|
||||
"bool field": {
|
||||
input: struct {
|
||||
BoolTrue bool
|
||||
BoolFalse bool
|
||||
}{true, false},
|
||||
wantOutput: "BoolTrue = true\nBoolFalse = false\n",
|
||||
},
|
||||
"int fields": {
|
||||
input: struct {
|
||||
Int int
|
||||
Int8 int8
|
||||
Int16 int16
|
||||
Int32 int32
|
||||
Int64 int64
|
||||
}{1, 2, 3, 4, 5},
|
||||
wantOutput: "Int = 1\nInt8 = 2\nInt16 = 3\nInt32 = 4\nInt64 = 5\n",
|
||||
},
|
||||
"uint fields": {
|
||||
input: struct {
|
||||
Uint uint
|
||||
Uint8 uint8
|
||||
Uint16 uint16
|
||||
Uint32 uint32
|
||||
Uint64 uint64
|
||||
}{1, 2, 3, 4, 5},
|
||||
wantOutput: "Uint = 1\nUint8 = 2\nUint16 = 3\nUint32 = 4" +
|
||||
"\nUint64 = 5\n",
|
||||
},
|
||||
"float fields": {
|
||||
input: struct {
|
||||
Float32 float32
|
||||
Float64 float64
|
||||
}{1.5, 2.5},
|
||||
wantOutput: "Float32 = 1.5\nFloat64 = 2.5\n",
|
||||
},
|
||||
"string field": {
|
||||
input: struct{ String string }{"foo"},
|
||||
wantOutput: "String = \"foo\"\n",
|
||||
},
|
||||
"string field and unexported field": {
|
||||
input: struct {
|
||||
String string
|
||||
unexported int
|
||||
}{"foo", 0},
|
||||
wantOutput: "String = \"foo\"\n",
|
||||
},
|
||||
"datetime field in UTC": {
|
||||
input: struct{ Date time.Time }{date},
|
||||
wantOutput: fmt.Sprintf("Date = %s\n", dateStr),
|
||||
},
|
||||
"datetime field as primitive": {
|
||||
// Using a map here to fail if isStructOrMap() returns true for
|
||||
// time.Time.
|
||||
input: map[string]interface{}{
|
||||
"Date": date,
|
||||
"Int": 1,
|
||||
},
|
||||
wantOutput: fmt.Sprintf("Date = %s\nInt = 1\n", dateStr),
|
||||
},
|
||||
"array fields": {
|
||||
input: struct {
|
||||
IntArray0 [0]int
|
||||
IntArray3 [3]int
|
||||
}{[0]int{}, [3]int{1, 2, 3}},
|
||||
wantOutput: "IntArray0 = []\nIntArray3 = [1, 2, 3]\n",
|
||||
},
|
||||
"slice fields": {
|
||||
input: struct{ IntSliceNil, IntSlice0, IntSlice3 []int }{
|
||||
nil, []int{}, []int{1, 2, 3},
|
||||
},
|
||||
wantOutput: "IntSlice0 = []\nIntSlice3 = [1, 2, 3]\n",
|
||||
},
|
||||
"datetime slices": {
|
||||
input: struct{ DatetimeSlice []time.Time }{
|
||||
[]time.Time{date, date},
|
||||
},
|
||||
wantOutput: fmt.Sprintf("DatetimeSlice = [%s, %s]\n",
|
||||
dateStr, dateStr),
|
||||
},
|
||||
"nested arrays and slices": {
|
||||
input: struct {
|
||||
SliceOfArrays [][2]int
|
||||
ArrayOfSlices [2][]int
|
||||
SliceOfArraysOfSlices [][2][]int
|
||||
ArrayOfSlicesOfArrays [2][][2]int
|
||||
SliceOfMixedArrays [][2]interface{}
|
||||
ArrayOfMixedSlices [2][]interface{}
|
||||
}{
|
||||
[][2]int{{1, 2}, {3, 4}},
|
||||
[2][]int{{1, 2}, {3, 4}},
|
||||
[][2][]int{
|
||||
{
|
||||
{1, 2}, {3, 4},
|
||||
},
|
||||
{
|
||||
{5, 6}, {7, 8},
|
||||
},
|
||||
},
|
||||
[2][][2]int{
|
||||
{
|
||||
{1, 2}, {3, 4},
|
||||
},
|
||||
{
|
||||
{5, 6}, {7, 8},
|
||||
},
|
||||
},
|
||||
[][2]interface{}{
|
||||
{1, 2}, {"a", "b"},
|
||||
},
|
||||
[2][]interface{}{
|
||||
{1, 2}, {"a", "b"},
|
||||
},
|
||||
},
|
||||
wantOutput: `SliceOfArrays = [[1, 2], [3, 4]]
|
||||
ArrayOfSlices = [[1, 2], [3, 4]]
|
||||
SliceOfArraysOfSlices = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]
|
||||
ArrayOfSlicesOfArrays = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]
|
||||
SliceOfMixedArrays = [[1, 2], ["a", "b"]]
|
||||
ArrayOfMixedSlices = [[1, 2], ["a", "b"]]
|
||||
`,
|
||||
},
|
||||
"empty slice": {
|
||||
input: struct{ Empty []interface{} }{[]interface{}{}},
|
||||
wantOutput: "Empty = []\n",
|
||||
},
|
||||
"(error) slice with element type mismatch (string and integer)": {
|
||||
input: struct{ Mixed []interface{} }{[]interface{}{1, "a"}},
|
||||
wantError: errArrayMixedElementTypes,
|
||||
},
|
||||
"(error) slice with element type mismatch (integer and float)": {
|
||||
input: struct{ Mixed []interface{} }{[]interface{}{1, 2.5}},
|
||||
wantError: errArrayMixedElementTypes,
|
||||
},
|
||||
"slice with elems of differing Go types, same TOML types": {
|
||||
input: struct {
|
||||
MixedInts []interface{}
|
||||
MixedFloats []interface{}
|
||||
}{
|
||||
[]interface{}{
|
||||
int(1), int8(2), int16(3), int32(4), int64(5),
|
||||
uint(1), uint8(2), uint16(3), uint32(4), uint64(5),
|
||||
},
|
||||
[]interface{}{float32(1.5), float64(2.5)},
|
||||
},
|
||||
wantOutput: "MixedInts = [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]\n" +
|
||||
"MixedFloats = [1.5, 2.5]\n",
|
||||
},
|
||||
"(error) slice w/ element type mismatch (one is nested array)": {
|
||||
input: struct{ Mixed []interface{} }{
|
||||
[]interface{}{1, []interface{}{2}},
|
||||
},
|
||||
wantError: errArrayMixedElementTypes,
|
||||
},
|
||||
"(error) slice with 1 nil element": {
|
||||
input: struct{ NilElement1 []interface{} }{[]interface{}{nil}},
|
||||
wantError: errArrayNilElement,
|
||||
},
|
||||
"(error) slice with 1 nil element (and other non-nil elements)": {
|
||||
input: struct{ NilElement []interface{} }{
|
||||
[]interface{}{1, nil},
|
||||
},
|
||||
wantError: errArrayNilElement,
|
||||
},
|
||||
"simple map": {
|
||||
input: map[string]int{"a": 1, "b": 2},
|
||||
wantOutput: "a = 1\nb = 2\n",
|
||||
},
|
||||
"map with interface{} value type": {
|
||||
input: map[string]interface{}{"a": 1, "b": "c"},
|
||||
wantOutput: "a = 1\nb = \"c\"\n",
|
||||
},
|
||||
"map with interface{} value type, some of which are structs": {
|
||||
input: map[string]interface{}{
|
||||
"a": struct{ Int int }{2},
|
||||
"b": 1,
|
||||
},
|
||||
wantOutput: "b = 1\n\n[a]\n Int = 2\n",
|
||||
},
|
||||
"nested map": {
|
||||
input: map[string]map[string]int{
|
||||
"a": {"b": 1},
|
||||
"c": {"d": 2},
|
||||
},
|
||||
wantOutput: "[a]\n b = 1\n\n[c]\n d = 2\n",
|
||||
},
|
||||
"nested struct": {
|
||||
input: struct{ Struct struct{ Int int } }{
|
||||
struct{ Int int }{1},
|
||||
},
|
||||
wantOutput: "[Struct]\n Int = 1\n",
|
||||
},
|
||||
"nested struct and non-struct field": {
|
||||
input: struct {
|
||||
Struct struct{ Int int }
|
||||
Bool bool
|
||||
}{struct{ Int int }{1}, true},
|
||||
wantOutput: "Bool = true\n\n[Struct]\n Int = 1\n",
|
||||
},
|
||||
"2 nested structs": {
|
||||
input: struct{ Struct1, Struct2 struct{ Int int } }{
|
||||
struct{ Int int }{1}, struct{ Int int }{2},
|
||||
},
|
||||
wantOutput: "[Struct1]\n Int = 1\n\n[Struct2]\n Int = 2\n",
|
||||
},
|
||||
"deeply nested structs": {
|
||||
input: struct {
|
||||
Struct1, Struct2 struct{ Struct3 *struct{ Int int } }
|
||||
}{
|
||||
struct{ Struct3 *struct{ Int int } }{&struct{ Int int }{1}},
|
||||
struct{ Struct3 *struct{ Int int } }{nil},
|
||||
},
|
||||
wantOutput: "[Struct1]\n [Struct1.Struct3]\n Int = 1" +
|
||||
"\n\n[Struct2]\n",
|
||||
},
|
||||
"nested struct with nil struct elem": {
|
||||
input: struct {
|
||||
Struct struct{ Inner *struct{ Int int } }
|
||||
}{
|
||||
struct{ Inner *struct{ Int int } }{nil},
|
||||
},
|
||||
wantOutput: "[Struct]\n",
|
||||
},
|
||||
"nested struct with no fields": {
|
||||
input: struct {
|
||||
Struct struct{ Inner struct{} }
|
||||
}{
|
||||
struct{ Inner struct{} }{struct{}{}},
|
||||
},
|
||||
wantOutput: "[Struct]\n [Struct.Inner]\n",
|
||||
},
|
||||
"struct with tags": {
|
||||
input: struct {
|
||||
Struct struct {
|
||||
Int int `toml:"_int"`
|
||||
} `toml:"_struct"`
|
||||
Bool bool `toml:"_bool"`
|
||||
}{
|
||||
struct {
|
||||
Int int `toml:"_int"`
|
||||
}{1}, true,
|
||||
},
|
||||
wantOutput: "_bool = true\n\n[_struct]\n _int = 1\n",
|
||||
},
|
||||
"embedded struct": {
|
||||
input: struct{ Embedded }{Embedded{1}},
|
||||
wantOutput: "_int = 1\n",
|
||||
},
|
||||
"embedded *struct": {
|
||||
input: struct{ *Embedded }{&Embedded{1}},
|
||||
wantOutput: "_int = 1\n",
|
||||
},
|
||||
"nested embedded struct": {
|
||||
input: struct {
|
||||
Struct struct{ Embedded } `toml:"_struct"`
|
||||
}{struct{ Embedded }{Embedded{1}}},
|
||||
wantOutput: "[_struct]\n _int = 1\n",
|
||||
},
|
||||
"nested embedded *struct": {
|
||||
input: struct {
|
||||
Struct struct{ *Embedded } `toml:"_struct"`
|
||||
}{struct{ *Embedded }{&Embedded{1}}},
|
||||
wantOutput: "[_struct]\n _int = 1\n",
|
||||
},
|
||||
"array of tables": {
|
||||
input: struct {
|
||||
Structs []*struct{ Int int } `toml:"struct"`
|
||||
}{
|
||||
[]*struct{ Int int }{{1}, {3}},
|
||||
},
|
||||
wantOutput: "[[struct]]\n Int = 1\n\n[[struct]]\n Int = 3\n",
|
||||
},
|
||||
"array of tables order": {
|
||||
input: map[string]interface{}{
|
||||
"map": map[string]interface{}{
|
||||
"zero": 5,
|
||||
"arr": []map[string]int{
|
||||
map[string]int{
|
||||
"friend": 5,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantOutput: "[map]\n zero = 5\n\n [[map.arr]]\n friend = 5\n",
|
||||
},
|
||||
"(error) top-level slice": {
|
||||
input: []struct{ Int int }{{1}, {2}, {3}},
|
||||
wantError: errNoKey,
|
||||
},
|
||||
"(error) slice of slice": {
|
||||
input: struct {
|
||||
Slices [][]struct{ Int int }
|
||||
}{
|
||||
[][]struct{ Int int }{{{1}}, {{2}}, {{3}}},
|
||||
},
|
||||
wantError: errArrayNoTable,
|
||||
},
|
||||
"(error) map no string key": {
|
||||
input: map[int]string{1: ""},
|
||||
wantError: errNonString,
|
||||
},
|
||||
"(error) anonymous non-struct": {
|
||||
input: struct{ NonStruct }{5},
|
||||
wantError: errAnonNonStruct,
|
||||
},
|
||||
"(error) empty key name": {
|
||||
input: map[string]int{"": 1},
|
||||
wantError: errAnything,
|
||||
},
|
||||
"(error) empty map name": {
|
||||
input: map[string]interface{}{
|
||||
"": map[string]int{"v": 1},
|
||||
},
|
||||
wantError: errAnything,
|
||||
},
|
||||
}
|
||||
for label, test := range tests {
|
||||
encodeExpected(t, label, test.input, test.wantOutput, test.wantError)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncodeNestedTableArrays(t *testing.T) {
|
||||
type song struct {
|
||||
Name string `toml:"name"`
|
||||
}
|
||||
type album struct {
|
||||
Name string `toml:"name"`
|
||||
Songs []song `toml:"songs"`
|
||||
}
|
||||
type springsteen struct {
|
||||
Albums []album `toml:"albums"`
|
||||
}
|
||||
value := springsteen{
|
||||
[]album{
|
||||
{"Born to Run",
|
||||
[]song{{"Jungleland"}, {"Meeting Across the River"}}},
|
||||
{"Born in the USA",
|
||||
[]song{{"Glory Days"}, {"Dancing in the Dark"}}},
|
||||
},
|
||||
}
|
||||
expected := `[[albums]]
|
||||
name = "Born to Run"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Jungleland"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Meeting Across the River"
|
||||
|
||||
[[albums]]
|
||||
name = "Born in the USA"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Glory Days"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Dancing in the Dark"
|
||||
`
|
||||
encodeExpected(t, "nested table arrays", value, expected, nil)
|
||||
}
|
||||
|
||||
func TestEncodeArrayHashWithNormalHashOrder(t *testing.T) {
|
||||
type Alpha struct {
|
||||
V int
|
||||
}
|
||||
type Beta struct {
|
||||
V int
|
||||
}
|
||||
type Conf struct {
|
||||
V int
|
||||
A Alpha
|
||||
B []Beta
|
||||
}
|
||||
|
||||
val := Conf{
|
||||
V: 1,
|
||||
A: Alpha{2},
|
||||
B: []Beta{{3}},
|
||||
}
|
||||
expected := "V = 1\n\n[A]\n V = 2\n\n[[B]]\n V = 3\n"
|
||||
encodeExpected(t, "array hash with normal hash order", val, expected, nil)
|
||||
}
|
||||
|
||||
func encodeExpected(
|
||||
t *testing.T, label string, val interface{}, wantStr string, wantErr error,
|
||||
) {
|
||||
var buf bytes.Buffer
|
||||
enc := NewEncoder(&buf)
|
||||
err := enc.Encode(val)
|
||||
if err != wantErr {
|
||||
if wantErr != nil {
|
||||
if wantErr == errAnything && err != nil {
|
||||
return
|
||||
}
|
||||
t.Errorf("%s: want Encode error %v, got %v", label, wantErr, err)
|
||||
} else {
|
||||
t.Errorf("%s: Encode failed: %s", label, err)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if got := buf.String(); wantStr != got {
|
||||
t.Errorf("%s: want\n-----\n%q\n-----\nbut got\n-----\n%q\n-----\n",
|
||||
label, wantStr, got)
|
||||
}
|
||||
}
|
||||
|
||||
func ExampleEncoder_Encode() {
|
||||
date, _ := time.Parse(time.RFC822, "14 Mar 10 18:00 UTC")
|
||||
var config = map[string]interface{}{
|
||||
"date": date,
|
||||
"counts": []int{1, 1, 2, 3, 5, 8},
|
||||
"hash": map[string]string{
|
||||
"key1": "val1",
|
||||
"key2": "val2",
|
||||
},
|
||||
}
|
||||
buf := new(bytes.Buffer)
|
||||
if err := NewEncoder(buf).Encode(config); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Println(buf.String())
|
||||
|
||||
// Output:
|
||||
// counts = [1, 1, 2, 3, 5, 8]
|
||||
// date = 2010-03-14T18:00:00Z
|
||||
//
|
||||
// [hash]
|
||||
// key1 = "val1"
|
||||
// key2 = "val2"
|
||||
}
|
19 Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types.go generated vendored Normal file
@@ -0,0 +1,19 @@
// +build go1.2
|
||||
|
||||
package toml
|
||||
|
||||
// In order to support Go 1.1, we define our own TextMarshaler and
|
||||
// TextUnmarshaler types. For Go 1.2+, we just alias them with the
|
||||
// standard library interfaces.
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
)
|
||||
|
||||
// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
|
||||
// so that Go 1.1 can be supported.
|
||||
type TextMarshaler encoding.TextMarshaler
|
||||
|
||||
// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined here
|
||||
// so that Go 1.1 can be supported.
|
||||
type TextUnmarshaler encoding.TextUnmarshaler
|
18 Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types_1.1.go generated vendored Normal file
@@ -0,0 +1,18 @@
// +build !go1.2
|
||||
|
||||
package toml
|
||||
|
||||
// These interfaces were introduced in Go 1.2, so we add them manually when
|
||||
// compiling for Go 1.1.
|
||||
|
||||
// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
|
||||
// so that Go 1.1 can be supported.
|
||||
type TextMarshaler interface {
|
||||
MarshalText() (text []byte, err error)
|
||||
}
|
||||
|
||||
// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined here
|
||||
// so that Go 1.1 can be supported.
|
||||
type TextUnmarshaler interface {
|
||||
UnmarshalText(text []byte) error
|
||||
}
|
725 Godeps/_workspace/src/github.com/BurntSushi/toml/lex.go generated vendored Normal file
@@ -0,0 +1,725 @@
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type itemType int
|
||||
|
||||
const (
|
||||
itemError itemType = iota
|
||||
itemNIL // used in the parser to indicate no type
|
||||
itemEOF
|
||||
itemText
|
||||
itemString
|
||||
itemBool
|
||||
itemInteger
|
||||
itemFloat
|
||||
itemDatetime
|
||||
itemArray // the start of an array
|
||||
itemArrayEnd
|
||||
itemTableStart
|
||||
itemTableEnd
|
||||
itemArrayTableStart
|
||||
itemArrayTableEnd
|
||||
itemKeyStart
|
||||
itemCommentStart
|
||||
)
|
||||
|
||||
const (
|
||||
eof = 0
|
||||
tableStart = '['
|
||||
tableEnd = ']'
|
||||
arrayTableStart = '['
|
||||
arrayTableEnd = ']'
|
||||
tableSep = '.'
|
||||
keySep = '='
|
||||
arrayStart = '['
|
||||
arrayEnd = ']'
|
||||
arrayValTerm = ','
|
||||
commentStart = '#'
|
||||
stringStart = '"'
|
||||
stringEnd = '"'
|
||||
)
|
||||
|
||||
type stateFn func(lx *lexer) stateFn
|
||||
|
||||
type lexer struct {
|
||||
input string
|
||||
start int
|
||||
pos int
|
||||
width int
|
||||
line int
|
||||
state stateFn
|
||||
items chan item
|
||||
|
||||
// A stack of state functions used to maintain context.
|
||||
// The idea is to reuse parts of the state machine in various places.
|
||||
// For example, values can appear at the top level or within arbitrarily
|
||||
// nested arrays. The last state on the stack is used after a value has
|
||||
// been lexed. Similarly for comments.
|
||||
stack []stateFn
|
||||
}
|
||||
|
||||
type item struct {
|
||||
typ itemType
|
||||
val string
|
||||
line int
|
||||
}
|
||||
|
||||
func (lx *lexer) nextItem() item {
|
||||
for {
|
||||
select {
|
||||
case item := <-lx.items:
|
||||
return item
|
||||
default:
|
||||
lx.state = lx.state(lx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func lex(input string) *lexer {
|
||||
lx := &lexer{
|
||||
input: input + "\n",
|
||||
state: lexTop,
|
||||
line: 1,
|
||||
items: make(chan item, 10),
|
||||
stack: make([]stateFn, 0, 10),
|
||||
}
|
||||
return lx
|
||||
}
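// Illustrative sketch, not part of the vendored file: driving the lexer by hand
// with lex and nextItem, the way the parser consumes it. Only identifiers
// defined in this file are used; the function name exampleLex is hypothetical.
func exampleLex() {
	lx := lex(`answer = 42 # a comment`)
	for {
		it := lx.nextItem()
		if it.typ == itemEOF || it.typ == itemError {
			break
		}
		fmt.Println(it) // e.g. (KeyStart, ), (Text, answer), (Integer, 42), ...
	}
}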
|
||||
|
||||
func (lx *lexer) push(state stateFn) {
|
||||
lx.stack = append(lx.stack, state)
|
||||
}
|
||||
|
||||
func (lx *lexer) pop() stateFn {
|
||||
if len(lx.stack) == 0 {
|
||||
return lx.errorf("BUG in lexer: no states to pop.")
|
||||
}
|
||||
last := lx.stack[len(lx.stack)-1]
|
||||
lx.stack = lx.stack[0 : len(lx.stack)-1]
|
||||
return last
|
||||
}
|
||||
|
||||
func (lx *lexer) current() string {
|
||||
return lx.input[lx.start:lx.pos]
|
||||
}
|
||||
|
||||
func (lx *lexer) emit(typ itemType) {
|
||||
lx.items <- item{typ, lx.current(), lx.line}
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
func (lx *lexer) next() (r rune) {
|
||||
if lx.pos >= len(lx.input) {
|
||||
lx.width = 0
|
||||
return eof
|
||||
}
|
||||
|
||||
if lx.input[lx.pos] == '\n' {
|
||||
lx.line++
|
||||
}
|
||||
r, lx.width = utf8.DecodeRuneInString(lx.input[lx.pos:])
|
||||
lx.pos += lx.width
|
||||
return r
|
||||
}
|
||||
|
||||
// ignore skips over the pending input before this point.
|
||||
func (lx *lexer) ignore() {
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
// backup steps back one rune. Can be called only once per call of next.
|
||||
func (lx *lexer) backup() {
|
||||
lx.pos -= lx.width
|
||||
if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' {
|
||||
lx.line--
|
||||
}
|
||||
}
|
||||
|
||||
// accept consumes the next rune if it's equal to `valid`.
|
||||
func (lx *lexer) accept(valid rune) bool {
|
||||
if lx.next() == valid {
|
||||
return true
|
||||
}
|
||||
lx.backup()
|
||||
return false
|
||||
}
|
||||
|
||||
// peek returns but does not consume the next rune in the input.
|
||||
func (lx *lexer) peek() rune {
|
||||
r := lx.next()
|
||||
lx.backup()
|
||||
return r
|
||||
}
|
||||
|
||||
// errorf stops all lexing by emitting an error and returning `nil`.
|
||||
// Note that any value that is a character is escaped if it's a special
|
||||
// character (new lines, tabs, etc.).
|
||||
func (lx *lexer) errorf(format string, values ...interface{}) stateFn {
|
||||
lx.items <- item{
|
||||
itemError,
|
||||
fmt.Sprintf(format, values...),
|
||||
lx.line,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// lexTop consumes elements at the top level of TOML data.
|
||||
func lexTop(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isWhitespace(r) || isNL(r) {
|
||||
return lexSkip(lx, lexTop)
|
||||
}
|
||||
|
||||
switch r {
|
||||
case commentStart:
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case tableStart:
|
||||
return lexTableStart
|
||||
case eof:
|
||||
if lx.pos > lx.start {
|
||||
return lx.errorf("Unexpected EOF.")
|
||||
}
|
||||
lx.emit(itemEOF)
|
||||
return nil
|
||||
}
|
||||
|
||||
// At this point, the only valid item can be a key, so we back up
|
||||
// and let the key lexer do the rest.
|
||||
lx.backup()
|
||||
lx.push(lexTopEnd)
|
||||
return lexKeyStart
|
||||
}
|
||||
|
||||
// lexTopEnd is entered whenever a top-level item has been consumed. (A value
|
||||
// or a table.) It must see only whitespace, and will turn back to lexTop
|
||||
// upon a new line. If it sees EOF, it will quit the lexer successfully.
|
||||
func lexTopEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == commentStart:
|
||||
// a comment will read to a new line for us.
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case isWhitespace(r):
|
||||
return lexTopEnd
|
||||
case isNL(r):
|
||||
lx.ignore()
|
||||
return lexTop
|
||||
case r == eof:
|
||||
lx.ignore()
|
||||
return lexTop
|
||||
}
|
||||
return lx.errorf("Expected a top-level item to end with a new line, "+
|
||||
"comment or EOF, but got %q instead.", r)
|
||||
}
|
||||
|
||||
// lexTableStart lexes the beginning of a table. Namely, it makes sure that
|
||||
// it starts with a character other than '.' and ']'.
|
||||
// It assumes that '[' has already been consumed.
|
||||
// It also handles the case that this is an item in an array of tables.
|
||||
// e.g., '[[name]]'.
|
||||
func lexTableStart(lx *lexer) stateFn {
|
||||
if lx.peek() == arrayTableStart {
|
||||
lx.next()
|
||||
lx.emit(itemArrayTableStart)
|
||||
lx.push(lexArrayTableEnd)
|
||||
} else {
|
||||
lx.emit(itemTableStart)
|
||||
lx.push(lexTableEnd)
|
||||
}
|
||||
return lexTableNameStart
|
||||
}
|
||||
|
||||
func lexTableEnd(lx *lexer) stateFn {
|
||||
lx.emit(itemTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexArrayTableEnd(lx *lexer) stateFn {
|
||||
if r := lx.next(); r != arrayTableEnd {
|
||||
return lx.errorf("Expected end of table array name delimiter %q, "+
|
||||
"but got %q instead.", arrayTableEnd, r)
|
||||
}
|
||||
lx.emit(itemArrayTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexTableNameStart(lx *lexer) stateFn {
|
||||
switch lx.next() {
|
||||
case tableEnd:
|
||||
return lx.errorf("Unexpected end of table. (Tables cannot " +
|
||||
"be empty.)")
|
||||
case tableSep:
|
||||
return lx.errorf("Unexpected table separator. (Tables cannot " +
|
||||
"be empty.)")
|
||||
}
|
||||
return lexTableName
|
||||
}
|
||||
|
||||
// lexTableName lexes the name of a table. It assumes that at least one
|
||||
// valid character for the table has already been read.
|
||||
func lexTableName(lx *lexer) stateFn {
|
||||
switch lx.peek() {
|
||||
case tableStart:
|
||||
return lx.errorf("Table names cannot contain %q or %q.",
|
||||
tableStart, tableEnd)
|
||||
case tableEnd:
|
||||
lx.emit(itemText)
|
||||
lx.next()
|
||||
return lx.pop()
|
||||
case tableSep:
|
||||
lx.emit(itemText)
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lexTableNameStart
|
||||
}
|
||||
lx.next()
|
||||
return lexTableName
|
||||
}
|
||||
|
||||
// lexKeyStart consumes a key name up until the first non-whitespace character.
|
||||
// lexKeyStart will ignore whitespace.
|
||||
func lexKeyStart(lx *lexer) stateFn {
|
||||
r := lx.peek()
|
||||
switch {
|
||||
case r == keySep:
|
||||
return lx.errorf("Unexpected key separator %q.", keySep)
|
||||
case isWhitespace(r) || isNL(r):
|
||||
lx.next()
|
||||
return lexSkip(lx, lexKeyStart)
|
||||
}
|
||||
|
||||
lx.ignore()
|
||||
lx.emit(itemKeyStart)
|
||||
lx.next()
|
||||
return lexKey
|
||||
}
|
||||
|
||||
// lexKey consumes the text of a key. Assumes that the first character (which
|
||||
// is not whitespace) has already been consumed.
|
||||
func lexKey(lx *lexer) stateFn {
|
||||
r := lx.peek()
|
||||
|
||||
// XXX: Possible divergence from spec?
|
||||
// "Keys start with the first non-whitespace character and end with the
|
||||
// last non-whitespace character before the equals sign."
|
||||
// Note here that whitespace is either a tab or a space.
|
||||
// But we'll call it quits if we see a new line too.
|
||||
if isWhitespace(r) || isNL(r) {
|
||||
lx.emit(itemText)
|
||||
return lexKeyEnd
|
||||
}
|
||||
|
||||
// Let's also call it quits if we see an equals sign.
|
||||
if r == keySep {
|
||||
lx.emit(itemText)
|
||||
return lexKeyEnd
|
||||
}
|
||||
|
||||
lx.next()
|
||||
return lexKey
|
||||
}
|
||||
|
||||
// lexKeyEnd consumes the end of a key (up to the key separator).
|
||||
// Assumes that the first whitespace character after a key (or the '='
|
||||
// separator) has NOT been consumed.
|
||||
func lexKeyEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r) || isNL(r):
|
||||
return lexSkip(lx, lexKeyEnd)
|
||||
case r == keySep:
|
||||
return lexSkip(lx, lexValue)
|
||||
}
|
||||
return lx.errorf("Expected key separator %q, but got %q instead.",
|
||||
keySep, r)
|
||||
}
|
||||
|
||||
// lexValue starts the consumption of a value anywhere a value is expected.
|
||||
// lexValue will ignore whitespace.
|
||||
// After a value is lexed, the last state on the stack is popped and returned.
|
||||
func lexValue(lx *lexer) stateFn {
|
||||
// We allow whitespace to precede a value, but NOT new lines.
|
||||
// In array syntax, the array states are responsible for ignoring new lines.
|
||||
r := lx.next()
|
||||
if isWhitespace(r) {
|
||||
return lexSkip(lx, lexValue)
|
||||
}
|
||||
|
||||
switch {
|
||||
case r == arrayStart:
|
||||
lx.ignore()
|
||||
lx.emit(itemArray)
|
||||
return lexArrayValue
|
||||
case r == stringStart:
|
||||
lx.ignore() // ignore the '"'
|
||||
return lexString
|
||||
case r == 't':
|
||||
return lexTrue
|
||||
case r == 'f':
|
||||
return lexFalse
|
||||
case r == '-':
|
||||
return lexNumberStart
|
||||
case isDigit(r):
|
||||
lx.backup() // avoid an extra state and use the same as above
|
||||
return lexNumberOrDateStart
|
||||
case r == '.': // special error case, be kind to users
|
||||
return lx.errorf("Floats must start with a digit, not '.'.")
|
||||
}
|
||||
return lx.errorf("Expected value but found %q instead.", r)
|
||||
}
|
||||
|
||||
// lexArrayValue consumes one value in an array. It assumes that '[' or ','
|
||||
// have already been consumed. All whitespace and new lines are ignored.
|
||||
func lexArrayValue(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r) || isNL(r):
|
||||
return lexSkip(lx, lexArrayValue)
|
||||
case r == commentStart:
|
||||
lx.push(lexArrayValue)
|
||||
return lexCommentStart
|
||||
case r == arrayValTerm:
|
||||
return lx.errorf("Unexpected array value terminator %q.",
|
||||
arrayValTerm)
|
||||
case r == arrayEnd:
|
||||
return lexArrayEnd
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.push(lexArrayValueEnd)
|
||||
return lexValue
|
||||
}
|
||||
|
||||
// lexArrayValueEnd consumes the cruft between values of an array. Namely,
|
||||
// it ignores whitespace and expects either a ',' or a ']'.
|
||||
func lexArrayValueEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r) || isNL(r):
|
||||
return lexSkip(lx, lexArrayValueEnd)
|
||||
case r == commentStart:
|
||||
lx.push(lexArrayValueEnd)
|
||||
return lexCommentStart
|
||||
case r == arrayValTerm:
|
||||
lx.ignore()
|
||||
return lexArrayValue // move on to the next value
|
||||
case r == arrayEnd:
|
||||
return lexArrayEnd
|
||||
}
|
||||
return lx.errorf("Expected an array value terminator %q or an array "+
|
||||
"terminator %q, but got %q instead.", arrayValTerm, arrayEnd, r)
|
||||
}
|
||||
|
||||
// lexArrayEnd finishes the lexing of an array. It assumes that a ']' has
|
||||
// just been consumed.
|
||||
func lexArrayEnd(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
lx.emit(itemArrayEnd)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexString consumes the inner contents of a string. It assumes that the
|
||||
// beginning '"' has already been consumed and ignored.
|
||||
func lexString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isNL(r):
|
||||
return lx.errorf("Strings cannot contain new lines.")
|
||||
case r == '\\':
|
||||
return lexStringEscape
|
||||
case r == stringEnd:
|
||||
lx.backup()
|
||||
lx.emit(itemString)
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
return lexString
|
||||
}
|
||||
|
||||
// lexStringEscape consumes an escaped character. It assumes that the preceding
|
||||
// '\\' has already been consumed.
|
||||
func lexStringEscape(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch r {
|
||||
case 'b':
|
||||
fallthrough
|
||||
case 't':
|
||||
fallthrough
|
||||
case 'n':
|
||||
fallthrough
|
||||
case 'f':
|
||||
fallthrough
|
||||
case 'r':
|
||||
fallthrough
|
||||
case '"':
|
||||
fallthrough
|
||||
case '/':
|
||||
fallthrough
|
||||
case '\\':
|
||||
return lexString
|
||||
case 'u':
|
||||
return lexStringUnicode
|
||||
}
|
||||
return lx.errorf("Invalid escape character %q. Only the following "+
|
||||
"escape characters are allowed: "+
|
||||
"\\b, \\t, \\n, \\f, \\r, \\\", \\/, \\\\, and \\uXXXX.", r)
|
||||
}
|
||||
|
||||
// lexStringUnicode consumes four hexadecimal digits following '\u'. It assumes
// that the '\u' has already been consumed.
|
||||
func lexStringUnicode(lx *lexer) stateFn {
|
||||
var r rune
|
||||
|
||||
for i := 0; i < 4; i++ {
|
||||
r = lx.next()
|
||||
if !isHexadecimal(r) {
|
||||
return lx.errorf("Expected four hexadecimal digits after '\\x', "+
|
||||
"but got '%s' instead.", lx.current())
|
||||
}
|
||||
}
|
||||
return lexString
|
||||
}
|
||||
|
||||
// lexNumberOrDateStart consumes either a (positive) integer, float or datetime.
|
||||
// It assumes that NO negative sign has been consumed.
|
||||
func lexNumberOrDateStart(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
if r == '.' {
|
||||
return lx.errorf("Floats must start with a digit, not '.'.")
|
||||
} else {
|
||||
return lx.errorf("Expected a digit but got %q.", r)
|
||||
}
|
||||
}
|
||||
return lexNumberOrDate
|
||||
}
|
||||
|
||||
// lexNumberOrDate consumes either a (positive) integer, float or datetime.
|
||||
func lexNumberOrDate(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == '-':
|
||||
if lx.pos-lx.start != 5 {
|
||||
return lx.errorf("All ISO8601 dates must be in full Zulu form.")
|
||||
}
|
||||
return lexDateAfterYear
|
||||
case isDigit(r):
|
||||
return lexNumberOrDate
|
||||
case r == '.':
|
||||
return lexFloatStart
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemInteger)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexDateAfterYear consumes a full Zulu Datetime in ISO8601 format.
|
||||
// It assumes that "YYYY-" has already been consumed.
|
||||
func lexDateAfterYear(lx *lexer) stateFn {
|
||||
formats := []rune{
|
||||
// digits are '0'.
|
||||
// everything else is direct equality.
|
||||
'0', '0', '-', '0', '0',
|
||||
'T',
|
||||
'0', '0', ':', '0', '0', ':', '0', '0',
|
||||
'Z',
|
||||
}
|
||||
for _, f := range formats {
|
||||
r := lx.next()
|
||||
if f == '0' {
|
||||
if !isDigit(r) {
|
||||
return lx.errorf("Expected digit in ISO8601 datetime, "+
|
||||
"but found %q instead.", r)
|
||||
}
|
||||
} else if f != r {
|
||||
return lx.errorf("Expected %q in ISO8601 datetime, "+
|
||||
"but found %q instead.", f, r)
|
||||
}
|
||||
}
|
||||
lx.emit(itemDatetime)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexNumberStart consumes either an integer or a float. It assumes that a
|
||||
// negative sign has already been read, but that *no* digits have been consumed.
|
||||
// lexNumberStart will move to the appropriate integer or float states.
|
||||
func lexNumberStart(lx *lexer) stateFn {
|
||||
// we MUST see a digit. Even floats have to start with a digit.
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
if r == '.' {
|
||||
return lx.errorf("Floats must start with a digit, not '.'.")
|
||||
} else {
|
||||
return lx.errorf("Expected a digit but got %q.", r)
|
||||
}
|
||||
}
|
||||
return lexNumber
|
||||
}
|
||||
|
||||
// lexNumber consumes an integer or a float after seeing the first digit.
|
||||
func lexNumber(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isDigit(r):
|
||||
return lexNumber
|
||||
case r == '.':
|
||||
return lexFloatStart
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemInteger)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexFloatStart starts the consumption of digits of a float after a '.'.
|
||||
// Namely, at least one digit is required.
|
||||
func lexFloatStart(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
return lx.errorf("Floats must have a digit after the '.', but got "+
|
||||
"%q instead.", r)
|
||||
}
|
||||
return lexFloat
|
||||
}
|
||||
|
||||
// lexFloat consumes the digits of a float after a '.'.
|
||||
// Assumes that one digit has been consumed after a '.' already.
|
||||
func lexFloat(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isDigit(r) {
|
||||
return lexFloat
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemFloat)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexConst consumes the s[1:] in s. It assumes that s[0] has already been
|
||||
// consumed.
|
||||
func lexConst(lx *lexer, s string) stateFn {
|
||||
for i := range s[1:] {
|
||||
if r := lx.next(); r != rune(s[i+1]) {
|
||||
return lx.errorf("Expected %q, but found %q instead.", s[:i+1],
|
||||
s[:i]+string(r))
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// lexTrue consumes the "rue" in "true". It assumes that 't' has already
|
||||
// been consumed.
|
||||
func lexTrue(lx *lexer) stateFn {
|
||||
if fn := lexConst(lx, "true"); fn != nil {
|
||||
return fn
|
||||
}
|
||||
lx.emit(itemBool)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexFalse consumes the "alse" in "false". It assumes that 'f' has already
|
||||
// been consumed.
|
||||
func lexFalse(lx *lexer) stateFn {
|
||||
if fn := lexConst(lx, "false"); fn != nil {
|
||||
return fn
|
||||
}
|
||||
lx.emit(itemBool)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexCommentStart begins the lexing of a comment. It will emit
|
||||
// itemCommentStart and consume no characters, passing control to lexComment.
|
||||
func lexCommentStart(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
lx.emit(itemCommentStart)
|
||||
return lexComment
|
||||
}
|
||||
|
||||
// lexComment lexes an entire comment. It assumes that '#' has been consumed.
|
||||
// It will consume *up to* the first new line character, and pass control
|
||||
// back to the last state on the stack.
|
||||
func lexComment(lx *lexer) stateFn {
|
||||
r := lx.peek()
|
||||
if isNL(r) || r == eof {
|
||||
lx.emit(itemText)
|
||||
return lx.pop()
|
||||
}
|
||||
lx.next()
|
||||
return lexComment
|
||||
}
|
||||
|
||||
// lexSkip ignores all slurped input and moves on to the next state.
|
||||
func lexSkip(lx *lexer, nextState stateFn) stateFn {
|
||||
return func(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
return nextState
|
||||
}
|
||||
}
|
||||
|
||||
// isWhitespace returns true if `r` is a whitespace character according
|
||||
// to the spec.
|
||||
func isWhitespace(r rune) bool {
|
||||
return r == '\t' || r == ' '
|
||||
}
|
||||
|
||||
func isNL(r rune) bool {
|
||||
return r == '\n' || r == '\r'
|
||||
}
|
||||
|
||||
func isDigit(r rune) bool {
|
||||
return r >= '0' && r <= '9'
|
||||
}
|
||||
|
||||
func isHexadecimal(r rune) bool {
|
||||
return (r >= '0' && r <= '9') ||
|
||||
(r >= 'a' && r <= 'f') ||
|
||||
(r >= 'A' && r <= 'F')
|
||||
}
|
||||
|
||||
func (itype itemType) String() string {
|
||||
switch itype {
|
||||
case itemError:
|
||||
return "Error"
|
||||
case itemNIL:
|
||||
return "NIL"
|
||||
case itemEOF:
|
||||
return "EOF"
|
||||
case itemText:
|
||||
return "Text"
|
||||
case itemString:
|
||||
return "String"
|
||||
case itemBool:
|
||||
return "Bool"
|
||||
case itemInteger:
|
||||
return "Integer"
|
||||
case itemFloat:
|
||||
return "Float"
|
||||
case itemDatetime:
|
||||
return "DateTime"
|
||||
case itemTableStart:
|
||||
return "TableStart"
|
||||
case itemTableEnd:
|
||||
return "TableEnd"
|
||||
case itemKeyStart:
|
||||
return "KeyStart"
|
||||
case itemArray:
|
||||
return "Array"
|
||||
case itemArrayEnd:
|
||||
return "ArrayEnd"
|
||||
case itemCommentStart:
|
||||
return "CommentStart"
|
||||
}
|
||||
panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype)))
|
||||
}
|
||||
|
||||
func (item item) String() string {
|
||||
return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val)
|
||||
}
|
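For reference, the state functions above all share the same `stateFn` shape: each state consumes some input and returns the next state, or stops. Below is a minimal, self-contained sketch of that pattern on a simplified ASCII-only lexer; the `miniLexer` type and its token rules are illustrative and are not part of this package.

```go
package main

import (
	"fmt"
	"unicode"
)

const eof = rune(-1)

// stateFn mirrors the pattern used by lexNumber, lexFloat, lexComment, etc.:
// each state consumes input and returns the next state, or nil to stop.
type stateFn func(l *miniLexer) stateFn

type miniLexer struct {
	input  string
	start  int
	pos    int
	tokens []string
}

func (l *miniLexer) next() rune {
	if l.pos >= len(l.input) {
		return eof
	}
	r := rune(l.input[l.pos]) // ASCII-only sketch
	l.pos++
	return r
}

func (l *miniLexer) backup() { l.pos-- }

func (l *miniLexer) emit() {
	l.tokens = append(l.tokens, l.input[l.start:l.pos])
	l.start = l.pos
}

// lexStart skips spaces and dispatches on the first rune of a token.
func lexStart(l *miniLexer) stateFn {
	switch r := l.next(); {
	case r == eof:
		return nil
	case r == ' ':
		l.start = l.pos
		return lexStart
	case unicode.IsDigit(r):
		return lexDigits
	default:
		return lexWord
	}
}

// lexDigits consumes digits; like lexNumber above, it assumes the first
// digit has already been consumed.
func lexDigits(l *miniLexer) stateFn {
	for {
		r := l.next()
		if r == eof {
			l.emit()
			return nil
		}
		if !unicode.IsDigit(r) {
			l.backup()
			l.emit()
			return lexStart
		}
	}
}

// lexWord consumes everything up to the next space.
func lexWord(l *miniLexer) stateFn {
	for {
		r := l.next()
		if r == eof {
			l.emit()
			return nil
		}
		if r == ' ' {
			l.backup()
			l.emit()
			return lexStart
		}
	}
}

func main() {
	l := &miniLexer{input: "abc 123 x7"}
	for state := stateFn(lexStart); state != nil; state = state(l) {
	}
	fmt.Println(l.tokens) // [abc 123 x7]
}
```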
417
Godeps/_workspace/src/github.com/BurntSushi/toml/parse.go
generated
vendored
Normal file
|
@ -0,0 +1,417 @@
|
|||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type parser struct {
|
||||
mapping map[string]interface{}
|
||||
types map[string]tomlType
|
||||
lx *lexer
|
||||
|
||||
// A list of keys in the order that they appear in the TOML data.
|
||||
ordered []Key
|
||||
|
||||
// the full key for the current hash in scope
|
||||
context Key
|
||||
|
||||
// the base key name for everything except hashes
|
||||
currentKey string
|
||||
|
||||
// rough approximation of line number
|
||||
approxLine int
|
||||
|
||||
// A map of 'key.group.names' to whether they were created implicitly.
|
||||
implicits map[string]bool
|
||||
}
|
||||
|
||||
type parseError string
|
||||
|
||||
func (pe parseError) Error() string {
|
||||
return string(pe)
|
||||
}
|
||||
|
||||
func parse(data string) (p *parser, err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
var ok bool
|
||||
if err, ok = r.(parseError); ok {
|
||||
return
|
||||
}
|
||||
panic(r)
|
||||
}
|
||||
}()
|
||||
|
||||
p = &parser{
|
||||
mapping: make(map[string]interface{}),
|
||||
types: make(map[string]tomlType),
|
||||
lx: lex(data),
|
||||
ordered: make([]Key, 0),
|
||||
implicits: make(map[string]bool),
|
||||
}
|
||||
for {
|
||||
item := p.next()
|
||||
if item.typ == itemEOF {
|
||||
break
|
||||
}
|
||||
p.topLevel(item)
|
||||
}
|
||||
|
||||
return p, nil
|
||||
}
|
||||
|
||||
func (p *parser) panicf(format string, v ...interface{}) {
|
||||
msg := fmt.Sprintf("Near line %d, key '%s': %s",
|
||||
p.approxLine, p.current(), fmt.Sprintf(format, v...))
|
||||
panic(parseError(msg))
|
||||
}
|
||||
|
||||
func (p *parser) next() item {
|
||||
it := p.lx.nextItem()
|
||||
if it.typ == itemError {
|
||||
p.panicf("Near line %d: %s", it.line, it.val)
|
||||
}
|
||||
return it
|
||||
}
|
||||
|
||||
func (p *parser) bug(format string, v ...interface{}) {
|
||||
log.Fatalf("BUG: %s\n\n", fmt.Sprintf(format, v...))
|
||||
}
|
||||
|
||||
func (p *parser) expect(typ itemType) item {
|
||||
it := p.next()
|
||||
p.assertEqual(typ, it.typ)
|
||||
return it
|
||||
}
|
||||
|
||||
func (p *parser) assertEqual(expected, got itemType) {
|
||||
if expected != got {
|
||||
p.bug("Expected '%s' but got '%s'.", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) topLevel(item item) {
|
||||
switch item.typ {
|
||||
case itemCommentStart:
|
||||
p.approxLine = item.line
|
||||
p.expect(itemText)
|
||||
case itemTableStart:
|
||||
kg := p.expect(itemText)
|
||||
p.approxLine = kg.line
|
||||
|
||||
key := make(Key, 0)
|
||||
for ; kg.typ == itemText; kg = p.next() {
|
||||
key = append(key, kg.val)
|
||||
}
|
||||
p.assertEqual(itemTableEnd, kg.typ)
|
||||
|
||||
p.establishContext(key, false)
|
||||
p.setType("", tomlHash)
|
||||
p.ordered = append(p.ordered, key)
|
||||
case itemArrayTableStart:
|
||||
kg := p.expect(itemText)
|
||||
p.approxLine = kg.line
|
||||
|
||||
key := make(Key, 0)
|
||||
for ; kg.typ == itemText; kg = p.next() {
|
||||
key = append(key, kg.val)
|
||||
}
|
||||
p.assertEqual(itemArrayTableEnd, kg.typ)
|
||||
|
||||
p.establishContext(key, true)
|
||||
p.setType("", tomlArrayHash)
|
||||
p.ordered = append(p.ordered, key)
|
||||
case itemKeyStart:
|
||||
kname := p.expect(itemText)
|
||||
p.currentKey = kname.val
|
||||
p.approxLine = kname.line
|
||||
|
||||
val, typ := p.value(p.next())
|
||||
p.setValue(p.currentKey, val)
|
||||
p.setType(p.currentKey, typ)
|
||||
p.ordered = append(p.ordered, p.context.add(p.currentKey))
|
||||
|
||||
p.currentKey = ""
|
||||
default:
|
||||
p.bug("Unexpected type at top level: %s", item.typ)
|
||||
}
|
||||
}
|
||||
|
||||
// value translates an expected value from the lexer into a Go value wrapped
|
||||
// as an empty interface.
|
||||
func (p *parser) value(it item) (interface{}, tomlType) {
|
||||
switch it.typ {
|
||||
case itemString:
|
||||
return p.replaceUnicode(replaceEscapes(it.val)), p.typeOfPrimitive(it)
|
||||
case itemBool:
|
||||
switch it.val {
|
||||
case "true":
|
||||
return true, p.typeOfPrimitive(it)
|
||||
case "false":
|
||||
return false, p.typeOfPrimitive(it)
|
||||
}
|
||||
p.bug("Expected boolean value, but got '%s'.", it.val)
|
||||
case itemInteger:
|
||||
num, err := strconv.ParseInt(it.val, 10, 64)
|
||||
if err != nil {
|
||||
// See comment below for floats describing why we make a
|
||||
// distinction between a bug and a user error.
|
||||
if e, ok := err.(*strconv.NumError); ok &&
|
||||
e.Err == strconv.ErrRange {
|
||||
|
||||
p.panicf("Integer '%s' is out of the range of 64-bit "+
|
||||
"signed integers.", it.val)
|
||||
} else {
|
||||
p.bug("Expected integer value, but got '%s'.", it.val)
|
||||
}
|
||||
}
|
||||
return num, p.typeOfPrimitive(it)
|
||||
case itemFloat:
|
||||
num, err := strconv.ParseFloat(it.val, 64)
|
||||
if err != nil {
|
||||
// Distinguish float values. Normally, it'd be a bug if the lexer
|
||||
// provides an invalid float, but it's possible that the float is
|
||||
// out of range of valid values (which the lexer cannot determine).
|
||||
// So mark the former as a bug but the latter as a legitimate user
|
||||
// error.
|
||||
//
|
||||
// This is also true for integers.
|
||||
if e, ok := err.(*strconv.NumError); ok &&
|
||||
e.Err == strconv.ErrRange {
|
||||
|
||||
p.panicf("Float '%s' is out of the range of 64-bit "+
|
||||
"IEEE-754 floating-point numbers.", it.val)
|
||||
} else {
|
||||
p.bug("Expected float value, but got '%s'.", it.val)
|
||||
}
|
||||
}
|
||||
return num, p.typeOfPrimitive(it)
|
||||
case itemDatetime:
|
||||
t, err := time.Parse("2006-01-02T15:04:05Z", it.val)
|
||||
if err != nil {
|
||||
p.bug("Expected Zulu formatted DateTime, but got '%s'.", it.val)
|
||||
}
|
||||
return t, p.typeOfPrimitive(it)
|
||||
case itemArray:
|
||||
array := make([]interface{}, 0)
|
||||
types := make([]tomlType, 0)
|
||||
|
||||
for it = p.next(); it.typ != itemArrayEnd; it = p.next() {
|
||||
if it.typ == itemCommentStart {
|
||||
p.expect(itemText)
|
||||
continue
|
||||
}
|
||||
|
||||
val, typ := p.value(it)
|
||||
array = append(array, val)
|
||||
types = append(types, typ)
|
||||
}
|
||||
return array, p.typeOfArray(types)
|
||||
}
|
||||
p.bug("Unexpected value type: %s", it.typ)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// establishContext sets the current context of the parser,
|
||||
// where the context is either a hash or an array of hashes. Which one is
|
||||
// set depends on the value of the `array` parameter.
|
||||
//
|
||||
// Establishing the context also makes sure that the key isn't a duplicate, and
|
||||
// will create implicit hashes automatically.
|
||||
func (p *parser) establishContext(key Key, array bool) {
|
||||
var ok bool
|
||||
|
||||
// Always start at the top level and drill down for our context.
|
||||
hashContext := p.mapping
|
||||
keyContext := make(Key, 0)
|
||||
|
||||
// We only need implicit hashes for key[0:-1]
|
||||
for _, k := range key[0 : len(key)-1] {
|
||||
_, ok = hashContext[k]
|
||||
keyContext = append(keyContext, k)
|
||||
|
||||
// No key? Make an implicit hash and move on.
|
||||
if !ok {
|
||||
p.addImplicit(keyContext)
|
||||
hashContext[k] = make(map[string]interface{})
|
||||
}
|
||||
|
||||
// If the hash context is actually an array of tables, then set
|
||||
// the hash context to the last element in that array.
|
||||
//
|
||||
// Otherwise, it better be a table, since this MUST be a key group (by
|
||||
// virtue of it not being the last element in a key).
|
||||
switch t := hashContext[k].(type) {
|
||||
case []map[string]interface{}:
|
||||
hashContext = t[len(t)-1]
|
||||
case map[string]interface{}:
|
||||
hashContext = t
|
||||
default:
|
||||
p.panicf("Key '%s' was already created as a hash.", keyContext)
|
||||
}
|
||||
}
|
||||
|
||||
p.context = keyContext
|
||||
if array {
|
||||
// If this is the first element for this array, then allocate a new
|
||||
// list of tables for it.
|
||||
k := key[len(key)-1]
|
||||
if _, ok := hashContext[k]; !ok {
|
||||
hashContext[k] = make([]map[string]interface{}, 0, 5)
|
||||
}
|
||||
|
||||
// Add a new table. But make sure the key hasn't already been used
|
||||
// for something else.
|
||||
if hash, ok := hashContext[k].([]map[string]interface{}); ok {
|
||||
hashContext[k] = append(hash, make(map[string]interface{}))
|
||||
} else {
|
||||
p.panicf("Key '%s' was already created and cannot be used as "+
|
||||
"an array.", keyContext)
|
||||
}
|
||||
} else {
|
||||
p.setValue(key[len(key)-1], make(map[string]interface{}))
|
||||
}
|
||||
p.context = append(p.context, key[len(key)-1])
|
||||
}
|
||||
|
||||
// setValue sets the given key to the given value in the current context.
|
||||
// It will make sure that the key hasn't already been defined, account for
|
||||
// implicit key groups.
|
||||
func (p *parser) setValue(key string, value interface{}) {
|
||||
var tmpHash interface{}
|
||||
var ok bool
|
||||
|
||||
hash := p.mapping
|
||||
keyContext := make(Key, 0)
|
||||
for _, k := range p.context {
|
||||
keyContext = append(keyContext, k)
|
||||
if tmpHash, ok = hash[k]; !ok {
|
||||
p.bug("Context for key '%s' has not been established.", keyContext)
|
||||
}
|
||||
switch t := tmpHash.(type) {
|
||||
case []map[string]interface{}:
|
||||
// The context is a table of hashes. Pick the most recent table
|
||||
// defined as the current hash.
|
||||
hash = t[len(t)-1]
|
||||
case map[string]interface{}:
|
||||
hash = t
|
||||
default:
|
||||
p.bug("Expected hash to have type 'map[string]interface{}', but "+
|
||||
"it has '%T' instead.", tmpHash)
|
||||
}
|
||||
}
|
||||
keyContext = append(keyContext, key)
|
||||
|
||||
if _, ok := hash[key]; ok {
|
||||
// Typically, if the given key has already been set, then we have
|
||||
// to raise an error since duplicate keys are disallowed. However,
|
||||
// it's possible that a key was previously defined implicitly. In this
|
||||
// case, it is allowed to be redefined concretely. (See the
|
||||
// `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.)
|
||||
//
|
||||
// But we have to make sure to stop marking it as an implicit. (So that
|
||||
// another redefinition provokes an error.)
|
||||
//
|
||||
// Note that since it has already been defined (as a hash), we don't
|
||||
// want to overwrite it. So our business is done.
|
||||
if p.isImplicit(keyContext) {
|
||||
p.removeImplicit(keyContext)
|
||||
return
|
||||
}
|
||||
|
||||
// Otherwise, we have a concrete key trying to override a previous
|
||||
// key, which is *always* wrong.
|
||||
p.panicf("Key '%s' has already been defined.", keyContext)
|
||||
}
|
||||
hash[key] = value
|
||||
}
|
||||
|
||||
// setType sets the type of a particular value at a given key.
|
||||
// It should be called immediately AFTER setValue.
|
||||
//
|
||||
// Note that if `key` is empty, then the type given will be applied to the
|
||||
// current context (which is either a table or an array of tables).
|
||||
func (p *parser) setType(key string, typ tomlType) {
|
||||
keyContext := make(Key, 0, len(p.context)+1)
|
||||
for _, k := range p.context {
|
||||
keyContext = append(keyContext, k)
|
||||
}
|
||||
if len(key) > 0 { // allow type setting for hashes
|
||||
keyContext = append(keyContext, key)
|
||||
}
|
||||
p.types[keyContext.String()] = typ
|
||||
}
|
||||
|
||||
// addImplicit sets the given Key as having been created implicitly.
|
||||
func (p *parser) addImplicit(key Key) {
|
||||
p.implicits[key.String()] = true
|
||||
}
|
||||
|
||||
// removeImplicit stops tagging the given key as having been implicitly created.
|
||||
func (p *parser) removeImplicit(key Key) {
|
||||
p.implicits[key.String()] = false
|
||||
}
|
||||
|
||||
// isImplicit returns true if the key group pointed to by the key was created
|
||||
// implicitly.
|
||||
func (p *parser) isImplicit(key Key) bool {
|
||||
return p.implicits[key.String()]
|
||||
}
|
||||
|
||||
// current returns the full key name of the current context.
|
||||
func (p *parser) current() string {
|
||||
if len(p.currentKey) == 0 {
|
||||
return p.context.String()
|
||||
}
|
||||
if len(p.context) == 0 {
|
||||
return p.currentKey
|
||||
}
|
||||
return fmt.Sprintf("%s.%s", p.context, p.currentKey)
|
||||
}
|
||||
|
||||
func replaceEscapes(s string) string {
|
||||
return strings.NewReplacer(
|
||||
"\\b", "\u0008",
|
||||
"\\t", "\u0009",
|
||||
"\\n", "\u000A",
|
||||
"\\f", "\u000C",
|
||||
"\\r", "\u000D",
|
||||
"\\\"", "\u0022",
|
||||
"\\/", "\u002F",
|
||||
"\\\\", "\u005C",
|
||||
).Replace(s)
|
||||
}
|
||||
|
||||
func (p *parser) replaceUnicode(s string) string {
|
||||
indexEsc := func() int {
|
||||
return strings.Index(s, "\\u")
|
||||
}
|
||||
for i := indexEsc(); i != -1; i = indexEsc() {
|
||||
asciiBytes := s[i+2 : i+6]
|
||||
s = strings.Replace(s, s[i:i+6], p.asciiEscapeToUnicode(asciiBytes), -1)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (p *parser) asciiEscapeToUnicode(s string) string {
|
||||
hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32)
|
||||
if err != nil {
|
||||
p.bug("Could not parse '%s' as a hexadecimal number, but the "+
|
||||
"lexer claims it's OK: %s", s, err)
|
||||
}
|
||||
|
||||
// BUG(burntsushi)
|
||||
// I honestly don't understand how this works. I can't seem
|
||||
// to find a way to make this fail. I figured this would fail on invalid
|
||||
// UTF-8 characters like U+DCFF, but it doesn't.
|
||||
r := string(rune(hex))
|
||||
if !utf8.ValidString(r) {
|
||||
p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s)
|
||||
}
|
||||
return string(r)
|
||||
}
|
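Worth noting: `parse` converts the `parseError` panics raised by `panicf` back into an ordinary returned error with a deferred `recover`, and re-panics on anything else. Here is a minimal standalone sketch of that same technique; the function and error names are illustrative and not this package's exports.

```go
package main

import "fmt"

// parseError mirrors the pattern above: a string-based error type that is
// raised with panic deep inside the parser and converted back into an
// ordinary error at the API boundary.
type parseError string

func (pe parseError) Error() string { return string(pe) }

// fail panics with a parseError, playing the role of panicf above.
func fail(format string, v ...interface{}) {
	panic(parseError(fmt.Sprintf(format, v...)))
}

// parseDigits reports an error for non-digit input without threading an
// error value through every helper: the deferred recover catches only
// parseError panics and re-panics on anything else (a real bug).
func parseDigits(s string) (n int, err error) {
	defer func() {
		if r := recover(); r != nil {
			if pe, ok := r.(parseError); ok {
				err = pe
				return
			}
			panic(r)
		}
	}()

	for _, r := range s {
		if r < '0' || r > '9' {
			fail("unexpected character %q", r)
		}
		n = n*10 + int(r-'0')
	}
	return n, nil
}

func main() {
	fmt.Println(parseDigits("1234"))
	fmt.Println(parseDigits("12a4"))
}
```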
1
Godeps/_workspace/src/github.com/BurntSushi/toml/session.vim
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
au BufWritePost *.go silent!make tags > /dev/null 2>&1
|
85
Godeps/_workspace/src/github.com/BurntSushi/toml/type_check.go
generated
vendored
Normal file
|
@ -0,0 +1,85 @@
|
|||
package toml
|
||||
|
||||
// tomlType represents any Go type that corresponds to a TOML type.
|
||||
// While the first draft of the TOML spec has a simplistic type system that
|
||||
// probably doesn't need this level of sophistication, we seem to be militating
|
||||
// toward adding real composite types.
|
||||
type tomlType interface {
|
||||
typeString() string
|
||||
}
|
||||
|
||||
// typeEqual accepts any two types and returns true if they are equal.
|
||||
func typeEqual(t1, t2 tomlType) bool {
|
||||
if t1 == nil || t2 == nil {
|
||||
return false
|
||||
}
|
||||
return t1.typeString() == t2.typeString()
|
||||
}
|
||||
|
||||
func typeIsHash(t tomlType) bool {
|
||||
return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash)
|
||||
}
|
||||
|
||||
type tomlBaseType string
|
||||
|
||||
func (btype tomlBaseType) typeString() string {
|
||||
return string(btype)
|
||||
}
|
||||
|
||||
func (btype tomlBaseType) String() string {
|
||||
return btype.typeString()
|
||||
}
|
||||
|
||||
var (
|
||||
tomlInteger tomlBaseType = "Integer"
|
||||
tomlFloat tomlBaseType = "Float"
|
||||
tomlDatetime tomlBaseType = "Datetime"
|
||||
tomlString tomlBaseType = "String"
|
||||
tomlBool tomlBaseType = "Bool"
|
||||
tomlArray tomlBaseType = "Array"
|
||||
tomlHash tomlBaseType = "Hash"
|
||||
tomlArrayHash tomlBaseType = "ArrayHash"
|
||||
)
|
||||
|
||||
// typeOfPrimitive returns a tomlType of any primitive value in TOML.
|
||||
// Primitive values are: Integer, Float, Datetime, String and Bool.
|
||||
//
|
||||
// Passing a lexer item other than the following will cause a BUG message
|
||||
// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime.
|
||||
func (p *parser) typeOfPrimitive(lexItem item) tomlType {
|
||||
switch lexItem.typ {
|
||||
case itemInteger:
|
||||
return tomlInteger
|
||||
case itemFloat:
|
||||
return tomlFloat
|
||||
case itemDatetime:
|
||||
return tomlDatetime
|
||||
case itemString:
|
||||
return tomlString
|
||||
case itemBool:
|
||||
return tomlBool
|
||||
}
|
||||
p.bug("Cannot infer primitive type of lex item '%s'.", lexItem)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// typeOfArray returns a tomlType for an array given a list of types of its
|
||||
// values.
|
||||
//
|
||||
// In the current spec, if an array is homogeneous, then its type is always
|
||||
// "Array". If the array is not homogeneous, an error is generated.
|
||||
func (p *parser) typeOfArray(types []tomlType) tomlType {
|
||||
// Empty arrays are cool.
|
||||
if len(types) == 0 {
|
||||
return tomlArray
|
||||
}
|
||||
|
||||
theType := types[0]
|
||||
for _, t := range types[1:] {
|
||||
if !typeEqual(theType, t) {
|
||||
p.panicf("Array contains values of type '%s' and '%s', but arrays "+
|
||||
"must be homogeneous.", theType, t)
|
||||
}
|
||||
}
|
||||
return tomlArray
|
||||
}
|
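To make the rule enforced by `typeOfArray` concrete, the following is a small, hypothetical reimplementation of the same homogeneity check over plain `interface{}` values using `reflect`; it is only an illustration of the idea, not code from this package.

```go
package main

import (
	"fmt"
	"reflect"
)

// homogeneous reports whether every value in vals has the same dynamic type.
// It mirrors the rule above: empty arrays are allowed, mixed element types
// are not.
func homogeneous(vals []interface{}) bool {
	if len(vals) == 0 {
		return true // empty arrays are cool
	}
	first := reflect.TypeOf(vals[0])
	for _, v := range vals[1:] {
		if reflect.TypeOf(v) != first {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(homogeneous([]interface{}{1, 2, 3}))       // true
	fmt.Println(homogeneous([]interface{}{1, "two", 3.0})) // false
	fmt.Println(homogeneous(nil))                          // true
}
```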
241
Godeps/_workspace/src/github.com/BurntSushi/toml/type_fields.go
generated
vendored
Normal file
|
@ -0,0 +1,241 @@
|
|||
package toml
|
||||
|
||||
// Struct field handling is adapted from code in encoding/json:
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the Go distribution.
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sort"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// A field represents a single field found in a struct.
|
||||
type field struct {
|
||||
name string // the name of the field (`toml` tag included)
|
||||
tag bool // whether field has a `toml` tag
|
||||
index []int // represents the depth of an anonymous field
|
||||
typ reflect.Type // the type of the field
|
||||
}
|
||||
|
||||
// byName sorts field by name, breaking ties with depth,
|
||||
// then breaking ties with "name came from toml tag", then
|
||||
// breaking ties with index sequence.
|
||||
type byName []field
|
||||
|
||||
func (x byName) Len() int { return len(x) }
|
||||
|
||||
func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byName) Less(i, j int) bool {
|
||||
if x[i].name != x[j].name {
|
||||
return x[i].name < x[j].name
|
||||
}
|
||||
if len(x[i].index) != len(x[j].index) {
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
if x[i].tag != x[j].tag {
|
||||
return x[i].tag
|
||||
}
|
||||
return byIndex(x).Less(i, j)
|
||||
}
|
||||
|
||||
// byIndex sorts field by index sequence.
|
||||
type byIndex []field
|
||||
|
||||
func (x byIndex) Len() int { return len(x) }
|
||||
|
||||
func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byIndex) Less(i, j int) bool {
|
||||
for k, xik := range x[i].index {
|
||||
if k >= len(x[j].index) {
|
||||
return false
|
||||
}
|
||||
if xik != x[j].index[k] {
|
||||
return xik < x[j].index[k]
|
||||
}
|
||||
}
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
|
||||
// typeFields returns a list of fields that TOML should recognize for the given
|
||||
// type. The algorithm is breadth-first search over the set of structs to
|
||||
// include - the top struct and then any reachable anonymous structs.
|
||||
func typeFields(t reflect.Type) []field {
|
||||
// Anonymous fields to explore at the current level and the next.
|
||||
current := []field{}
|
||||
next := []field{{typ: t}}
|
||||
|
||||
// Count of queued names for current level and the next.
|
||||
count := map[reflect.Type]int{}
|
||||
nextCount := map[reflect.Type]int{}
|
||||
|
||||
// Types already visited at an earlier level.
|
||||
visited := map[reflect.Type]bool{}
|
||||
|
||||
// Fields found.
|
||||
var fields []field
|
||||
|
||||
for len(next) > 0 {
|
||||
current, next = next, current[:0]
|
||||
count, nextCount = nextCount, map[reflect.Type]int{}
|
||||
|
||||
for _, f := range current {
|
||||
if visited[f.typ] {
|
||||
continue
|
||||
}
|
||||
visited[f.typ] = true
|
||||
|
||||
// Scan f.typ for fields to include.
|
||||
for i := 0; i < f.typ.NumField(); i++ {
|
||||
sf := f.typ.Field(i)
|
||||
if sf.PkgPath != "" { // unexported
|
||||
continue
|
||||
}
|
||||
name := sf.Tag.Get("toml")
|
||||
if name == "-" {
|
||||
continue
|
||||
}
|
||||
index := make([]int, len(f.index)+1)
|
||||
copy(index, f.index)
|
||||
index[len(f.index)] = i
|
||||
|
||||
ft := sf.Type
|
||||
if ft.Name() == "" && ft.Kind() == reflect.Ptr {
|
||||
// Follow pointer.
|
||||
ft = ft.Elem()
|
||||
}
|
||||
|
||||
// Record found field and index sequence.
|
||||
if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
|
||||
tagged := name != ""
|
||||
if name == "" {
|
||||
name = sf.Name
|
||||
}
|
||||
fields = append(fields, field{name, tagged, index, ft})
|
||||
if count[f.typ] > 1 {
|
||||
// If there were multiple instances, add a second,
|
||||
// so that the annihilation code will see a duplicate.
|
||||
// It only cares about the distinction between 1 or 2,
|
||||
// so don't bother generating any more copies.
|
||||
fields = append(fields, fields[len(fields)-1])
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Record new anonymous struct to explore in next round.
|
||||
nextCount[ft]++
|
||||
if nextCount[ft] == 1 {
|
||||
f := field{name: ft.Name(), index: index, typ: ft}
|
||||
next = append(next, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sort.Sort(byName(fields))
|
||||
|
||||
// Delete all fields that are hidden by the Go rules for embedded fields,
|
||||
// except that fields with TOML tags are promoted.
|
||||
|
||||
// The fields are sorted in primary order of name, secondary order
|
||||
// of field index length. Loop over names; for each name, delete
|
||||
// hidden fields by choosing the one dominant field that survives.
|
||||
out := fields[:0]
|
||||
for advance, i := 0, 0; i < len(fields); i += advance {
|
||||
// One iteration per name.
|
||||
// Find the sequence of fields with the name of this first field.
|
||||
fi := fields[i]
|
||||
name := fi.name
|
||||
for advance = 1; i+advance < len(fields); advance++ {
|
||||
fj := fields[i+advance]
|
||||
if fj.name != name {
|
||||
break
|
||||
}
|
||||
}
|
||||
if advance == 1 { // Only one field with this name
|
||||
out = append(out, fi)
|
||||
continue
|
||||
}
|
||||
dominant, ok := dominantField(fields[i : i+advance])
|
||||
if ok {
|
||||
out = append(out, dominant)
|
||||
}
|
||||
}
|
||||
|
||||
fields = out
|
||||
sort.Sort(byIndex(fields))
|
||||
|
||||
return fields
|
||||
}
|
||||
|
||||
// dominantField looks through the fields, all of which are known to
|
||||
// have the same name, to find the single field that dominates the
|
||||
// others using Go's embedding rules, modified by the presence of
|
||||
// TOML tags. If there are multiple top-level fields, the boolean
|
||||
// will be false: This condition is an error in Go and we skip all
|
||||
// the fields.
|
||||
func dominantField(fields []field) (field, bool) {
|
||||
// The fields are sorted in increasing index-length order. The winner
|
||||
// must therefore be one with the shortest index length. Drop all
|
||||
// longer entries, which is easy: just truncate the slice.
|
||||
length := len(fields[0].index)
|
||||
tagged := -1 // Index of first tagged field.
|
||||
for i, f := range fields {
|
||||
if len(f.index) > length {
|
||||
fields = fields[:i]
|
||||
break
|
||||
}
|
||||
if f.tag {
|
||||
if tagged >= 0 {
|
||||
// Multiple tagged fields at the same level: conflict.
|
||||
// Return no field.
|
||||
return field{}, false
|
||||
}
|
||||
tagged = i
|
||||
}
|
||||
}
|
||||
if tagged >= 0 {
|
||||
return fields[tagged], true
|
||||
}
|
||||
// All remaining fields have the same length. If there's more than one,
|
||||
// we have a conflict (two fields named "X" at the same level) and we
|
||||
// return no field.
|
||||
if len(fields) > 1 {
|
||||
return field{}, false
|
||||
}
|
||||
return fields[0], true
|
||||
}
|
||||
|
||||
var fieldCache struct {
|
||||
sync.RWMutex
|
||||
m map[reflect.Type][]field
|
||||
}
|
||||
|
||||
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
|
||||
func cachedTypeFields(t reflect.Type) []field {
|
||||
fieldCache.RLock()
|
||||
f := fieldCache.m[t]
|
||||
fieldCache.RUnlock()
|
||||
if f != nil {
|
||||
return f
|
||||
}
|
||||
|
||||
// Compute fields without lock.
|
||||
// Might duplicate effort but won't hold other computations back.
|
||||
f = typeFields(t)
|
||||
if f == nil {
|
||||
f = []field{}
|
||||
}
|
||||
|
||||
fieldCache.Lock()
|
||||
if fieldCache.m == nil {
|
||||
fieldCache.m = map[reflect.Type][]field{}
|
||||
}
|
||||
fieldCache.m[t] = f
|
||||
fieldCache.Unlock()
|
||||
return f
|
||||
}
|
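The tag conventions that `typeFields` implements (a `toml` tag renames a field, `"-"` skips it, unexported fields are ignored) can be seen in isolation with a short sketch; the `Config` struct below is hypothetical and only exists to show the mapping.

```go
package main

import (
	"fmt"
	"reflect"
)

// Config illustrates the tag handling above: a `toml` tag renames a field,
// "-" excludes it, untagged exported fields keep their Go names, and
// unexported fields are never considered.
type Config struct {
	Host     string `toml:"host"`
	Port     int    // no tag: the Go field name is used
	Password string `toml:"-"`
	secret   string // unexported: skipped
}

func main() {
	t := reflect.TypeOf(Config{})
	for i := 0; i < t.NumField(); i++ {
		sf := t.Field(i)
		if sf.PkgPath != "" { // unexported
			continue
		}
		name := sf.Tag.Get("toml")
		if name == "-" {
			continue
		}
		if name == "" {
			name = sf.Name
		}
		fmt.Printf("field %-8s -> key %q\n", sf.Name, name)
	}
}
```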
3
Godeps/_workspace/src/github.com/BurntSushi/ty/.gitignore
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
TAGS
|
||||
tags
|
||||
.*.swp
|
13
Godeps/_workspace/src/github.com/BurntSushi/ty/COPYING
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
|||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
Version 2, December 2004
|
||||
|
||||
Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim or modified
|
||||
copies of this license document, and changing it is allowed as long
|
||||
as the name is changed.
|
||||
|
||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. You just DO WHAT THE FUCK YOU WANT TO.
|
28
Godeps/_workspace/src/github.com/BurntSushi/ty/Makefile
generated
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
all: install
|
||||
|
||||
install:
|
||||
go install ./...
|
||||
|
||||
test: install
|
||||
go test ./...
|
||||
|
||||
benchcmp: install
|
||||
cd fun \
|
||||
&& echo "Running reflection benchmarks..." \
|
||||
&& go test -cpu 12 -run NONE -benchmem -bench . > reflect.bench \
|
||||
&& echo "Running built in benchmarks..." \
|
||||
&& go test -cpu 12 -run NONE -benchmem -bench . -builtin > builtin.bench \
|
||||
&& benchcmp builtin.bench reflect.bench > ../perf/cmp.bench \
|
||||
&& rm builtin.bench reflect.bench
|
||||
|
||||
fmt:
|
||||
gofmt -w *.go */*.go
|
||||
colcheck *.go */*.go
|
||||
|
||||
tags:
|
||||
find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS
|
||||
|
||||
push:
|
||||
git push origin master
|
||||
git push github master
|
||||
|
119
Godeps/_workspace/src/github.com/BurntSushi/ty/README.md
generated
vendored
Normal file
|
@ -0,0 +1,119 @@
|
|||
Package `ty` provides utilities for writing type parametric functions with run
|
||||
time type safety.
|
||||
|
||||
This package contains two sub-packages `fun` and `data` which define some
|
||||
potentially useful functions and abstractions using the type checker in
|
||||
this package.
|
||||
|
||||
## Requirements
|
||||
|
||||
Go tip (or 1.1 when it's released) is required. This package will not work
|
||||
with Go 1.0.x or earlier.
|
||||
|
||||
The very foundation of this package only recently became possible with the
|
||||
addition of 3 new functions in the standard library `reflect` package:
|
||||
SliceOf, MapOf and ChanOf. In particular, it provides the ability to
|
||||
dynamically construct types at run time from component types.
|
||||
|
||||
Further extensions to this package can be made if similar functions are added
|
||||
for structs and functions(?).
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/ty
|
||||
go get github.com/BurntSushi/ty/fun
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
Squaring each integer in a slice:
|
||||
|
||||
```go
|
||||
square := func(x int) int { return x * x }
|
||||
nums := []int{1, 2, 3, 4, 5}
|
||||
squares := Map(square, nums).([]int)
|
||||
```
|
||||
|
||||
Reversing any slice:
|
||||
|
||||
```go
|
||||
slice := []string{"a", "b", "c"}
|
||||
reversed := Reverse(slice).([]string)
|
||||
```
|
||||
|
||||
Sorting any slice:
|
||||
|
||||
```go
|
||||
// Sort a slice of structs with first class functions.
|
||||
type Album struct {
|
||||
Title string
|
||||
Year int
|
||||
}
|
||||
albums := []Album{
|
||||
{"Born to Run", 1975},
|
||||
{"WIESS", 1973},
|
||||
{"Darkness", 1978},
|
||||
{"Greetings", 1973},
|
||||
}
|
||||
|
||||
less := func(a, b Album) bool { return a.Year < b.Year }
|
||||
sorted := QuickSort(less, albums).([]Album)
|
||||
```
|
||||
|
||||
Parallel map:
|
||||
|
||||
```go
|
||||
// Compute the prime factorization concurrently
|
||||
// for every integer in [1000, 10000].
|
||||
primeFactors := func(n int) []int { /* compute prime factors */ return nil }
|
||||
factors := ParMap(primeFactors, Range(1000, 10001)).([][]int)
|
||||
```
|
||||
|
||||
Asynchronous channel without a fixed size buffer:
|
||||
|
||||
```go
|
||||
s, r := AsyncChan(new(chan int))
|
||||
send, recv := s.(chan<- int), r.(<-chan int)
|
||||
|
||||
// Send as much as you want.
|
||||
for i := 0; i < 100; i++ {
|
||||
s <- i
|
||||
}
|
||||
close(s)
|
||||
for i := range recv {
|
||||
// do something with `i`
|
||||
}
|
||||
```
|
||||
|
||||
Shuffle any slice in place:
|
||||
|
||||
```go
|
||||
jumbleMe := []string{"The", "quick", "brown", "fox"}
|
||||
Shuffle(jumbleMe)
|
||||
```
|
||||
|
||||
Function memoization:
|
||||
|
||||
```go
|
||||
// Memoizing a recursive function like `fibonacci`.
|
||||
// Write it like normal:
|
||||
var fib func(n int64) int64
|
||||
fib = func(n int64) int64 {
|
||||
switch n {
|
||||
case 0:
|
||||
return 0
|
||||
case 1:
|
||||
return 1
|
||||
}
|
||||
return fib(n - 1) + fib(n - 2)
|
||||
}
|
||||
|
||||
// And wrap it with `Memo`.
|
||||
fib = Memo(fib).(func(int64) int64)
|
||||
|
||||
// Will keep your CPU busy for a long time
|
||||
// without memoization.
|
||||
fmt.Println(fib(80))
|
||||
```
|
||||
|
82
Godeps/_workspace/src/github.com/BurntSushi/ty/data/doc.go
generated
vendored
Normal file
|
@ -0,0 +1,82 @@
|
|||
/*
|
||||
Package data tumbles down the rabbit hole into parametric data types.
|
||||
|
||||
A parametric data type in this package is a data type that is parameterized by
|
||||
one or more types discovered at run time. For example, an ordered map is
|
||||
parameterized by two types: the type of its keys and the type of its values.
|
||||
|
||||
Implementation
|
||||
|
||||
While all parametric inputs and outputs of each function have a Go type of
|
||||
`interface{}`, the underlying type is maintained via reflection. In particular,
|
||||
any operation that interacts with a parametric data type does so via
|
||||
reflection so that the type safety found in Go at compile time can be
|
||||
recovered at run time.
|
||||
|
||||
For example, consider the case of an ordered map. One might define such a map
|
||||
as a list of its keys in order and a map of `interface{}` to `interface{}`:
|
||||
|
||||
type OrdMap struct {
|
||||
M map[interface{}]interface{}
|
||||
Keys []interface{}
|
||||
}
|
||||
|
||||
And one can interact with this map using standard built-in Go operations:
|
||||
|
||||
// Add a key
|
||||
M["key"] = "value"
|
||||
Keys = append(Keys, "key")
|
||||
|
||||
// Delete a key
|
||||
delete(M, "key")
|
||||
|
||||
// Read a key
|
||||
M["key"]
|
||||
|
||||
But there is no type safety with such a representation, even at run time:
|
||||
|
||||
// Both of these operations are legal with
|
||||
// the aforementioned representation.
|
||||
M["key"] = "value"
|
||||
M[5] = true
|
||||
|
||||
Thus, the contribution of this library is to maintain type safety at run time
|
||||
by guaranteeing that all operations are consistent with Go typing rules:
|
||||
|
||||
type OrdMap struct {
|
||||
M reflect.Value
|
||||
Keys reflect.Value
|
||||
}
|
||||
|
||||
And one must interact with a map using `reflect`:
|
||||
|
||||
key, val := "key", "value"
|
||||
rkey := reflect.ValueOf(key)
|
||||
rval := reflect.ValueOf(val)
|
||||
|
||||
// Add a key
|
||||
M.SetMapIndex(rkey, rval)
|
||||
Keys = reflect.Append(Keys, rkey)
|
||||
|
||||
// Delete a key
|
||||
M.SetMapIndex(rkey, reflect.Value{})
|
||||
|
||||
// Read a key
|
||||
M.MapIndex(rkey)
|
||||
|
||||
Which guarantees, at run-time, that the following cannot happen:
|
||||
|
||||
key2, val2 := 5, true
|
||||
rkey2 := reflect.ValueOf(key2)
|
||||
rval2 := reflect.ValueOf(val2)
|
||||
|
||||
// One or the other operation will be disallowed,
|
||||
// assuming `OrdMap` isn't instantiated with
|
||||
// `interface{}` as the key and value type.
|
||||
M.SetMapIndex(rkey, rval)
|
||||
M.SetMapIndex(rkey2, rval2)
|
||||
|
||||
The result is much more painful library code but only slightly more painful
|
||||
client code.
|
||||
*/
|
||||
package data
|
175
Godeps/_workspace/src/github.com/BurntSushi/ty/data/ordmap.go
generated
vendored
Normal file
|
@ -0,0 +1,175 @@
|
|||
package data
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
|
||||
"github.com/BurntSushi/ty"
|
||||
)
|
||||
|
||||
// OrdMap has a parametric type `OrdMap<K, V>` where `K` is the type
|
||||
// of the map's keys and `V` is the type of the map's values.
|
||||
type OrdMap struct {
|
||||
m reflect.Value
|
||||
keys reflect.Value
|
||||
ktype, vtype reflect.Type
|
||||
}
|
||||
|
||||
// OrderedMap returns a new instance of OrdMap instantiated with the key
|
||||
// and value types given. Namely, the types should be provided via nil
|
||||
// pointers, e.g., to create a map from strings to integers:
|
||||
//
|
||||
// omap := OrderedMap(new(string), new(int))
|
||||
//
|
||||
// An ordered map maintains the insertion order of all keys in the map.
|
||||
// Namely, `(*OrdMap).Keys()` returns a slice of keys in the order
|
||||
// they were inserted. The order of a key can *only* be changed if it is
|
||||
// deleted and added again.
|
||||
//
|
||||
// All of the operations on an ordered map have the same time complexity as
|
||||
// the built-in `map`, except for `Delete` which is O(n) in the number of
|
||||
// keys.
|
||||
func OrderedMap(ktype, vtype interface{}) *OrdMap {
|
||||
// A giant hack to get `Check` to do all the type construction work for us.
|
||||
chk := ty.Check(
|
||||
new(func(*ty.A, *ty.B) (ty.A, ty.B, map[ty.A]ty.B, []ty.A)),
|
||||
ktype, vtype)
|
||||
tkey, tval := chk.Returns[0], chk.Returns[1]
|
||||
tmap, tkeys := chk.Returns[2], chk.Returns[3]
|
||||
|
||||
return &OrdMap{
|
||||
m: reflect.MakeMap(tmap),
|
||||
keys: reflect.MakeSlice(tkeys, 0, 10),
|
||||
ktype: tkey,
|
||||
vtype: tval,
|
||||
}
|
||||
}
|
||||
|
||||
// Exists has a parametric type:
|
||||
//
|
||||
// func (om *OrdMap<K, V>) Exists(key K) bool
|
||||
//
|
||||
// Exists returns true if `key` is in the map `om`.
|
||||
func (om *OrdMap) Exists(key interface{}) bool {
|
||||
rkey := ty.AssertType(key, om.ktype)
|
||||
return om.exists(rkey)
|
||||
}
|
||||
|
||||
func (om *OrdMap) exists(rkey reflect.Value) bool {
|
||||
return om.m.MapIndex(rkey).IsValid()
|
||||
}
|
||||
|
||||
// Put has a parametric type:
|
||||
//
|
||||
// func (om *OrdMap<K, V>) Put(key K, val V)
|
||||
//
|
||||
// Put adds or overwrites `key` into the map `om` with value `val`.
|
||||
// If `key` already exists in the map, then its position in the ordering
|
||||
// of the map is not changed.
|
||||
func (om *OrdMap) Put(key, val interface{}) {
|
||||
rkey := ty.AssertType(key, om.ktype)
|
||||
rval := ty.AssertType(val, om.vtype)
|
||||
if !om.exists(rkey) {
|
||||
om.keys = reflect.Append(om.keys, rkey)
|
||||
}
|
||||
om.m.SetMapIndex(rkey, rval)
|
||||
}
|
||||
|
||||
// Get has a parametric type:
|
||||
//
|
||||
// func (om *OrdMap<K, V>) Get(key K) V
|
||||
//
|
||||
// Get retrieves the value in the map `om` corresponding to `key`. If the
|
||||
// value does not exist, then the zero value of type `V` is returned.
|
||||
func (om *OrdMap) Get(key interface{}) interface{} {
|
||||
rkey := ty.AssertType(key, om.ktype)
|
||||
rval := om.m.MapIndex(rkey)
|
||||
if !rval.IsValid() {
|
||||
return om.zeroValue().Interface()
|
||||
}
|
||||
return rval.Interface()
|
||||
}
|
||||
|
||||
// TryGet has a parametric type:
|
||||
//
|
||||
// func (om *OrdMap<K, V>) TryGet(key K) (V, bool)
|
||||
//
|
||||
// TryGet retrieves the value in the map `om` corresponding to `key` and
|
||||
// reports whether the value exists in the map or not. If the value does
|
||||
// not exist, then the zero value of `V` and `false` are returned.
|
||||
func (om *OrdMap) TryGet(key interface{}) (interface{}, bool) {
|
||||
rkey := ty.AssertType(key, om.ktype)
|
||||
rval := om.m.MapIndex(rkey)
|
||||
if !rval.IsValid() {
|
||||
return om.zeroValue().Interface(), false
|
||||
}
|
||||
return rval.Interface(), true
|
||||
}
|
||||
|
||||
// Delete has a parametric type:
|
||||
//
|
||||
// func (om *OrdMap<K, V>) Delete(key K)
|
||||
//
|
||||
// Delete removes `key` from the map `om`.
|
||||
//
|
||||
// N.B. Delete is O(n) in the number of keys.
|
||||
func (om *OrdMap) Delete(key interface{}) {
|
||||
rkey := ty.AssertType(key, om.ktype)
|
||||
|
||||
// Avoid doing work if we don't need to.
|
||||
if !om.exists(rkey) {
|
||||
return
|
||||
}
|
||||
|
||||
keysLen := om.keys.Len()
|
||||
for i := 0; i < keysLen; i++ {
|
||||
if key == om.keys.Index(i).Interface() {
|
||||
// om.keys = append(om.keys[:i], om.keys[i+1:]...)
|
||||
om.keys = reflect.AppendSlice(
|
||||
om.keys.Slice(0, i), om.keys.Slice(i+1, keysLen))
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Setting a key to a zero reflect.Value deletes the key from the map.
|
||||
om.m.SetMapIndex(rkey, reflect.Value{})
|
||||
}
|
||||
|
||||
// Keys has a parametric type:
|
||||
//
|
||||
// func (om *OrdMap<K, V>) Keys() []K
|
||||
//
|
||||
// Keys returns a list of keys in `om` in the order they were inserted.
|
||||
//
|
||||
// Behavior is undefined if the list is modified by the caller.
|
||||
func (om *OrdMap) Keys() interface{} {
|
||||
return om.keys.Interface()
|
||||
}
|
||||
|
||||
// Values has a parametric type:
|
||||
//
|
||||
// func (om *OrdMap<K, V>) Values() []V
|
||||
//
|
||||
// Values returns a shallow copy of the values in `om` in the order that they
|
||||
// were inserted.
|
||||
func (om *OrdMap) Values() interface{} {
|
||||
mlen := om.Len()
|
||||
tvals := reflect.SliceOf(om.vtype)
|
||||
rvals := reflect.MakeSlice(tvals, mlen, mlen)
|
||||
for i := 0; i < mlen; i++ {
|
||||
rvals.Index(i).Set(om.m.MapIndex(om.keys.Index(i)))
|
||||
}
|
||||
return rvals.Interface()
|
||||
}
|
||||
|
||||
// Len has a parametric type:
|
||||
//
|
||||
// func (om *OrdMap<K, V>) Len() int
|
||||
//
|
||||
// Len returns the number of keys in the map `om`.
|
||||
func (om *OrdMap) Len() int {
|
||||
return om.m.Len()
|
||||
}
|
||||
|
||||
func (om *OrdMap) zeroValue() reflect.Value {
|
||||
return reflect.New(om.vtype).Elem()
|
||||
}
|
62
Godeps/_workspace/src/github.com/BurntSushi/ty/data/ordmap_test.go
generated
vendored
Normal file
|
@ -0,0 +1,62 @@
|
|||
package data
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var pf = fmt.Printf
|
||||
|
||||
func TestOrdMap(t *testing.T) {
|
||||
omap := OrderedMap(new(string), new(int))
|
||||
|
||||
omap.Put("kaitlyn", 24)
|
||||
omap.Put("andrew", 25)
|
||||
omap.Put("lauren", 20)
|
||||
omap.Put("jen", 24)
|
||||
omap.Put("brennan", 25)
|
||||
|
||||
omap.Delete("kaitlyn")
|
||||
|
||||
assertDeep(t, omap.Keys(), []string{"andrew", "lauren", "jen", "brennan"})
|
||||
assertDeep(t, omap.Values(), []int{25, 20, 24, 25})
|
||||
}
|
||||
|
||||
func ExampleOrderedMap() {
|
||||
omap := OrderedMap(new(string), new([]string))
|
||||
|
||||
omap.Put("Bruce Springsteen",
|
||||
[]string{"Thunder Road", "Born to Run", "This Hard Land"})
|
||||
omap.Put("J. Geils Band",
|
||||
[]string{"Musta Got Lost", "Freeze Frame", "Southside Shuffle"})
|
||||
omap.Put("Bob Seger",
|
||||
[]string{"Against the Wind", "Roll Me Away", "Night Moves"})
|
||||
|
||||
for _, key := range omap.Keys().([]string) {
|
||||
fmt.Println(key)
|
||||
}
|
||||
|
||||
omap.Delete("J. Geils Band")
|
||||
fmt.Println("\nDeleted 'J. Geils Band'...\n")
|
||||
|
||||
for _, key := range omap.Keys().([]string) {
|
||||
fmt.Printf("%s: %v\n", key, omap.Get(key))
|
||||
}
|
||||
|
||||
// Output:
|
||||
// Bruce Springsteen
|
||||
// J. Geils Band
|
||||
// Bob Seger
|
||||
//
|
||||
// Deleted 'J. Geils Band'...
|
||||
//
|
||||
// Bruce Springsteen: [Thunder Road Born to Run This Hard Land]
|
||||
// Bob Seger: [Against the Wind Roll Me Away Night Moves]
|
||||
}
|
||||
|
||||
func assertDeep(t *testing.T, v1, v2 interface{}) {
|
||||
if !reflect.DeepEqual(v1, v2) {
|
||||
t.Fatalf("%v != %v", v1, v2)
|
||||
}
|
||||
}
|
22
Godeps/_workspace/src/github.com/BurntSushi/ty/doc.go
generated
vendored
Normal file
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
Package ty provides utilities for writing type parametric functions with run
|
||||
time type safety.
|
||||
|
||||
This package contains two sub-packages `fun` and `data` which define some
|
||||
potentially useful functions and abstractions using the type checker in
|
||||
this package.
|
||||
|
||||
Requirements
|
||||
|
||||
Go tip (or 1.1 when it's released) is required. This package will not work
|
||||
with Go 1.0.x or earlier.
|
||||
|
||||
The very foundation of this package only recently became possible with the
|
||||
addition of 3 new functions in the standard library `reflect` package:
|
||||
SliceOf, MapOf and ChanOf. In particular, it provides the ability to
|
||||
dynamically construct types at run time from component types.
|
||||
|
||||
Further extensions to this package can be made if similar functions are added
|
||||
for structs and functions(?).
|
||||
*/
|
||||
package ty
|
84
Godeps/_workspace/src/github.com/BurntSushi/ty/fun/chan.go
generated
vendored
Normal file
|
@ -0,0 +1,84 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
|
||||
"github.com/BurntSushi/ty"
|
||||
)
|
||||
|
||||
// AsyncChan has a parametric type:
|
||||
//
|
||||
// func AsyncChan(chan A) (send chan<- A, recv <-chan A)
|
||||
//
|
||||
// AsyncChan provides a channel abstraction without a fixed size buffer.
|
||||
// The input should be a pointer to a channel that has a type without a
|
||||
// direction, e.g., `new(chan int)`. Two new channels are returned: `send` and
|
||||
// `recv`. The caller must send data on the `send` channel and receive data on
|
||||
// the `recv` channel.
|
||||
//
|
||||
// Implementation is inspired by Kyle Lemons' work:
|
||||
// https://github.com/kylelemons/iq/blob/master/iq_slice.go
|
||||
func AsyncChan(baseChan interface{}) (send, recv interface{}) {
|
||||
chk := ty.Check(
|
||||
new(func(*chan ty.A) (chan ty.A, chan ty.A)),
|
||||
baseChan)
|
||||
|
||||
// We don't care about the baseChan---it is only used to construct
|
||||
// the return types.
|
||||
tsend, trecv := chk.Returns[0], chk.Returns[1]
|
||||
|
||||
buf := make([]reflect.Value, 0, 10)
|
||||
rsend := reflect.MakeChan(tsend, 0)
|
||||
rrecv := reflect.MakeChan(trecv, 0)
|
||||
|
||||
go func() {
|
||||
defer rrecv.Close()
|
||||
|
||||
BUFLOOP:
|
||||
for {
|
||||
if len(buf) == 0 {
|
||||
rv, ok := rsend.Recv()
|
||||
if !ok {
|
||||
break BUFLOOP
|
||||
}
|
||||
buf = append(buf, rv)
|
||||
}
|
||||
|
||||
cases := []reflect.SelectCase{
|
||||
// case v, ok := <-send
|
||||
{
|
||||
Dir: reflect.SelectRecv,
|
||||
Chan: rsend,
|
||||
},
|
||||
// case recv <- buf[0]
|
||||
{
|
||||
Dir: reflect.SelectSend,
|
||||
Chan: rrecv,
|
||||
Send: buf[0],
|
||||
},
|
||||
}
|
||||
choice, rval, rok := reflect.Select(cases)
|
||||
switch choice {
|
||||
case 0:
|
||||
// case v, ok := <-send
|
||||
if !rok {
|
||||
break BUFLOOP
|
||||
}
|
||||
buf = append(buf, rval)
|
||||
case 1:
|
||||
// case recv <- buf[0]
|
||||
buf = buf[1:]
|
||||
default:
|
||||
panic("bug")
|
||||
}
|
||||
}
|
||||
for _, rv := range buf {
|
||||
rrecv.Send(rv)
|
||||
}
|
||||
}()
|
||||
|
||||
// Create the directional channel types.
|
||||
tsDir := reflect.ChanOf(reflect.SendDir, tsend.Elem())
|
||||
trDir := reflect.ChanOf(reflect.RecvDir, trecv.Elem())
|
||||
return rsend.Convert(tsDir).Interface(), rrecv.Convert(trDir).Interface()
|
||||
}
|
23
Godeps/_workspace/src/github.com/BurntSushi/ty/fun/chan_test.go
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestAsyncChan(t *testing.T) {
|
||||
s, r := AsyncChan(new(chan int))
|
||||
send, recv := s.(chan<- int), r.(<-chan int)
|
||||
|
||||
sending := randIntSlice(1000, 0)
|
||||
for _, v := range sending {
|
||||
send <- v
|
||||
}
|
||||
close(send)
|
||||
|
||||
received := make([]int, 0)
|
||||
for v := range recv {
|
||||
received = append(received, v)
|
||||
}
|
||||
|
||||
assertDeep(t, sending, received)
|
||||
}
|
118
Godeps/_workspace/src/github.com/BurntSushi/ty/fun/doc.go
generated
vendored
Normal file
|
@ -0,0 +1,118 @@
|
|||
/*
|
||||
Package fun provides type parametric utility functions for lists, sets,
|
||||
channels and maps.
|
||||
|
||||
The central contribution of this package is a set of functions that operate
|
||||
on values without depending on their types while maintaining type safety at
|
||||
run time using the `reflect` package.
|
||||
|
||||
There are two primary concerns when deciding whether to use this package
|
||||
or not: the loss of compile time type safety and performance. In particular,
|
||||
with regard to performance, most functions here are much slower than their
|
||||
built-in counter parts. However, there are a couple where the overhead of
|
||||
reflection is relatively insignificant: AsyncChan and ParMap.
|
||||
|
||||
In terms of code structure and organization, the price is mostly paid inside
|
||||
of the package due to the annoyances of operating with `reflect`. The caller
|
||||
usually only has one obligation other than to provide values consistent with
|
||||
the type of the function: type assert the result to the desired type.
|
||||
|
||||
When the caller provides values that are inconsistent with the parametric type
|
||||
of the function, the function will panic with a `TypeError`. (Either because
|
||||
the types cannot be unified or because they cannot be constructed due to
|
||||
limitations of the `reflect` package. See the `github.com/BurntSushi/ty`
|
||||
package for more details.)
|
||||
|
||||
Requirements
|
||||
|
||||
Go tip (or 1.1 when it's released) is required. This package will not work
|
||||
with Go 1.0.x or earlier.
|
||||
|
||||
The very foundation of this package only recently became possible with the
|
||||
addition of 3 new functions in the standard library `reflect` package:
|
||||
SliceOf, MapOf and ChanOf. In particular, it provides the ability to
|
||||
dynamically construct types at run time from component types.
|
||||
|
||||
Further extensions to this package can be made if similar functions are added
|
||||
for structs and functions(?).
|
||||
|
||||
Examples
|
||||
|
||||
Squaring each integer in a slice:
|
||||
|
||||
square := func(x int) int { return x * x }
|
||||
nums := []int{1, 2, 3, 4, 5}
|
||||
squares := Map(square, nums).([]int)
|
||||
|
||||
Reversing any slice:
|
||||
|
||||
slice := []string{"a", "b", "c"}
|
||||
reversed := Reverse(slice).([]string)
|
||||
|
||||
Sorting any slice:
|
||||
|
||||
// Sort a slice of structs with first class functions.
|
||||
type Album struct {
|
||||
Title string
|
||||
Year int
|
||||
}
|
||||
albums := []Album{
|
||||
{"Born to Run", 1975},
|
||||
{"WIESS", 1973},
|
||||
{"Darkness", 1978},
|
||||
{"Greetings", 1973},
|
||||
}
|
||||
|
||||
less := func(a, b Album) bool { return a.Year < b.Year }
|
||||
sorted := QuickSort(less, albums).([]Album)
|
||||
|
||||
Parallel map:
|
||||
|
||||
// Compute the prime factorization concurrently
|
||||
// for every integer in [1000, 10000].
|
||||
primeFactors := func(n int) []int { /* compute prime factors */ return nil }
|
||||
factors := ParMap(primeFactors, Range(1000, 10001)).([][]int)
|
||||
|
||||
Asynchronous channel without a fixed size buffer:
|
||||
|
||||
s, r := AsyncChan(new(chan int))
|
||||
send, recv := s.(chan<- int), r.(<-chan int)
|
||||
|
||||
// Send as much as you want.
|
||||
for i := 0; i < 100; i++ {
|
||||
send <- i
|
||||
}
|
||||
close(send)
|
||||
for i := range recv {
|
||||
// do something with `i`
|
||||
}
|
||||
|
||||
Shuffle any slice in place:
|
||||
|
||||
jumbleMe := []string{"The", "quick", "brown", "fox"}
|
||||
Shuffle(jumbleMe)
|
||||
|
||||
Function memoization:
|
||||
|
||||
// Memoizing a recursive function like `fibonacci`.
|
||||
// Write it like normal:
|
||||
var fib func(n int64) int64
|
||||
fib = func(n int64) int64 {
|
||||
switch n {
|
||||
case 0:
|
||||
return 0
|
||||
case 1:
|
||||
return 1
|
||||
}
|
||||
return fib(n - 1) + fib(n - 2)
|
||||
}
|
||||
|
||||
// And wrap it with `Memo`.
|
||||
fib = Memo(fib).(func(int64) int64)
|
||||
|
||||
// Will keep your CPU busy for a long time
|
||||
// without memoization.
|
||||
fmt.Println(fib(80))
|
||||
|
||||
*/
|
||||
package fun
|
51
Godeps/_workspace/src/github.com/BurntSushi/ty/fun/fun_test.go
generated
vendored
Normal file
|
@ -0,0 +1,51 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
|
||||
pf = fmt.Printf
|
||||
rng *rand.Rand
|
||||
flagBuiltin = false
|
||||
)
|
||||
|
||||
func init() {
|
||||
rng = rand.New(rand.NewSource(time.Now().UnixNano()))
|
||||
|
||||
flag.BoolVar(&flagBuiltin, "builtin", flagBuiltin,
|
||||
"When set, benchmarks for non-type parametric functions are run.")
|
||||
}
|
||||
|
||||
func assertDeep(t *testing.T, v1, v2 interface{}) {
|
||||
if !reflect.DeepEqual(v1, v2) {
|
||||
t.Fatalf("%v != %v", v1, v2)
|
||||
}
|
||||
}
|
||||
|
||||
func randIntSlice(size, max int) []int {
|
||||
if max == 0 {
|
||||
max = 1000000
|
||||
}
|
||||
slice := make([]int, size)
|
||||
for i := 0; i < size; i++ {
|
||||
slice[i] = rng.Intn(max)
|
||||
}
|
||||
return slice
|
||||
}
|
||||
|
||||
func randInt64Slice(size, max int64) []int64 {
|
||||
if max == 0 {
|
||||
max = 1000000
|
||||
}
|
||||
slice := make([]int64, size)
|
||||
for i := int64(0); i < size; i++ {
|
||||
slice[i] = rng.Int63n(max)
|
||||
}
|
||||
return slice
|
||||
}
|
35
Godeps/_workspace/src/github.com/BurntSushi/ty/fun/func.go
generated
vendored
Normal file
|
@ -0,0 +1,35 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
|
||||
"github.com/BurntSushi/ty"
|
||||
)
|
||||
|
||||
// Memo has a parametric type:
|
||||
//
|
||||
// func Memo(f func(A) B) func(A) B
|
||||
//
|
||||
// Memo memoizes any function of a single argument that returns a single value.
|
||||
// The type `A` must be a Go type for which the comparison operators `==` and
|
||||
// `!=` are fully defined (this rules out functions, maps and slices).
|
||||
func Memo(f interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A) ty.B)),
|
||||
f)
|
||||
vf := chk.Args[0]
|
||||
|
||||
saved := make(map[interface{}]reflect.Value)
|
||||
memo := func(in []reflect.Value) []reflect.Value {
|
||||
val := in[0].Interface()
|
||||
ret, ok := saved[val]
|
||||
if ok {
|
||||
return []reflect.Value{ret}
|
||||
}
|
||||
|
||||
ret = call1(vf, in[0])
|
||||
saved[val] = ret
|
||||
return []reflect.Value{ret}
|
||||
}
|
||||
return reflect.MakeFunc(vf.Type(), memo).Interface()
|
||||
}
|
111
Godeps/_workspace/src/github.com/BurntSushi/ty/fun/func_test.go
generated
vendored
Normal file
|
@ -0,0 +1,111 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// The benchmarks are the test. The memo version (w/ reflection or not) should
|
||||
// always be faster than the non-memo version given big enough N.
|
||||
|
||||
func BenchmarkFibonacciMemo(b *testing.B) {
|
||||
if flagBuiltin {
|
||||
benchmarkFibonacciMemoBuiltin(b)
|
||||
} else {
|
||||
benchmarkFibonacciMemoReflect(b)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkFibonacciMemoBuiltin(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
fibonacciMemoBuiltin(30)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkFibonacciMemoReflect(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
fibonacciMemoReflect(30)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkFibonacciNoMemo(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
fibonacci(30)
|
||||
}
|
||||
}
|
||||
|
||||
func fibonacciMemoBuiltin(n int) int {
|
||||
memo := func(f func(int) int) func(int) int {
|
||||
saved := make(map[int]int)
|
||||
return func(n int) int {
|
||||
ret, ok := saved[n]
|
||||
if ok {
|
||||
return ret
|
||||
}
|
||||
|
||||
ret = f(n)
|
||||
saved[n] = ret
|
||||
return ret
|
||||
}
|
||||
}
|
||||
|
||||
var fib func(n int) int
|
||||
fib = memo(func(n int) int {
|
||||
switch n {
|
||||
case 0:
|
||||
return 0
|
||||
case 1:
|
||||
return 1
|
||||
}
|
||||
return fib(n-1) + fib(n-2)
|
||||
})
|
||||
return fib(n)
|
||||
}
|
||||
|
||||
func fibonacciMemoReflect(n int) int {
|
||||
var fib func(n int) int
|
||||
fib = Memo(func(n int) int {
|
||||
switch n {
|
||||
case 0:
|
||||
return 0
|
||||
case 1:
|
||||
return 1
|
||||
}
|
||||
return fib(n-1) + fib(n-2)
|
||||
}).(func(int) int)
|
||||
return fib(n)
|
||||
}
|
||||
|
||||
func fibonacci(n int) int {
|
||||
switch n {
|
||||
case 0:
|
||||
return 0
|
||||
case 1:
|
||||
return 1
|
||||
}
|
||||
return fibonacci(n-1) + fibonacci(n-2)
|
||||
}
|
||||
|
||||
func ExampleMemo() {
|
||||
// Memoizing a recursive function like `fibonacci`.
|
||||
// Write it like normal:
|
||||
var fib func(n int64) int64
|
||||
fib = func(n int64) int64 {
|
||||
switch n {
|
||||
case 0:
|
||||
return 0
|
||||
case 1:
|
||||
return 1
|
||||
}
|
||||
return fib(n-1) + fib(n-2)
|
||||
}
|
||||
|
||||
// And wrap it with `Memo`.
|
||||
fib = Memo(fib).(func(int64) int64)
|
||||
|
||||
// Will keep your CPU busy for a long time
|
||||
// without memoization.
|
||||
fmt.Println(fib(80))
|
||||
// Output:
|
||||
// 23416728348467685
|
||||
}
|
303 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/list.go (generated, vendored, Normal file)
@@ -0,0 +1,303 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"runtime"
|
||||
"sync"
|
||||
|
||||
"github.com/BurntSushi/ty"
|
||||
)
|
||||
|
||||
// All has a parametric type:
|
||||
//
|
||||
// func All(p func(A) bool, xs []A) bool
|
||||
//
|
||||
// All returns `true` if and only if every element in `xs` satisfies `p`.
|
||||
func All(f, xs interface{}) bool {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A) bool, []ty.A) bool),
|
||||
f, xs)
|
||||
vf, vxs := chk.Args[0], chk.Args[1]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
for i := 0; i < xsLen; i++ {
|
||||
if !call1(vf, vxs.Index(i)).Interface().(bool) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Exists has a parametric type:
|
||||
//
|
||||
// func Exists(p func(A) bool, xs []A) bool
|
||||
//
|
||||
// Exists returns `true` if and only if an element in `xs` satisfies `p`.
|
||||
func Exists(f, xs interface{}) bool {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A) bool, []ty.A) bool),
|
||||
f, xs)
|
||||
vf, vxs := chk.Args[0], chk.Args[1]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
for i := 0; i < xsLen; i++ {
|
||||
if call1(vf, vxs.Index(i)).Interface().(bool) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// In has a parametric type:
|
||||
//
|
||||
// func In(needle A, haystack []A) bool
|
||||
//
|
||||
// In returns `true` if and only if `v` can be found in `xs`. The equality test
|
||||
// used is Go's standard `==` equality and NOT deep equality.
|
||||
//
|
||||
// Note that this requires that `A` be a type that can be meaningfully compared.
|
||||
func In(needle, haystack interface{}) bool {
|
||||
chk := ty.Check(
|
||||
new(func(ty.A, []ty.A) bool),
|
||||
needle, haystack)
|
||||
vhaystack := chk.Args[1]
|
||||
|
||||
length := vhaystack.Len()
|
||||
for i := 0; i < length; i++ {
|
||||
if vhaystack.Index(i).Interface() == needle {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Map has a parametric type:
|
||||
//
|
||||
// func Map(f func(A) B, xs []A) []B
|
||||
//
|
||||
// Map returns the list corresponding to the return value of applying
|
||||
// `f` to each element in `xs`.
|
||||
func Map(f, xs interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A) ty.B, []ty.A) []ty.B),
|
||||
f, xs)
|
||||
vf, vxs, tys := chk.Args[0], chk.Args[1], chk.Returns[0]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
vys := reflect.MakeSlice(tys, xsLen, xsLen)
|
||||
for i := 0; i < xsLen; i++ {
|
||||
vy := call1(vf, vxs.Index(i))
|
||||
vys.Index(i).Set(vy)
|
||||
}
|
||||
return vys.Interface()
|
||||
}
|
||||
|
||||
// Filter has a parametric type:
|
||||
//
|
||||
// func Filter(p func(A) bool, xs []A) []A
|
||||
//
|
||||
// Filter returns a new list only containing the elements of `xs` that satisfy
|
||||
// the predicate `p`.
|
||||
func Filter(p, xs interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A) bool, []ty.A) []ty.A),
|
||||
p, xs)
|
||||
vp, vxs, tys := chk.Args[0], chk.Args[1], chk.Returns[0]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
vys := reflect.MakeSlice(tys, 0, xsLen)
|
||||
for i := 0; i < xsLen; i++ {
|
||||
vx := vxs.Index(i)
|
||||
if call1(vp, vx).Bool() {
|
||||
vys = reflect.Append(vys, vx)
|
||||
}
|
||||
}
|
||||
return vys.Interface()
|
||||
}
|
||||
|
||||
// Foldl has a parametric type:
|
||||
//
|
||||
// func Foldl(f func(A, B) B, init B, xs []A) B
|
||||
//
|
||||
// Foldl reduces a list of A to a single element B using a left fold with
|
||||
// an initial value `init`.
|
||||
func Foldl(f, init, xs interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A, ty.B) ty.B, ty.B, []ty.A) ty.B),
|
||||
f, init, xs)
|
||||
vf, vinit, vxs, tb := chk.Args[0], chk.Args[1], chk.Args[2], chk.Returns[0]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
vb := zeroValue(tb)
|
||||
vb.Set(vinit)
|
||||
if xsLen == 0 {
|
||||
return vb.Interface()
|
||||
}
|
||||
|
||||
vb.Set(call1(vf, vxs.Index(0), vb))
|
||||
for i := 1; i < xsLen; i++ {
|
||||
vb.Set(call1(vf, vxs.Index(i), vb))
|
||||
}
|
||||
return vb.Interface()
|
||||
}
|
||||
|
||||
// Foldr has a parametric type:
|
||||
//
|
||||
// func Foldr(f func(A, B) B, init B, xs []A) B
|
||||
//
|
||||
// Foldr reduces a list of A to a single element B using a right fold with
|
||||
// an initial value `init`.
|
||||
func Foldr(f, init, xs interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A, ty.B) ty.B, ty.B, []ty.A) ty.B),
|
||||
f, init, xs)
|
||||
vf, vinit, vxs, tb := chk.Args[0], chk.Args[1], chk.Args[2], chk.Returns[0]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
vb := zeroValue(tb)
|
||||
vb.Set(vinit)
|
||||
if xsLen == 0 {
|
||||
return vb.Interface()
|
||||
}
|
||||
|
||||
vb.Set(call1(vf, vxs.Index(xsLen-1), vb))
|
||||
for i := xsLen - 2; i >= 0; i-- {
|
||||
vb.Set(call1(vf, vxs.Index(i), vb))
|
||||
}
|
||||
return vb.Interface()
|
||||
}
|
||||
|
||||
// Concat has a parametric type:
|
||||
//
|
||||
// func Concat(xs [][]A) []A
|
||||
//
|
||||
// Concat returns a new flattened list by appending all elements of `xs`.
|
||||
func Concat(xs interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func([][]ty.A) []ty.A),
|
||||
xs)
|
||||
vxs, tflat := chk.Args[0], chk.Returns[0]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
vflat := reflect.MakeSlice(tflat, 0, xsLen*3)
|
||||
for i := 0; i < xsLen; i++ {
|
||||
vflat = reflect.AppendSlice(vflat, vxs.Index(i))
|
||||
}
|
||||
return vflat.Interface()
|
||||
}
|
||||
|
||||
// Reverse has a parametric type:
|
||||
//
|
||||
// func Reverse(xs []A) []A
|
||||
//
|
||||
// Reverse returns a new slice that is the reverse of `xs`.
|
||||
func Reverse(xs interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func([]ty.A) []ty.A),
|
||||
xs)
|
||||
vxs, tys := chk.Args[0], chk.Returns[0]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
vys := reflect.MakeSlice(tys, xsLen, xsLen)
|
||||
for i := 0; i < xsLen; i++ {
|
||||
vys.Index(i).Set(vxs.Index(xsLen - 1 - i))
|
||||
}
|
||||
return vys.Interface()
|
||||
}
|
||||
|
||||
// Copy has a parametric type:
|
||||
//
|
||||
// func Copy(xs []A) []A
|
||||
//
|
||||
// Copy returns a copy of `xs` using Go's `copy` operation.
|
||||
func Copy(xs interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func([]ty.A) []ty.A),
|
||||
xs)
|
||||
vxs, tys := chk.Args[0], chk.Returns[0]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
vys := reflect.MakeSlice(tys, xsLen, xsLen)
|
||||
reflect.Copy(vys, vxs)
|
||||
return vys.Interface()
|
||||
}
|
||||
|
||||
// ParMap has a parametric type:
|
||||
//
|
||||
// func ParMap(f func(A) B, xs []A) []B
|
||||
//
|
||||
// ParMap is just like Map, except it applies `f` to each element in `xs`
|
||||
// concurrently using N worker goroutines (where N is the number of CPUs
|
||||
// available reported by the Go runtime). If you want to control the number
|
||||
// of goroutines spawned, use `ParMapN`.
|
||||
//
|
||||
// It is important that `f` not be a trivial operation, otherwise the overhead
|
||||
// of executing it concurrently will result in worse performance than using
|
||||
// a `Map`.
|
||||
func ParMap(f, xs interface{}) interface{} {
|
||||
n := runtime.NumCPU()
|
||||
if n < 1 {
|
||||
n = 1
|
||||
}
|
||||
return ParMapN(f, xs, n)
|
||||
}
|
||||
|
||||
// ParMapN has a parametric type:
|
||||
//
|
||||
// func ParMapN(f func(A) B, xs []A, n int) []B
|
||||
//
|
||||
// ParMapN is just like Map, except it applies `f` to each element in `xs`
|
||||
// concurrently using `n` worker goroutines.
|
||||
//
|
||||
// It is important that `f` not be a trivial operation, otherwise the overhead
|
||||
// of executing it concurrently will result in worse performance than using
|
||||
// a `Map`.
|
||||
func ParMapN(f, xs interface{}, n int) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A) ty.B, []ty.A) []ty.B),
|
||||
f, xs)
|
||||
vf, vxs, tys := chk.Args[0], chk.Args[1], chk.Returns[0]
|
||||
|
||||
xsLen := vxs.Len()
|
||||
ys := reflect.MakeSlice(tys, xsLen, xsLen)
|
||||
|
||||
if n < 1 {
|
||||
n = 1
|
||||
}
|
||||
work := make(chan int, n)
|
||||
wg := new(sync.WaitGroup)
|
||||
for i := 0; i < n; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
for j := range work {
|
||||
// Good golly miss molly. Is `reflect.Value.Index`
|
||||
// safe to access/set from multiple goroutines?
|
||||
// XXX: If not, we'll need an extra wave of allocation to
|
||||
// use real slices of `reflect.Value`.
|
||||
ys.Index(j).Set(call1(vf, vxs.Index(j)))
|
||||
}
|
||||
wg.Done()
|
||||
}()
|
||||
}
|
||||
for i := 0; i < xsLen; i++ {
|
||||
work <- i
|
||||
}
|
||||
close(work)
|
||||
wg.Wait()
|
||||
return ys.Interface()
|
||||
}
|
||||
|
||||
// Range generates a list of integers corresponding to every integer in
|
||||
// the half-open interval [x, y).
|
||||
//
|
||||
// Range will panic if `end < start`.
|
||||
func Range(start, end int) []int {
|
||||
if end < start {
|
||||
panic("range must have end greater than or equal to start")
|
||||
}
|
||||
r := make([]int, end-start)
|
||||
for i := start; i < end; i++ {
|
||||
r[i-start] = i
|
||||
}
|
||||
return r
|
||||
}
|
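All of the list helpers above follow the same calling convention: pass an ordinary function plus a slice, then type-assert the `interface{}` result back to a concrete type. A short illustrative sketch (not part of this commit) combining `Range`, `Map`, `Filter` and `Foldl`:

```
package main

import (
	"fmt"

	"github.com/BurntSushi/ty/fun"
)

func main() {
	xs := fun.Range(1, 6) // [1 2 3 4 5]

	squares := fun.Map(func(x int) int { return x * x }, xs).([]int)
	evens := fun.Filter(func(x int) bool { return x%2 == 0 }, squares).([]int)

	// Foldl passes (element, accumulator) to the reducer.
	sum := fun.Foldl(func(x, acc int) int { return acc + x }, 0, evens).(int)

	fmt.Println(squares) // [1 4 9 16 25]
	fmt.Println(evens)   // [4 16]
	fmt.Println(sum)     // 20
}
```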
148 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/list_parmap_test.go (generated, vendored, Normal file)
@@ -0,0 +1,148 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"math"
|
||||
"runtime"
|
||||
"sync"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestParMap(t *testing.T) {
|
||||
square := func(x int) int { return x * x }
|
||||
squares := ParMap(square, []int{1, 2, 3, 4, 5}).([]int)
|
||||
|
||||
assertDeep(t, squares, []int{1, 4, 9, 16, 25})
|
||||
assertDeep(t, []int{}, ParMap(square, []int{}).([]int))
|
||||
}
|
||||
|
||||
func BenchmarkParMapSquare(b *testing.B) {
|
||||
if flagBuiltin {
|
||||
benchmarkParMapSquareBuiltin(b)
|
||||
} else {
|
||||
benchmarkParMapSquareReflect(b)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkParMapSquareReflect(b *testing.B) {
|
||||
b.StopTimer()
|
||||
square := func(a int64) int64 { return a * a }
|
||||
list := randInt64Slice(1000, 1<<30)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = ParMap(square, list).([]int64)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkParMapSquareBuiltin(b *testing.B) {
|
||||
b.StopTimer()
|
||||
square := func(a int64) int64 { return a * a }
|
||||
list := randInt64Slice(1000, 1<<30)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = parMapInt64(square, list)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkParMapPrime(b *testing.B) {
|
||||
if flagBuiltin {
|
||||
benchmarkParMapPrimeBuiltin(b)
|
||||
} else {
|
||||
benchmarkParMapPrimeReflect(b)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkParMapPrimeReflect(b *testing.B) {
|
||||
b.StopTimer()
|
||||
list := randInt64Slice(1000, 1<<30)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = ParMap(primeFactors, list).([][]int64)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkParMapPrimeBuiltin(b *testing.B) {
|
||||
b.StopTimer()
|
||||
list := randInt64Slice(1000, 1<<30)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = parMapSliceInt64(primeFactors, list)
|
||||
}
|
||||
}
|
||||
|
||||
func primeFactors(n int64) []int64 {
|
||||
if isPrime(n) {
|
||||
return []int64{n}
|
||||
}
|
||||
|
||||
bound := int64(math.Floor(math.Sqrt(float64(n))))
|
||||
for i := int64(2); i <= bound; i++ {
|
||||
if n%i == 0 {
|
||||
return append(primeFactors(i), primeFactors(n/i)...)
|
||||
}
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func isPrime(n int64) bool {
|
||||
bound := int64(math.Floor(math.Sqrt(float64(n))))
|
||||
for i := int64(2); i <= bound; i++ {
|
||||
if n%i == 0 {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func parMapInt64(f func(n int64) int64, xs []int64) []int64 {
|
||||
ys := make([]int64, len(xs), len(xs))
|
||||
N := runtime.NumCPU()
|
||||
if N < 1 {
|
||||
N = 1
|
||||
}
|
||||
work := make(chan int, N)
|
||||
wg := new(sync.WaitGroup)
|
||||
for i := 0; i < N; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
for j := range work {
|
||||
ys[j] = f(xs[j])
|
||||
}
|
||||
wg.Done()
|
||||
}()
|
||||
}
|
||||
for i := 0; i < len(xs); i++ {
|
||||
work <- i
|
||||
}
|
||||
close(work)
|
||||
wg.Wait()
|
||||
return ys
|
||||
}
|
||||
|
||||
func parMapSliceInt64(f func(n int64) []int64, xs []int64) [][]int64 {
|
||||
ys := make([][]int64, len(xs), len(xs))
|
||||
N := runtime.NumCPU()
|
||||
if N < 1 {
|
||||
N = 1
|
||||
}
|
||||
work := make(chan int, N)
|
||||
wg := new(sync.WaitGroup)
|
||||
for i := 0; i < N; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
for j := range work {
|
||||
ys[j] = f(xs[j])
|
||||
}
|
||||
wg.Done()
|
||||
}()
|
||||
}
|
||||
for i := 0; i < len(xs); i++ {
|
||||
work <- i
|
||||
}
|
||||
close(work)
|
||||
wg.Wait()
|
||||
return ys
|
||||
}
|
149 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/list_test.go (generated, vendored, Normal file)
@@ -0,0 +1,149 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMap(t *testing.T) {
|
||||
square := func(x int) int { return x * x }
|
||||
squares := Map(square, []int{1, 2, 3, 4, 5}).([]int)
|
||||
|
||||
assertDeep(t, squares, []int{1, 4, 9, 16, 25})
|
||||
assertDeep(t, []int{}, Map(square, []int{}).([]int))
|
||||
|
||||
strlen := func(s string) int { return len(s) }
|
||||
lens := Map(strlen, []string{"abc", "ab", "a"}).([]int)
|
||||
assertDeep(t, lens, []int{3, 2, 1})
|
||||
}
|
||||
|
||||
func TestFilter(t *testing.T) {
|
||||
even := func(x int) bool { return x%2 == 0 }
|
||||
evens := Filter(even, []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}).([]int)
|
||||
|
||||
assertDeep(t, evens, []int{2, 4, 6, 8, 10})
|
||||
assertDeep(t, []int{}, Filter(even, []int{}).([]int))
|
||||
}
|
||||
|
||||
func TestFoldl(t *testing.T) {
|
||||
// Use an operation that isn't associative so that we know we've got
|
||||
// the left/right folds done correctly.
|
||||
reducer := func(a, b int) int { return b % a }
|
||||
v := Foldl(reducer, 7, []int{4, 5, 6}).(int)
|
||||
|
||||
assertDeep(t, v, 3)
|
||||
assertDeep(t, 0, Foldl(reducer, 0, []int{}).(int))
|
||||
}
|
||||
|
||||
func TestFoldr(t *testing.T) {
|
||||
// Use an operation that isn't associative so that we know we've got
|
||||
// the left/right folds done correctly.
|
||||
reducer := func(a, b int) int { return b % a }
|
||||
v := Foldr(reducer, 7, []int{4, 5, 6}).(int)
|
||||
|
||||
assertDeep(t, v, 1)
|
||||
assertDeep(t, 0, Foldr(reducer, 0, []int{}).(int))
|
||||
}
|
||||
|
||||
func TestConcat(t *testing.T) {
|
||||
toflat := [][]int{
|
||||
{1, 2, 3},
|
||||
{4, 5, 6},
|
||||
{7, 8, 9},
|
||||
}
|
||||
flat := Concat(toflat).([]int)
|
||||
|
||||
assertDeep(t, flat, []int{1, 2, 3, 4, 5, 6, 7, 8, 9})
|
||||
}
|
||||
|
||||
func TestReverse(t *testing.T) {
|
||||
reversed := Reverse([]int{1, 2, 3, 4, 5}).([]int)
|
||||
|
||||
assertDeep(t, reversed, []int{5, 4, 3, 2, 1})
|
||||
}
|
||||
|
||||
func TestCopy(t *testing.T) {
|
||||
orig := []int{1, 2, 3, 4, 5}
|
||||
copied := Copy(orig).([]int)
|
||||
|
||||
orig[1] = 999
|
||||
|
||||
assertDeep(t, copied, []int{1, 2, 3, 4, 5})
|
||||
}
|
||||
|
||||
func TestPointers(t *testing.T) {
|
||||
type temp struct {
|
||||
val int
|
||||
}
|
||||
square := func(t *temp) *temp { return &temp{t.val * t.val} }
|
||||
squares := Map(square, []*temp{
|
||||
{1}, {2}, {3}, {4}, {5},
|
||||
})
|
||||
|
||||
assertDeep(t, squares, []*temp{
|
||||
{1}, {4}, {9}, {16}, {25},
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkMapSquare(b *testing.B) {
|
||||
if flagBuiltin {
|
||||
benchmarkMapSquareBuiltin(b)
|
||||
} else {
|
||||
benchmarkMapSquareReflect(b)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkMapSquareReflect(b *testing.B) {
|
||||
b.StopTimer()
|
||||
square := func(a int64) int64 { return a * a }
|
||||
list := randInt64Slice(1000, 1<<30)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = Map(square, list).([]int64)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkMapSquareBuiltin(b *testing.B) {
|
||||
b.StopTimer()
|
||||
square := func(a int64) int64 { return a * a }
|
||||
list := randInt64Slice(1000, 1<<30)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
ret := make([]int64, len(list))
|
||||
for i := 0; i < len(list); i++ {
|
||||
ret[i] = square(list[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkMapPrime(b *testing.B) {
|
||||
if flagBuiltin {
|
||||
benchmarkMapPrimeBuiltin(b)
|
||||
} else {
|
||||
benchmarkMapPrimeReflect(b)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkMapPrimeReflect(b *testing.B) {
|
||||
b.StopTimer()
|
||||
list := randInt64Slice(1000, 1<<30)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = Map(primeFactors, list).([][]int64)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkMapPrimeBuiltin(b *testing.B) {
|
||||
b.StopTimer()
|
||||
list := randInt64Slice(1000, 1<<30)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
ret := make([][]int64, len(list))
|
||||
for i := 0; i < len(list); i++ {
|
||||
ret[i] = primeFactors(list[i])
|
||||
}
|
||||
}
|
||||
}
|
46 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/map.go (generated, vendored, Normal file)
@@ -0,0 +1,46 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
|
||||
"github.com/BurntSushi/ty"
|
||||
)
|
||||
|
||||
// Keys has a parametric type:
|
||||
//
|
||||
// func Keys(m map[A]B) []A
|
||||
//
|
||||
// Keys returns a list of the keys of `m` in an unspecified order.
|
||||
func Keys(m interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(map[ty.A]ty.B) []ty.A),
|
||||
m)
|
||||
vm, tkeys := chk.Args[0], chk.Returns[0]
|
||||
|
||||
vkeys := reflect.MakeSlice(tkeys, vm.Len(), vm.Len())
|
||||
for i, vkey := range vm.MapKeys() {
|
||||
vkeys.Index(i).Set(vkey)
|
||||
}
|
||||
return vkeys.Interface()
|
||||
}
|
||||
|
||||
// Values has a parametric type:
|
||||
//
|
||||
// func Values(m map[A]B) []B
|
||||
//
|
||||
// Values returns a list of the values of `m` in an unspecified order.
|
||||
func Values(m interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(map[ty.A]ty.B) []ty.B),
|
||||
m)
|
||||
vm, tvals := chk.Args[0], chk.Returns[0]
|
||||
|
||||
vvals := reflect.MakeSlice(tvals, vm.Len(), vm.Len())
|
||||
for i, vkey := range vm.MapKeys() {
|
||||
vvals.Index(i).Set(vm.MapIndex(vkey))
|
||||
}
|
||||
return vvals.Interface()
|
||||
}
|
||||
|
||||
// func MapMerge(m1, m2 interface{}) interface{} {
|
||||
// }
|
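`Keys` and `Values` return their results in an unspecified order, so callers normally sort before comparing, as the tests below do. An illustrative sketch of the calling pattern (not part of this commit):

```
package main

import (
	"fmt"
	"sort"

	"github.com/BurntSushi/ty/fun"
)

func main() {
	ages := map[string]int{"ada": 36, "alan": 41, "grace": 85}

	keys := fun.Keys(ages).([]string) // order is unspecified
	vals := fun.Values(ages).([]int)

	sort.Strings(keys)
	sort.Ints(vals)

	fmt.Println(keys) // [ada alan grace]
	fmt.Println(vals) // [36 41 85]
}
```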
25 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/map_test.go (generated, vendored, Normal file)
@@ -0,0 +1,25 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestKeys(t *testing.T) {
|
||||
m := map[string]int{
|
||||
"c": 0, "b": 0, "a": 0,
|
||||
}
|
||||
keys := Keys(m).([]string)
|
||||
|
||||
scmp := func(a, b string) bool { return a < b }
|
||||
assertDeep(t, []string{"a", "b", "c"}, QuickSort(scmp, keys))
|
||||
}
|
||||
|
||||
func TestValues(t *testing.T) {
|
||||
m := map[int]string{
|
||||
1: "c", 2: "b", 3: "a",
|
||||
}
|
||||
vals := Values(m).([]string)
|
||||
|
||||
scmp := func(a, b string) bool { return a < b }
|
||||
assertDeep(t, []string{"a", "b", "c"}, QuickSort(scmp, vals))
|
||||
}
|
94 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/rand.go (generated, vendored, Normal file)
@@ -0,0 +1,94 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"math/rand"
|
||||
"reflect"
|
||||
"time"
|
||||
|
||||
"github.com/BurntSushi/ty"
|
||||
)
|
||||
|
||||
var randNumGen *rand.Rand
|
||||
|
||||
func init() {
|
||||
randNumGen = rand.New(rand.NewSource(time.Now().UnixNano()))
|
||||
}
|
||||
|
||||
// ShuffleGen has a parametric type:
|
||||
//
|
||||
// func ShuffleGen(xs []A, rng *rand.Rand)
|
||||
//
|
||||
// ShuffleGen shuffles `xs` in place using the given random number
|
||||
// generator `rng`.
|
||||
func ShuffleGen(xs interface{}, rng *rand.Rand) {
|
||||
chk := ty.Check(
|
||||
new(func([]ty.A, *rand.Rand)),
|
||||
xs, rng)
|
||||
vxs := chk.Args[0]
|
||||
|
||||
// Implements the Fisher-Yates shuffle: http://goo.gl/Hb9vg
|
||||
xsLen := vxs.Len()
|
||||
swapper := swapperOf(vxs.Type().Elem())
|
||||
for i := xsLen - 1; i >= 1; i-- {
|
||||
j := rng.Intn(i + 1)
|
||||
swapper.swap(vxs.Index(i), vxs.Index(j))
|
||||
}
|
||||
}
|
||||
|
||||
// Shuffle has a parametric type:
|
||||
//
|
||||
// func Shuffle(xs []A)
|
||||
//
|
||||
// Shuffle shuffles `xs` in place using a default random number
|
||||
// generator seeded once at program initialization.
|
||||
func Shuffle(xs interface{}) {
|
||||
ShuffleGen(xs, randNumGen)
|
||||
}
|
||||
|
||||
// Sample has a parametric type:
|
||||
//
|
||||
// func Sample(population []A, n int) []A
|
||||
//
|
||||
// Sample returns a random sample of size `n` from a list
|
||||
// `population` using a default random number generator seeded once at
|
||||
// program initialization.
|
||||
// All elements in `population` have an equal chance of being selected.
|
||||
// If `n` is greater than the size of `population`, then `n` is set to
|
||||
// the size of the population.
|
||||
func Sample(population interface{}, n int) interface{} {
|
||||
return SampleGen(population, n, randNumGen)
|
||||
}
|
||||
|
||||
// SampleGen has a parametric type:
|
||||
//
|
||||
// func SampleGen(population []A, n int, rng *rand.Rand) []A
|
||||
//
|
||||
// SampleGen returns a random sample of size `n` from a list
|
||||
// `population` using a given random number generator `rng`.
|
||||
// All elements in `population` have an equal chance of being selected.
|
||||
// If `n` is greater than the size of `population`, then `n` is set to
|
||||
// the size of the population.
|
||||
func SampleGen(population interface{}, n int, rng *rand.Rand) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func([]ty.A, int, *rand.Rand) []ty.A),
|
||||
population, n, rng)
|
||||
rpop, tsamp := chk.Args[0], chk.Returns[0]
|
||||
|
||||
popLen := rpop.Len()
|
||||
if n == 0 {
|
||||
return reflect.MakeSlice(tsamp, 0, 0).Interface()
|
||||
}
|
||||
if n > popLen {
|
||||
n = popLen
|
||||
}
|
||||
|
||||
// TODO(burntsushi): Implement an algorithm that doesn't depend on
|
||||
// the size of the population.
|
||||
|
||||
rsamp := reflect.MakeSlice(tsamp, n, n)
|
||||
choices := rng.Perm(popLen)
|
||||
for i := 0; i < n; i++ {
|
||||
rsamp.Index(i).Set(rpop.Index(choices[i]))
|
||||
}
|
||||
return rsamp.Interface()
|
||||
}
|
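`Shuffle` and `Sample` use the package-level RNG seeded once at init time, while the `*Gen` variants take an explicit `*rand.Rand` for reproducible results. A small illustrative sketch (not part of this commit):

```
package main

import (
	"fmt"
	"math/rand"

	"github.com/BurntSushi/ty/fun"
)

func main() {
	nums := fun.Range(0, 10)

	// Shuffle mutates the slice in place using the package-level RNG.
	fun.Shuffle(nums)
	fmt.Println(nums)

	// SampleGen takes an explicit *rand.Rand, so the draw is reproducible.
	rng := rand.New(rand.NewSource(1))
	picked := fun.SampleGen(nums, 3, rng).([]int)
	fmt.Println(picked) // three distinct elements of nums
}
```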
100 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/rand_test.go (generated, vendored, Normal file)
@@ -0,0 +1,100 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"math/rand"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestShuffle(t *testing.T) {
|
||||
nums := Range(0, 100)
|
||||
Shuffle(nums)
|
||||
|
||||
assertDeep(t, Set(nums), Set(Range(0, 100)))
|
||||
}
|
||||
|
||||
func TestSample(t *testing.T) {
|
||||
rng := rand.New(rand.NewSource(0))
|
||||
nums := Range(0, 100)
|
||||
sample := SampleGen(nums, 3, rng).([]int)
|
||||
|
||||
assertDeep(t, Set([]int{35, 40, 50}), Set(sample))
|
||||
}
|
||||
|
||||
func BenchmarkShuffle(b *testing.B) {
|
||||
if flagBuiltin {
|
||||
benchmarkShuffleBuiltin(b)
|
||||
} else {
|
||||
benchmarkShuffleReflect(b)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkShuffleBuiltin(b *testing.B) {
|
||||
b.StopTimer()
|
||||
list := randIntSlice(10000, 0)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
shuffle(list)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkShuffleReflect(b *testing.B) {
|
||||
b.StopTimer()
|
||||
list := randIntSlice(10000, 0)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
Shuffle(list)
|
||||
}
|
||||
}
|
||||
|
||||
func shuffle(xs []int) {
|
||||
for i := len(xs) - 1; i >= 1; i-- {
|
||||
j := randNumGen.Intn(i + 1)
|
||||
xs[i], xs[j] = xs[j], xs[i]
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkSample(b *testing.B) {
|
||||
if flagBuiltin {
|
||||
benchmarkSampleBuiltin(b)
|
||||
} else {
|
||||
benchmarkSampleReflect(b)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkSampleBuiltin(b *testing.B) {
|
||||
b.StopTimer()
|
||||
list := randIntSlice(10000, 0)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
sample(list, 100)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkSampleReflect(b *testing.B) {
|
||||
b.StopTimer()
|
||||
list := randIntSlice(10000, 0)
|
||||
b.StartTimer()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
Sample(list, 100)
|
||||
}
|
||||
}
|
||||
|
||||
func sample(pop []int, n int) []int {
|
||||
if n == 0 {
|
||||
return []int{}
|
||||
}
|
||||
if n > len(pop) {
|
||||
n = len(pop)
|
||||
}
|
||||
|
||||
samp := make([]int, n, n)
|
||||
choices := randNumGen.Perm(len(pop))
|
||||
for i := 0; i < n; i++ {
|
||||
samp[i] = pop[choices[i]]
|
||||
}
|
||||
return samp
|
||||
}
|
99 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/set.go (generated, vendored, Normal file)
@@ -0,0 +1,99 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
|
||||
"github.com/BurntSushi/ty"
|
||||
)
|
||||
|
||||
// Set has a parametric type:
|
||||
//
|
||||
// func Set(xs []A) map[A]bool
|
||||
//
|
||||
// Set creates a set from a list.
|
||||
func Set(xs interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func([]ty.A) map[ty.A]bool),
|
||||
xs)
|
||||
vxs, tset := chk.Args[0], chk.Returns[0]
|
||||
|
||||
vtrue := reflect.ValueOf(true)
|
||||
vset := reflect.MakeMap(tset)
|
||||
xsLen := vxs.Len()
|
||||
for i := 0; i < xsLen; i++ {
|
||||
vset.SetMapIndex(vxs.Index(i), vtrue)
|
||||
}
|
||||
return vset.Interface()
|
||||
}
|
||||
|
||||
// Union has a parametric type:
|
||||
//
|
||||
// func Union(a map[A]bool, b map[A]bool) map[A]bool
|
||||
//
|
||||
// Union returns the union of two sets, where a set is represented as a
|
||||
// `map[A]bool`. The sets `a` and `b` are not modified.
|
||||
func Union(a, b interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(map[ty.A]bool, map[ty.A]bool) map[ty.A]bool),
|
||||
a, b)
|
||||
va, vb, tc := chk.Args[0], chk.Args[1], chk.Returns[0]
|
||||
|
||||
vtrue := reflect.ValueOf(true)
|
||||
vc := reflect.MakeMap(tc)
|
||||
for _, vkey := range va.MapKeys() {
|
||||
vc.SetMapIndex(vkey, vtrue)
|
||||
}
|
||||
for _, vkey := range vb.MapKeys() {
|
||||
vc.SetMapIndex(vkey, vtrue)
|
||||
}
|
||||
return vc.Interface()
|
||||
}
|
||||
|
||||
// Intersection has a parametric type:
|
||||
//
|
||||
// func Intersection(a map[A]bool, b map[A]bool) map[A]bool
|
||||
//
|
||||
// Intersection returns the intersection of two sets, where a set is
|
||||
// represented as a `map[A]bool`. The sets `a` and `b` are not modified.
|
||||
func Intersection(a, b interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(map[ty.A]bool, map[ty.A]bool) map[ty.A]bool),
|
||||
a, b)
|
||||
va, vb, tc := chk.Args[0], chk.Args[1], chk.Returns[0]
|
||||
|
||||
vtrue := reflect.ValueOf(true)
|
||||
vc := reflect.MakeMap(tc)
|
||||
for _, vkey := range va.MapKeys() {
|
||||
if vb.MapIndex(vkey).IsValid() {
|
||||
vc.SetMapIndex(vkey, vtrue)
|
||||
}
|
||||
}
|
||||
for _, vkey := range vb.MapKeys() {
|
||||
if va.MapIndex(vkey).IsValid() {
|
||||
vc.SetMapIndex(vkey, vtrue)
|
||||
}
|
||||
}
|
||||
return vc.Interface()
|
||||
}
|
||||
|
||||
// Difference has a parametric type:
|
||||
//
|
||||
// func Difference(a map[A]bool, b map[A]bool) map[A]bool
|
||||
//
|
||||
// Difference returns a set with all elements in `a` that are not in `b`.
|
||||
// The sets `a` and `b` are not modified.
|
||||
func Difference(a, b interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(map[ty.A]bool, map[ty.A]bool) map[ty.A]bool),
|
||||
a, b)
|
||||
va, vb, tc := chk.Args[0], chk.Args[1], chk.Returns[0]
|
||||
|
||||
vtrue := reflect.ValueOf(true)
|
||||
vc := reflect.MakeMap(tc)
|
||||
for _, vkey := range va.MapKeys() {
|
||||
if !vb.MapIndex(vkey).IsValid() {
|
||||
vc.SetMapIndex(vkey, vtrue)
|
||||
}
|
||||
}
|
||||
return vc.Interface()
|
||||
}
|
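The set helpers above represent a set as `map[A]bool`, so membership checks after `Union`, `Intersection` or `Difference` are plain map lookups. An illustrative sketch (not part of this commit):

```
package main

import (
	"fmt"

	"github.com/BurntSushi/ty/fun"
)

func main() {
	a := fun.Set([]string{"go", "rust", "haskell"}).(map[string]bool)
	b := fun.Set([]string{"go", "ocaml"}).(map[string]bool)

	union := fun.Union(a, b).(map[string]bool)          // {go rust haskell ocaml}
	common := fun.Intersection(a, b).(map[string]bool)  // {go}
	onlyA := fun.Difference(a, b).(map[string]bool)     // {rust haskell}

	fmt.Println(len(union), common["go"], onlyA["go"]) // 4 true false
}
```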
84 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/set_test.go (generated, vendored, Normal file)
@@ -0,0 +1,84 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSet(t *testing.T) {
|
||||
a := []string{"andrew", "plato", "andrew", "cauchy", "cauchy", "andrew"}
|
||||
set := Set(a).(map[string]bool)
|
||||
|
||||
assertDeep(t, set, map[string]bool{
|
||||
"andrew": true,
|
||||
"plato": true,
|
||||
"cauchy": true,
|
||||
})
|
||||
}
|
||||
|
||||
func TestUnion(t *testing.T) {
|
||||
a := map[string]bool{
|
||||
"springsteen": true,
|
||||
"jgeils": true,
|
||||
"seger": true,
|
||||
"metallica": true,
|
||||
}
|
||||
b := map[string]bool{
|
||||
"metallica": true,
|
||||
"chesney": true,
|
||||
"mcgraw": true,
|
||||
"cash": true,
|
||||
}
|
||||
c := Union(a, b).(map[string]bool)
|
||||
|
||||
assertDeep(t, c, map[string]bool{
|
||||
"springsteen": true,
|
||||
"jgeils": true,
|
||||
"seger": true,
|
||||
"metallica": true,
|
||||
"chesney": true,
|
||||
"mcgraw": true,
|
||||
"cash": true,
|
||||
})
|
||||
}
|
||||
|
||||
func TestIntersection(t *testing.T) {
|
||||
a := map[string]bool{
|
||||
"springsteen": true,
|
||||
"jgeils": true,
|
||||
"seger": true,
|
||||
"metallica": true,
|
||||
}
|
||||
b := map[string]bool{
|
||||
"metallica": true,
|
||||
"chesney": true,
|
||||
"mcgraw": true,
|
||||
"cash": true,
|
||||
}
|
||||
c := Intersection(a, b).(map[string]bool)
|
||||
|
||||
assertDeep(t, c, map[string]bool{
|
||||
"metallica": true,
|
||||
})
|
||||
}
|
||||
|
||||
func TestDifference(t *testing.T) {
|
||||
a := map[string]bool{
|
||||
"springsteen": true,
|
||||
"jgeils": true,
|
||||
"seger": true,
|
||||
"metallica": true,
|
||||
}
|
||||
b := map[string]bool{
|
||||
"metallica": true,
|
||||
"chesney": true,
|
||||
"mcgraw": true,
|
||||
"cash": true,
|
||||
}
|
||||
c := Difference(a, b).(map[string]bool)
|
||||
|
||||
assertDeep(t, c, map[string]bool{
|
||||
"springsteen": true,
|
||||
"jgeils": true,
|
||||
"seger": true,
|
||||
})
|
||||
}
|
98 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/sort.go (generated, vendored, Normal file)
@@ -0,0 +1,98 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sort"
|
||||
|
||||
"github.com/BurntSushi/ty"
|
||||
)
|
||||
|
||||
// QuickSort has a parametric type:
|
||||
//
|
||||
// func QuickSort(less func(x1 A, x2 A) bool, []A) []A
|
||||
//
|
||||
// QuickSort applies the "quicksort" algorithm to return a new sorted list
|
||||
// of `xs`, where `xs` is not modified.
|
||||
//
|
||||
// `less` should be a function that returns true if and only if `x1` is less
|
||||
// than `x2`.
|
||||
func QuickSort(less, xs interface{}) interface{} {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A, ty.A) bool, []ty.A) []ty.A),
|
||||
less, xs)
|
||||
vless, vxs, tys := chk.Args[0], chk.Args[1], chk.Returns[0]
|
||||
|
||||
var qsort func(left, right int)
|
||||
var partition func(left, right, pivot int) int
|
||||
xsind := Range(0, vxs.Len())
|
||||
|
||||
qsort = func(left, right int) {
|
||||
if left >= right {
|
||||
return
|
||||
}
|
||||
pivot := (left + right) / 2
|
||||
pivot = partition(left, right, pivot)
|
||||
|
||||
qsort(left, pivot-1)
|
||||
qsort(pivot+1, right)
|
||||
}
|
||||
partition = func(left, right, pivot int) int {
|
||||
vpivot := xsind[pivot]
|
||||
xsind[pivot], xsind[right] = xsind[right], xsind[pivot]
|
||||
|
||||
ind := left
|
||||
for i := left; i < right; i++ {
|
||||
if call1(vless, vxs.Index(xsind[i]), vxs.Index(vpivot)).Bool() {
|
||||
xsind[i], xsind[ind] = xsind[ind], xsind[i]
|
||||
ind++
|
||||
}
|
||||
}
|
||||
xsind[ind], xsind[right] = xsind[right], xsind[ind]
|
||||
return ind
|
||||
}
|
||||
|
||||
// Sort `xsind` in place.
|
||||
qsort(0, len(xsind)-1)
|
||||
|
||||
vys := reflect.MakeSlice(tys, len(xsind), len(xsind))
|
||||
for i, xsIndex := range xsind {
|
||||
vys.Index(i).Set(vxs.Index(xsIndex))
|
||||
}
|
||||
return vys.Interface()
|
||||
}
|
||||
|
||||
// Sort has a parametric type:
|
||||
//
|
||||
// func Sort(less func(x1 A, x2 A) bool, []A)
|
||||
//
|
||||
// Sort uses the standard library `sort` package to sort `xs` in place.
|
||||
//
|
||||
// `less` should be a function that returns true if and only if `x1` is less
|
||||
// than `x2`.
|
||||
func Sort(less, xs interface{}) {
|
||||
chk := ty.Check(
|
||||
new(func(func(ty.A, ty.A) bool, []ty.A)),
|
||||
less, xs)
|
||||
|
||||
vless, vxs := chk.Args[0], chk.Args[1]
|
||||
sort.Sort(&sortable{vless, vxs, swapperOf(vxs.Type().Elem())})
|
||||
}
|
||||
|
||||
type sortable struct {
|
||||
less reflect.Value
|
||||
xs reflect.Value
|
||||
swapper swapper
|
||||
}
|
||||
|
||||
func (s *sortable) Less(i, j int) bool {
|
||||
ith, jth := s.xs.Index(i), s.xs.Index(j)
|
||||
return call1(s.less, ith, jth).Bool()
|
||||
}
|
||||
|
||||
func (s *sortable) Swap(i, j int) {
|
||||
s.swapper.swap(s.xs.Index(i), s.xs.Index(j))
|
||||
}
|
||||
|
||||
func (s *sortable) Len() int {
|
||||
return s.xs.Len()
|
||||
}
|
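`QuickSort` returns a freshly allocated sorted slice and leaves its input alone, whereas `Sort` reorders the slice in place through the standard `sort` package. A short illustrative sketch (not part of this commit):

```
package main

import (
	"fmt"

	"github.com/BurntSushi/ty/fun"
)

func main() {
	asc := func(a, b int) bool { return a < b }

	orig := []int{10, 3, 5, 1, 15, 6}

	// QuickSort leaves its input untouched and returns a sorted copy.
	sorted := fun.QuickSort(asc, orig).([]int)
	fmt.Println(sorted, orig) // [1 3 5 6 10 15] [10 3 5 1 15 6]

	// Sort reorders the slice in place via the standard sort package.
	fun.Sort(asc, orig)
	fmt.Println(orig) // [1 3 5 6 10 15]
}
```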
120 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/sort_test.go (generated, vendored, Normal file)
@@ -0,0 +1,120 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSort(t *testing.T) {
|
||||
tosort := []int{10, 3, 5, 1, 15, 6}
|
||||
Sort(func(a, b int) bool { return b < a }, tosort)
|
||||
|
||||
assertDeep(t, tosort, []int{15, 10, 6, 5, 3, 1})
|
||||
}
|
||||
|
||||
func TestQuickSort(t *testing.T) {
|
||||
tosort := []int{10, 3, 5, 1, 15, 6}
|
||||
sorted := QuickSort(
|
||||
func(a, b int) bool {
|
||||
return b < a
|
||||
}, tosort).([]int)
|
||||
|
||||
assertDeep(t, sorted, []int{15, 10, 6, 5, 3, 1})
|
||||
}
|
||||
|
||||
func BenchmarkSort(b *testing.B) {
|
||||
if flagBuiltin {
|
||||
benchmarkSortBuiltin(b)
|
||||
} else {
|
||||
benchmarkSortReflect(b)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkSortReflect(b *testing.B) {
|
||||
less := func(a, b int) bool { return a < b }
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
b.StopTimer()
|
||||
list := randIntSlice(1000, 0)
|
||||
b.StartTimer()
|
||||
|
||||
Sort(less, list)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkSortBuiltin(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
b.StopTimer()
|
||||
list := randIntSlice(1000, 0)
|
||||
b.StartTimer()
|
||||
|
||||
sort.Sort(sort.IntSlice(list))
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkQuickSort(b *testing.B) {
|
||||
if flagBuiltin {
|
||||
benchmarkQuickSortBuiltin(b)
|
||||
} else {
|
||||
benchmarkQuickSortReflect(b)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkQuickSortReflect(b *testing.B) {
|
||||
less := func(a, b int) bool { return a < b }
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
b.StopTimer()
|
||||
list := randIntSlice(1000, 0)
|
||||
b.StartTimer()
|
||||
|
||||
_ = QuickSort(less, list)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkQuickSortBuiltin(b *testing.B) {
|
||||
less := func(a, b int) bool { return a < b }
|
||||
|
||||
quicksort := func(xs []int) []int {
|
||||
ys := make([]int, len(xs))
|
||||
copy(ys, xs)
|
||||
|
||||
var qsort func(left, right int)
|
||||
var partition func(left, right, pivot int) int
|
||||
|
||||
qsort = func(left, right int) {
|
||||
if left >= right {
|
||||
return
|
||||
}
|
||||
pivot := (left + right) / 2
|
||||
pivot = partition(left, right, pivot)
|
||||
|
||||
qsort(left, pivot-1)
|
||||
qsort(pivot+1, right)
|
||||
}
|
||||
partition = func(left, right, pivot int) int {
|
||||
vpivot := ys[pivot]
|
||||
ys[pivot], ys[right] = ys[right], ys[pivot]
|
||||
|
||||
ind := left
|
||||
for i := left; i < right; i++ {
|
||||
if less(ys[i], vpivot) {
|
||||
ys[i], ys[ind] = ys[ind], ys[i]
|
||||
ind++
|
||||
}
|
||||
}
|
||||
ys[ind], ys[right] = ys[right], ys[ind]
|
||||
return ind
|
||||
}
|
||||
|
||||
return ys
|
||||
}
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
b.StopTimer()
|
||||
list := randIntSlice(1000, 0)
|
||||
b.StartTimer()
|
||||
|
||||
_ = quicksort(list)
|
||||
}
|
||||
}
|
37 Godeps/_workspace/src/github.com/BurntSushi/ty/fun/util.go (generated, vendored, Normal file)
@@ -0,0 +1,37 @@
|
|||
package fun
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
)
|
||||
|
||||
func zeroValue(typ reflect.Type) reflect.Value {
|
||||
return reflect.New(typ).Elem()
|
||||
}
|
||||
|
||||
type swapper reflect.Value
|
||||
|
||||
func swapperOf(typ reflect.Type) swapper {
|
||||
return swapper(zeroValue(typ))
|
||||
}
|
||||
|
||||
func (s swapper) swap(a, b reflect.Value) {
|
||||
vs := reflect.Value(s)
|
||||
vs.Set(a)
|
||||
a.Set(b)
|
||||
b.Set(vs)
|
||||
}
|
||||
|
||||
func call(f reflect.Value, args ...reflect.Value) {
|
||||
f.Call(args)
|
||||
}
|
||||
|
||||
func call1(f reflect.Value, args ...reflect.Value) reflect.Value {
|
||||
return f.Call(args)[0]
|
||||
}
|
||||
|
||||
func call2(f reflect.Value, args ...reflect.Value) (
|
||||
reflect.Value, reflect.Value) {
|
||||
|
||||
ret := f.Call(args)
|
||||
return ret[0], ret[1]
|
||||
}
|
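The unexported `swapper` above is simply a scratch `reflect.Value` of the element type, used as temporary storage while two addressable values are exchanged. The same technique written out directly against the `reflect` API (an illustration of the idea, not code from this commit):

```
package main

import (
	"fmt"
	"reflect"
)

func main() {
	xs := []string{"first", "second"}
	v := reflect.ValueOf(xs)

	// Scratch value of the element type, playing the role of `swapper`.
	tmp := reflect.New(v.Type().Elem()).Elem()

	a, b := v.Index(0), v.Index(1)
	tmp.Set(a)
	a.Set(b)
	b.Set(tmp)

	fmt.Println(xs) // [second first]
}
```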
35 Godeps/_workspace/src/github.com/BurntSushi/ty/perf/builtin-vs-reflect.bench (generated, vendored, Normal file)
@@ -0,0 +1,35 @@
|
|||
benchmark old ns/op new ns/op delta
|
||||
BenchmarkFibonacciMemo-12 5895 43896 +644.63%
|
||||
BenchmarkFibonacciNoMemo-12 6827001 6829859 +0.04%
|
||||
BenchmarkParMapSquare-12 408320 572307 +40.16%
|
||||
BenchmarkParMapPrime-12 5289594 5510075 +4.17%
|
||||
BenchmarkMapSquare-12 8499 457844 +5287.03%
|
||||
BenchmarkMapPrime-12 34265372 32220176 -5.97%
|
||||
BenchmarkShuffle-12 240036 1018408 +324.27%
|
||||
BenchmarkSample-12 262565 271122 +3.26%
|
||||
BenchmarkSort-12 137293 7716737 +5520.63%
|
||||
BenchmarkQuickSort-12 6325 6051563 +95576.89%
|
||||
|
||||
benchmark old allocs new allocs delta
|
||||
BenchmarkFibonacciMemo-12 11 198 1700.00%
|
||||
BenchmarkFibonacciNoMemo-12 0 0 n/a%
|
||||
BenchmarkParMapSquare-12 21 2038 9604.76%
|
||||
BenchmarkParMapPrime-12 7148 9160 28.15%
|
||||
BenchmarkMapSquare-12 1 2014 201300.00%
|
||||
BenchmarkMapPrime-12 7105 9173 29.11%
|
||||
BenchmarkShuffle-12 0 7 n/a%
|
||||
BenchmarkSample-12 2 11 450.00%
|
||||
BenchmarkSort-12 1 27032 2703100.00%
|
||||
BenchmarkQuickSort-12 1 22008 2200700.00%
|
||||
|
||||
benchmark old bytes new bytes delta
|
||||
BenchmarkFibonacciMemo-12 2449 12083 393.39%
|
||||
BenchmarkFibonacciNoMemo-12 0 0 n/a%
|
||||
BenchmarkParMapSquare-12 9127 60330 561.01%
|
||||
BenchmarkParMapPrime-12 156540 222731 42.28%
|
||||
BenchmarkMapSquare-12 8195 58053 608.40%
|
||||
BenchmarkMapPrime-12 155952 221255 41.87%
|
||||
BenchmarkShuffle-12 0 500 n/a%
|
||||
BenchmarkSample-12 82785 83383 0.72%
|
||||
BenchmarkSort-12 35 883869 2525240.00%
|
||||
BenchmarkQuickSort-12 8195 735645 8876.75%
|
1 Godeps/_workspace/src/github.com/BurntSushi/ty/session.vim (generated, vendored, Normal file)
@@ -0,0 +1 @@
|
|||
au BufWritePost *.go silent!make tags > /dev/null 2>&1
|
338 Godeps/_workspace/src/github.com/BurntSushi/ty/type-check.go (generated, vendored, Normal file)
@@ -0,0 +1,338 @@
|
|||
package ty
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// TypeError corresponds to any error reported by the `Check` function.
|
||||
// Since `Check` panics, if you want to run `Check` safely, it is
|
||||
// appropriate to recover and use a type switch to discover a `TypeError`
|
||||
// value.
|
||||
type TypeError string
|
||||
|
||||
func (te TypeError) Error() string {
|
||||
return string(te)
|
||||
}
|
||||
|
||||
func pe(format string, v ...interface{}) TypeError {
|
||||
return TypeError(fmt.Sprintf(format, v...))
|
||||
}
|
||||
|
||||
func ppe(format string, v ...interface{}) {
|
||||
panic(pe(format, v...))
|
||||
}
|
||||
|
||||
// Typed corresponds to the information returned by `Check`.
|
||||
type Typed struct {
|
||||
// In correspondence with the `as` parameter to `Check`.
|
||||
Args []reflect.Value
|
||||
|
||||
// In correspondence with the return types of `f` in `Check`.
|
||||
Returns []reflect.Type
|
||||
|
||||
// The type environment generated via unification in `Check`.
|
||||
// (Its usefulness in the public API is questionable.)
|
||||
TypeEnv map[string]reflect.Type
|
||||
}
|
||||
|
||||
// Check accepts a function `f`, which may have a parametric type, along with a
|
||||
// number of arguments in correspondence with the arguments to `f`,
|
||||
// and returns inferred Go type information. This type information includes
|
||||
// a list of `reflect.Value` in correspondence with `as`, a list of
|
||||
// `reflect.Type` in correspondence with the return types of `f` and a type
|
||||
// environment mapping type variables to `reflect.Type`.
|
||||
//
|
||||
// The power of `Check` comes from the following invariant: if `Check` returns,
|
||||
// then the types of the arguments corresponding to `as` are consistent
|
||||
// with the parametric type of `f`, *and* the parametric return types of `f`
|
||||
// were made into valid Go types that are not parametric. Otherwise, there is
|
||||
// a bug in `Check`.
|
||||
//
|
||||
// More concretely, consider a simple parametric function `Map`, which
|
||||
// transforms a list of elements by applying a function to each element in
|
||||
// order to generate a new list. Such a function constructed only for integers
|
||||
// might have a type like
|
||||
//
|
||||
// func Map(func(int) int, []int) []int
|
||||
//
|
||||
// But the parametric type of `Map` could be given with
|
||||
//
|
||||
// func Map(func(A) B, []A) []B
|
||||
//
|
||||
// which in English reads, "Given a function from any type `A` to any type `B`
|
||||
// and a slice of `A`, `Map` returns a slice of `B`."
|
||||
//
|
||||
// To write a parametric function like `Map`, one can pass a pointer
|
||||
// to a nil function of the desired parametric type to get the reflection
|
||||
// information:
|
||||
//
|
||||
// func Map(f, xs interface{}) interface{} {
|
||||
// // Given the parametric type and the arguments, Check will
|
||||
// // return all the reflection information you need to write `Map`.
|
||||
// uni := ty.Check(
|
||||
// new(func(func(ty.A) ty.B, []ty.A) []ty.B),
|
||||
// f, xs)
|
||||
//
|
||||
// // `vf` and `vxs` are `reflect.Value`s of `f` and `xs`.
|
||||
// vf, vxs := uni.Args[0], uni.Args[1]
|
||||
//
|
||||
// // `tys` is a `reflect.Type` of `[]ty.B` where `ty.B` is replaced
|
||||
// // with the return type of the given function `f`.
|
||||
// tys := uni.Returns[0]
|
||||
//
|
||||
// // Given the promise of `Check`, we now know that `vf` has
|
||||
// // type `func(ty.A) ty.B` and `vxs` has type `[]ty.A`.
|
||||
// xsLen := vxs.Len()
|
||||
//
|
||||
// // Constructs a new slice which will have type `[]ty.B`.
|
||||
// vys := reflect.MakeSlice(tys, xsLen, xsLen)
|
||||
//
|
||||
// // Actually perform the `Map` operation, but in the world of
|
||||
// // reflection.
|
||||
// for i := 0; i < xsLen; i++ {
|
||||
// vy := vf.Call([]reflect.Value{vxs.Index(i)})[0]
|
||||
// vys.Index(i).Set(vy)
|
||||
// }
|
||||
//
|
||||
// // The `reflect.Value.Interface` method is how we exit the world of
|
||||
// // reflection. The onus is now on the caller to type assert it to
|
||||
// // the appropriate type.
|
||||
// return vys.Interface()
|
||||
// }
|
||||
//
|
||||
// Working in the reflection world is certainly more inconvenient than writing
|
||||
// regular Go code, but the information and invariants held by `Check` provide
|
||||
// a more convenient experience than how one normally works with reflection.
|
||||
// (Notice that there is no error-prone type switching or boiler plate to
|
||||
// construct new types, since `Check` guarantees the types are consistent
|
||||
// with the inputs for us.)
|
||||
//
|
||||
// And while writing such functions is still not so convenient,
|
||||
// invoking them is simple:
|
||||
//
|
||||
// square := func(x int) int { return x * x }
|
||||
// squared := Map(square, []int{1, 2, 3, 4, 5}).([]int)
|
||||
//
|
||||
// Restrictions
|
||||
//
|
||||
// There are a few restrictions imposed on the parametric return types of
|
||||
// `f`: type variables may only be found in types that can be composed by the
|
||||
// `reflect` package. This *only* includes channels, maps, pointers and slices.
|
||||
// If a type variable is found in an array, function or struct, `Check` will
|
||||
// panic.
|
||||
//
|
||||
// Also, type variables inside of structs are ignored in the types of the
|
||||
// arguments `as`. This restriction may be lifted in the future.
|
||||
//
|
||||
// To be clear: type variables *may* appear in arrays or functions in the types
|
||||
// of the arguments `as`.
|
||||
func Check(f interface{}, as ...interface{}) *Typed {
|
||||
rf := reflect.ValueOf(f)
|
||||
tf := rf.Type()
|
||||
|
||||
if tf.Kind() == reflect.Ptr {
|
||||
rf = reflect.Indirect(rf)
|
||||
tf = rf.Type()
|
||||
}
|
||||
if tf.Kind() != reflect.Func {
|
||||
ppe("The type of `f` must be a function, but it is a '%s'.", tf.Kind())
|
||||
}
|
||||
if tf.NumIn() != len(as) {
|
||||
ppe("`f` expects %d arguments, but only %d were given.",
|
||||
tf.NumIn(), len(as))
|
||||
}
|
||||
|
||||
// Populate the argument value list.
|
||||
args := make([]reflect.Value, len(as))
|
||||
for i := 0; i < len(as); i++ {
|
||||
args[i] = reflect.ValueOf(as[i])
|
||||
}
|
||||
|
||||
// Populate our type variable environment through unification.
|
||||
tyenv := make(tyenv)
|
||||
for i := 0; i < len(args); i++ {
|
||||
tp := typePair{tyenv, tf.In(i), args[i].Type()}
|
||||
|
||||
// Mutates the type variable environment.
|
||||
if err := tp.unify(tp.param, tp.input); err != nil {
|
||||
argTypes := make([]string, len(args))
|
||||
for i := range args {
|
||||
argTypes[i] = args[i].Type().String()
|
||||
}
|
||||
ppe("\nError type checking\n\t%s\nwith argument types\n\t(%s)\n%s",
|
||||
tf, strings.Join(argTypes, ", "), err)
|
||||
}
|
||||
}
|
||||
|
||||
// Now substitute those types into the return types of `f`.
|
||||
retTypes := make([]reflect.Type, tf.NumOut())
|
||||
for i := 0; i < tf.NumOut(); i++ {
|
||||
retTypes[i] = (&returnType{tyenv, tf.Out(i)}).tysubst(tf.Out(i))
|
||||
}
|
||||
return &Typed{args, retTypes, map[string]reflect.Type(tyenv)}
|
||||
}
|
||||
|
||||
// tyenv maps type variable names to their inferred Go type.
|
||||
type tyenv map[string]reflect.Type
|
||||
|
||||
// typePair represents a pair of types to be unified. They act as a way to
|
||||
// report sensible error messages from within the unification algorithm.
|
||||
//
|
||||
// It also includes a type environment, which is mutated during unification.
|
||||
type typePair struct {
|
||||
tyenv tyenv
|
||||
param reflect.Type
|
||||
input reflect.Type
|
||||
}
|
||||
|
||||
func (tp typePair) error(format string, v ...interface{}) error {
|
||||
return pe("Type error when unifying type '%s' and '%s': %s",
|
||||
tp.param, tp.input, fmt.Sprintf(format, v...))
|
||||
}
|
||||
|
||||
// unify attempts to satisfy a pair of types, where the `param` type is the
|
||||
// expected type of a function argument and the `input` type is the known
|
||||
// type of a function argument. The `param` type may be parametric (that is,
|
||||
// it may contain a type that is convertible to TypeVariable) but the
|
||||
// `input` type may *not* be parametric.
|
||||
//
|
||||
// Any failure to unify the two types results in a panic.
|
||||
//
|
||||
// The end result of unification is a type environment: a set of substitutions
|
||||
// from type variable to a Go type.
|
||||
func (tp typePair) unify(param, input reflect.Type) error {
|
||||
if tyname := tyvarName(input); len(tyname) > 0 {
|
||||
return tp.error("Type variables are not allowed in the types of " +
|
||||
"arguments.")
|
||||
}
|
||||
if tyname := tyvarName(param); len(tyname) > 0 {
|
||||
if cur, ok := tp.tyenv[tyname]; ok && cur != input {
|
||||
return tp.error("Type variable %s expected type '%s' but got '%s'.",
|
||||
tyname, cur, input)
|
||||
} else if !ok {
|
||||
tp.tyenv[tyname] = input
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if param.Kind() != input.Kind() {
|
||||
return tp.error("Cannot unify different kinds of types '%s' and '%s'.",
|
||||
param, input)
|
||||
}
|
||||
|
||||
switch param.Kind() {
|
||||
case reflect.Array:
|
||||
return tp.unify(param.Elem(), input.Elem())
|
||||
case reflect.Chan:
|
||||
if param.ChanDir() != input.ChanDir() {
|
||||
return tp.error("Cannot unify '%s' with '%s' "+
|
||||
"(channel directions are different: '%s' != '%s').",
|
||||
param, input, param.ChanDir(), input.ChanDir())
|
||||
}
|
||||
return tp.unify(param.Elem(), input.Elem())
|
||||
case reflect.Func:
|
||||
if param.NumIn() != input.NumIn() || param.NumOut() != input.NumOut() {
|
||||
return tp.error("Cannot unify '%s' with '%s'.", param, input)
|
||||
}
|
||||
for i := 0; i < param.NumIn(); i++ {
|
||||
if err := tp.unify(param.In(i), input.In(i)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
for i := 0; i < param.NumOut(); i++ {
|
||||
if err := tp.unify(param.Out(i), input.Out(i)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case reflect.Map:
|
||||
if err := tp.unify(param.Key(), input.Key()); err != nil {
|
||||
return err
|
||||
}
|
||||
return tp.unify(param.Elem(), input.Elem())
|
||||
case reflect.Ptr:
|
||||
return tp.unify(param.Elem(), input.Elem())
|
||||
case reflect.Slice:
|
||||
return tp.unify(param.Elem(), input.Elem())
|
||||
}
|
||||
|
||||
// The only other container types are Interface and Struct.
|
||||
// I am unsure about what to do with interfaces. Mind is fuzzy.
|
||||
// Structs? I don't think it really makes much sense to use type
|
||||
// variables inside of them.
|
||||
return nil
|
||||
}
|
||||
|
||||
// returnType corresponds to the type of a single return value of a function,
|
||||
// in which the type may be parametric. It also contains a type environment
|
||||
// constructed from unification.
|
||||
type returnType struct {
|
||||
tyenv tyenv
|
||||
typ reflect.Type
|
||||
}
|
||||
|
||||
func (rt returnType) panic(format string, v ...interface{}) {
|
||||
ppe("Error substituting in return type '%s': %s",
|
||||
rt.typ, fmt.Sprintf(format, v...))
|
||||
}
|
||||
|
||||
// tysubst attempts to substitute all type variables within a single return
|
||||
// type with their corresponding Go type from the type environment.
|
||||
//
|
||||
// tysubst will panic if a type variable is unbound, or if it encounters a
|
||||
// type that cannot be dynamically created. Such types include arrays,
|
||||
// functions and structs. (A limitation of the `reflect` package.)
|
||||
func (rt returnType) tysubst(typ reflect.Type) reflect.Type {
|
||||
if tyname := tyvarName(typ); len(tyname) > 0 {
|
||||
if thetype, ok := rt.tyenv[tyname]; !ok {
|
||||
rt.panic("Unbound type variable %s.", tyname)
|
||||
} else {
|
||||
return thetype
|
||||
}
|
||||
}
|
||||
|
||||
switch typ.Kind() {
|
||||
case reflect.Array:
|
||||
rt.panic("Cannot dynamically create Array types.")
|
||||
case reflect.Chan:
|
||||
return reflect.ChanOf(typ.ChanDir(), rt.tysubst(typ.Elem()))
|
||||
case reflect.Func:
|
||||
rt.panic("Cannot dynamically create Function types.")
|
||||
case reflect.Interface:
|
||||
// rt.panic("TODO")
|
||||
// Not sure if this is right.
|
||||
return typ
|
||||
case reflect.Map:
|
||||
return reflect.MapOf(rt.tysubst(typ.Key()), rt.tysubst(typ.Elem()))
|
||||
case reflect.Ptr:
|
||||
return reflect.PtrTo(rt.tysubst(typ.Elem()))
|
||||
case reflect.Slice:
|
||||
return reflect.SliceOf(rt.tysubst(typ.Elem()))
|
||||
case reflect.Struct:
|
||||
rt.panic("Cannot dynamically create Struct types.")
|
||||
case reflect.UnsafePointer:
|
||||
rt.panic("Cannot dynamically create unsafe.Pointer types.")
|
||||
}
|
||||
|
||||
// We've covered all the composite types, so we're only left with
|
||||
// base types.
|
||||
return typ
|
||||
}
|
||||
|
||||
func tyvarName(t reflect.Type) string {
|
||||
if !t.ConvertibleTo(tyvarUnderlyingType) {
|
||||
return ""
|
||||
}
|
||||
return t.Name()
|
||||
}
|
||||
|
||||
// AssertType panics with a `TypeError` if `v` does not have type `t`.
|
||||
// Otherwise, it returns the `reflect.Value` of `v`.
|
||||
func AssertType(v interface{}, t reflect.Type) reflect.Value {
|
||||
rv := reflect.ValueOf(v)
|
||||
tv := rv.Type()
|
||||
if tv != t {
|
||||
ppe("Value '%v' has type '%s' but expected '%s'.", v, tv, t)
|
||||
}
|
||||
return rv
|
||||
}
|
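As the `TypeError` comment above notes, `Check` panics on a type mismatch, so callers that want an error value recover and type-switch on `TypeError`. A hedged sketch of that pattern (illustrative only, not part of this commit):

```
package main

import (
	"fmt"

	"github.com/BurntSushi/ty"
)

// safeCheck runs ty.Check and converts a TypeError panic into an error value.
func safeCheck(f interface{}, as ...interface{}) (typed *ty.Typed, err error) {
	defer func() {
		if r := recover(); r != nil {
			if te, ok := r.(ty.TypeError); ok {
				err = te
				return
			}
			panic(r) // not a type error; re-panic
		}
	}()
	return ty.Check(f, as...), nil
}

func main() {
	// A deliberate mismatch: the parametric type expects a []A, but 42 is an int.
	_, err := safeCheck(new(func(func(ty.A) ty.B, []ty.A) []ty.B),
		func(x int) int { return x }, 42)
	fmt.Println("type error:", err != nil) // type error: true
}
```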
28 Godeps/_workspace/src/github.com/BurntSushi/ty/tyvars.go (generated, vendored, Normal file)
@@ -0,0 +1,28 @@
|
|||
package ty
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// TypeVariable is the underlying type of every type variable used in
|
||||
// parametric types. It should not be used directly. Instead, use
|
||||
//
|
||||
// type myOwnTypeVariable TypeVariable
|
||||
//
|
||||
// to create your own type variable. For your convenience, this package
|
||||
// defines some type variables for you. (e.g., `A`, `B`, `C`, ...)
|
||||
type TypeVariable struct {
|
||||
noImitation struct{}
|
||||
}
|
||||
|
||||
// tyvarUnderlyingType is used to discover types that are type variables.
|
||||
// Namely, any type variable must be convertible to `TypeVariable`.
|
||||
var tyvarUnderlyingType = reflect.TypeOf(TypeVariable{})
|
||||
|
||||
type A TypeVariable
|
||||
type B TypeVariable
|
||||
type C TypeVariable
|
||||
type D TypeVariable
|
||||
type E TypeVariable
|
||||
type F TypeVariable
|
||||
type G TypeVariable
|
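For context on how these type variables are consumed, here is a hedged sketch using fun.Map, assuming the fun subpackage of this dependency is available; the point is only that A and B in its parametric signature (func(A) B, []A) []B are resolved against concrete Go types at run time:

```
package main

import (
	"fmt"

	"github.com/BurntSushi/ty/fun"
)

func main() {
	// A is bound to int and B to int when the call is type-checked at run time.
	squares := fun.Map(func(x int) int { return x * x }, []int{1, 2, 3}).([]int)
	fmt.Println(squares) // [1 4 9]
}
```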
25
Godeps/_workspace/src/github.com/alecthomas/template/README.md
generated
vendored
Normal file
|
@@ -0,0 +1,25 @@
|
|||
# Go's `text/template` package with newline elision
|
||||
|
||||
This is a fork of Go 1.4's [text/template](http://golang.org/pkg/text/template/) package with one addition: a backslash immediately after a closing delimiter will delete all subsequent newlines until a non-newline.
|
||||
|
||||
eg.
|
||||
|
||||
```
|
||||
{{if true}}\
|
||||
hello
|
||||
{{end}}\
|
||||
```
|
||||
|
||||
Will result in:
|
||||
|
||||
```
|
||||
hello\n
|
||||
```
|
||||
|
||||
Rather than:
|
||||
|
||||
```
|
||||
\n
|
||||
hello\n
|
||||
\n
|
||||
```
|
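A hedged usage sketch of the elision behaviour described above, driving the fork through its ordinary Parse/Execute API (the API mirrors text/template, as the vendored examples further below show):

```
package main

import (
	"os"

	"github.com/alecthomas/template"
)

func main() {
	// The backslash immediately after each "}}" swallows the following
	// newline, so the output is just "hello\n" rather than blank lines
	// around "hello".
	const src = "{{if true}}\\\nhello\n{{end}}\\\n"
	t := template.Must(template.New("demo").Parse(src))
	if err := t.Execute(os.Stdout, nil); err != nil {
		panic(err)
	}
}
```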
406
Godeps/_workspace/src/github.com/alecthomas/template/doc.go
generated
vendored
Normal file
|
@@ -0,0 +1,406 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
Package template implements data-driven templates for generating textual output.
|
||||
|
||||
To generate HTML output, see package html/template, which has the same interface
|
||||
as this package but automatically secures HTML output against certain attacks.
|
||||
|
||||
Templates are executed by applying them to a data structure. Annotations in the
|
||||
template refer to elements of the data structure (typically a field of a struct
|
||||
or a key in a map) to control execution and derive values to be displayed.
|
||||
Execution of the template walks the structure and sets the cursor, represented
|
||||
by a period '.' and called "dot", to the value at the current location in the
|
||||
structure as execution proceeds.
|
||||
|
||||
The input text for a template is UTF-8-encoded text in any format.
|
||||
"Actions"--data evaluations or control structures--are delimited by
|
||||
"{{" and "}}"; all text outside actions is copied to the output unchanged.
|
||||
Actions may not span newlines, although comments can.
|
||||
|
||||
Once parsed, a template may be executed safely in parallel.
|
||||
|
||||
Here is a trivial example that prints "17 items are made of wool".
|
||||
|
||||
type Inventory struct {
|
||||
Material string
|
||||
Count uint
|
||||
}
|
||||
sweaters := Inventory{"wool", 17}
|
||||
tmpl, err := template.New("test").Parse("{{.Count}} items are made of {{.Material}}")
|
||||
if err != nil { panic(err) }
|
||||
err = tmpl.Execute(os.Stdout, sweaters)
|
||||
if err != nil { panic(err) }
|
||||
|
||||
More intricate examples appear below.
|
||||
|
||||
Actions
|
||||
|
||||
Here is the list of actions. "Arguments" and "pipelines" are evaluations of
|
||||
data, defined in detail below.
|
||||
|
||||
*/
|
||||
// {{/* a comment */}}
|
||||
// A comment; discarded. May contain newlines.
|
||||
// Comments do not nest and must start and end at the
|
||||
// delimiters, as shown here.
|
||||
/*
|
||||
|
||||
{{pipeline}}
|
||||
The default textual representation of the value of the pipeline
|
||||
is copied to the output.
|
||||
|
||||
{{if pipeline}} T1 {{end}}
|
||||
If the value of the pipeline is empty, no output is generated;
|
||||
otherwise, T1 is executed. The empty values are false, 0, any
|
||||
nil pointer or interface value, and any array, slice, map, or
|
||||
string of length zero.
|
||||
Dot is unaffected.
|
||||
|
||||
{{if pipeline}} T1 {{else}} T0 {{end}}
|
||||
If the value of the pipeline is empty, T0 is executed;
|
||||
otherwise, T1 is executed. Dot is unaffected.
|
||||
|
||||
{{if pipeline}} T1 {{else if pipeline}} T0 {{end}}
|
||||
To simplify the appearance of if-else chains, the else action
|
||||
of an if may include another if directly; the effect is exactly
|
||||
the same as writing
|
||||
{{if pipeline}} T1 {{else}}{{if pipeline}} T0 {{end}}{{end}}
|
||||
|
||||
{{range pipeline}} T1 {{end}}
|
||||
The value of the pipeline must be an array, slice, map, or channel.
|
||||
If the value of the pipeline has length zero, nothing is output;
|
||||
otherwise, dot is set to the successive elements of the array,
|
||||
slice, or map and T1 is executed. If the value is a map and the
|
||||
keys are of basic type with a defined order ("comparable"), the
|
||||
elements will be visited in sorted key order.
|
||||
|
||||
{{range pipeline}} T1 {{else}} T0 {{end}}
|
||||
The value of the pipeline must be an array, slice, map, or channel.
|
||||
If the value of the pipeline has length zero, dot is unaffected and
|
||||
T0 is executed; otherwise, dot is set to the successive elements
|
||||
of the array, slice, or map and T1 is executed.
|
||||
|
||||
{{template "name"}}
|
||||
The template with the specified name is executed with nil data.
|
||||
|
||||
{{template "name" pipeline}}
|
||||
The template with the specified name is executed with dot set
|
||||
to the value of the pipeline.
|
||||
|
||||
{{with pipeline}} T1 {{end}}
|
||||
If the value of the pipeline is empty, no output is generated;
|
||||
otherwise, dot is set to the value of the pipeline and T1 is
|
||||
executed.
|
||||
|
||||
{{with pipeline}} T1 {{else}} T0 {{end}}
|
||||
If the value of the pipeline is empty, dot is unaffected and T0
|
||||
is executed; otherwise, dot is set to the value of the pipeline
|
||||
and T1 is executed.
|
||||
|
||||
Arguments
|
||||
|
||||
An argument is a simple value, denoted by one of the following.
|
||||
|
||||
- A boolean, string, character, integer, floating-point, imaginary
|
||||
or complex constant in Go syntax. These behave like Go's untyped
|
||||
constants, although raw strings may not span newlines.
|
||||
- The keyword nil, representing an untyped Go nil.
|
||||
- The character '.' (period):
|
||||
.
|
||||
The result is the value of dot.
|
||||
- A variable name, which is a (possibly empty) alphanumeric string
|
||||
preceded by a dollar sign, such as
|
||||
$piOver2
|
||||
or
|
||||
$
|
||||
The result is the value of the variable.
|
||||
Variables are described below.
|
||||
- The name of a field of the data, which must be a struct, preceded
|
||||
by a period, such as
|
||||
.Field
|
||||
The result is the value of the field. Field invocations may be
|
||||
chained:
|
||||
.Field1.Field2
|
||||
Fields can also be evaluated on variables, including chaining:
|
||||
$x.Field1.Field2
|
||||
- The name of a key of the data, which must be a map, preceded
|
||||
by a period, such as
|
||||
.Key
|
||||
The result is the map element value indexed by the key.
|
||||
Key invocations may be chained and combined with fields to any
|
||||
depth:
|
||||
.Field1.Key1.Field2.Key2
|
||||
Although the key must be an alphanumeric identifier, unlike with
|
||||
field names it does not need to start with an upper case letter.
|
||||
Keys can also be evaluated on variables, including chaining:
|
||||
$x.key1.key2
|
||||
- The name of a niladic method of the data, preceded by a period,
|
||||
such as
|
||||
.Method
|
||||
The result is the value of invoking the method with dot as the
|
||||
receiver, dot.Method(). Such a method must have one return value (of
|
||||
any type) or two return values, the second of which is an error.
|
||||
If it has two and the returned error is non-nil, execution terminates
|
||||
and an error is returned to the caller as the value of Execute.
|
||||
Method invocations may be chained and combined with fields and keys
|
||||
to any depth:
|
||||
.Field1.Key1.Method1.Field2.Key2.Method2
|
||||
Methods can also be evaluated on variables, including chaining:
|
||||
$x.Method1.Field
|
||||
- The name of a niladic function, such as
|
||||
fun
|
||||
The result is the value of invoking the function, fun(). The return
|
||||
types and values behave as in methods. Functions and function
|
||||
names are described below.
|
||||
- A parenthesized instance of one of the above, for grouping. The result
|
||||
may be accessed by a field or map key invocation.
|
||||
print (.F1 arg1) (.F2 arg2)
|
||||
(.StructValuedMethod "arg").Field
|
||||
|
||||
Arguments may evaluate to any type; if they are pointers the implementation
|
||||
automatically indirects to the base type when required.
|
||||
If an evaluation yields a function value, such as a function-valued
|
||||
field of a struct, the function is not invoked automatically, but it
|
||||
can be used as a truth value for an if action and the like. To invoke
|
||||
it, use the call function, defined below.
|
||||
|
||||
A pipeline is a possibly chained sequence of "commands". A command is a simple
|
||||
value (argument) or a function or method call, possibly with multiple arguments:
|
||||
|
||||
Argument
|
||||
The result is the value of evaluating the argument.
|
||||
.Method [Argument...]
|
||||
The method can be alone or the last element of a chain but,
|
||||
unlike methods in the middle of a chain, it can take arguments.
|
||||
The result is the value of calling the method with the
|
||||
arguments:
|
||||
dot.Method(Argument1, etc.)
|
||||
functionName [Argument...]
|
||||
The result is the value of calling the function associated
|
||||
with the name:
|
||||
function(Argument1, etc.)
|
||||
Functions and function names are described below.
|
||||
|
||||
Pipelines
|
||||
|
||||
A pipeline may be "chained" by separating a sequence of commands with pipeline
|
||||
characters '|'. In a chained pipeline, the result of each command is
|
||||
passed as the last argument of the following command. The output of the final
|
||||
command in the pipeline is the value of the pipeline.
|
||||
|
||||
The output of a command will be either one value or two values, the second of
|
||||
which has type error. If that second value is present and evaluates to
|
||||
non-nil, execution terminates and the error is returned to the caller of
|
||||
Execute.
|
||||
|
||||
Variables
|
||||
|
||||
A pipeline inside an action may initialize a variable to capture the result.
|
||||
The initialization has syntax
|
||||
|
||||
$variable := pipeline
|
||||
|
||||
where $variable is the name of the variable. An action that declares a
|
||||
variable produces no output.
|
||||
|
||||
If a "range" action initializes a variable, the variable is set to the
|
||||
successive elements of the iteration. Also, a "range" may declare two
|
||||
variables, separated by a comma:
|
||||
|
||||
range $index, $element := pipeline
|
||||
|
||||
in which case $index and $element are set to the successive values of the
|
||||
array/slice index or map key and element, respectively. Note that if there is
|
||||
only one variable, it is assigned the element; this is opposite to the
|
||||
convention in Go range clauses.
|
||||
|
||||
A variable's scope extends to the "end" action of the control structure ("if",
|
||||
"with", or "range") in which it is declared, or to the end of the template if
|
||||
there is no such control structure. A template invocation does not inherit
|
||||
variables from the point of its invocation.
|
||||
|
||||
When execution begins, $ is set to the data argument passed to Execute, that is,
|
||||
to the starting value of dot.
|
||||
|
||||
Examples
|
||||
|
||||
Here are some example one-line templates demonstrating pipelines and variables.
|
||||
All produce the quoted word "output":
|
||||
|
||||
{{"\"output\""}}
|
||||
A string constant.
|
||||
{{`"output"`}}
|
||||
A raw string constant.
|
||||
{{printf "%q" "output"}}
|
||||
A function call.
|
||||
{{"output" | printf "%q"}}
|
||||
A function call whose final argument comes from the previous
|
||||
command.
|
||||
{{printf "%q" (print "out" "put")}}
|
||||
A parenthesized argument.
|
||||
{{"put" | printf "%s%s" "out" | printf "%q"}}
|
||||
A more elaborate call.
|
||||
{{"output" | printf "%s" | printf "%q"}}
|
||||
A longer chain.
|
||||
{{with "output"}}{{printf "%q" .}}{{end}}
|
||||
A with action using dot.
|
||||
{{with $x := "output" | printf "%q"}}{{$x}}{{end}}
|
||||
A with action that creates and uses a variable.
|
||||
{{with $x := "output"}}{{printf "%q" $x}}{{end}}
|
||||
A with action that uses the variable in another action.
|
||||
{{with $x := "output"}}{{$x | printf "%q"}}{{end}}
|
||||
The same, but pipelined.
|
||||
|
||||
Functions
|
||||
|
||||
During execution functions are found in two function maps: first in the
|
||||
template, then in the global function map. By default, no functions are defined
|
||||
in the template but the Funcs method can be used to add them.
|
||||
|
||||
Predefined global functions are named as follows.
|
||||
|
||||
and
|
||||
Returns the boolean AND of its arguments by returning the
|
||||
first empty argument or the last argument, that is,
|
||||
"and x y" behaves as "if x then y else x". All the
|
||||
arguments are evaluated.
|
||||
call
|
||||
Returns the result of calling the first argument, which
|
||||
must be a function, with the remaining arguments as parameters.
|
||||
Thus "call .X.Y 1 2" is, in Go notation, dot.X.Y(1, 2) where
|
||||
Y is a func-valued field, map entry, or the like.
|
||||
The first argument must be the result of an evaluation
|
||||
that yields a value of function type (as distinct from
|
||||
a predefined function such as print). The function must
|
||||
return either one or two result values, the second of which
|
||||
is of type error. If the arguments don't match the function
|
||||
or the returned error value is non-nil, execution stops.
|
||||
html
|
||||
Returns the escaped HTML equivalent of the textual
|
||||
representation of its arguments.
|
||||
index
|
||||
Returns the result of indexing its first argument by the
|
||||
following arguments. Thus "index x 1 2 3" is, in Go syntax,
|
||||
x[1][2][3]. Each indexed item must be a map, slice, or array.
|
||||
js
|
||||
Returns the escaped JavaScript equivalent of the textual
|
||||
representation of its arguments.
|
||||
len
|
||||
Returns the integer length of its argument.
|
||||
not
|
||||
Returns the boolean negation of its single argument.
|
||||
or
|
||||
Returns the boolean OR of its arguments by returning the
|
||||
first non-empty argument or the last argument, that is,
|
||||
"or x y" behaves as "if x then x else y". All the
|
||||
arguments are evaluated.
|
||||
print
|
||||
An alias for fmt.Sprint
|
||||
printf
|
||||
An alias for fmt.Sprintf
|
||||
println
|
||||
An alias for fmt.Sprintln
|
||||
urlquery
|
||||
Returns the escaped value of the textual representation of
|
||||
its arguments in a form suitable for embedding in a URL query.
|
||||
|
||||
The boolean functions take any zero value to be false and a non-zero
|
||||
value to be true.
|
||||
|
||||
There is also a set of binary comparison operators defined as
|
||||
functions:
|
||||
|
||||
eq
|
||||
Returns the boolean truth of arg1 == arg2
|
||||
ne
|
||||
Returns the boolean truth of arg1 != arg2
|
||||
lt
|
||||
Returns the boolean truth of arg1 < arg2
|
||||
le
|
||||
Returns the boolean truth of arg1 <= arg2
|
||||
gt
|
||||
Returns the boolean truth of arg1 > arg2
|
||||
ge
|
||||
Returns the boolean truth of arg1 >= arg2
|
||||
|
||||
For simpler multi-way equality tests, eq (only) accepts two or more
|
||||
arguments and compares the second and subsequent to the first,
|
||||
returning in effect
|
||||
|
||||
arg1==arg2 || arg1==arg3 || arg1==arg4 ...
|
||||
|
||||
(Unlike with || in Go, however, eq is a function call and all the
|
||||
arguments will be evaluated.)
|
||||
|
||||
The comparison functions work on basic types only (or named basic
|
||||
types, such as "type Celsius float32"). They implement the Go rules
|
||||
for comparison of values, except that size and exact type are
|
||||
ignored, so any integer value, signed or unsigned, may be compared
|
||||
with any other integer value. (The arithmetic value is compared,
|
||||
not the bit pattern, so all negative integers are less than all
|
||||
unsigned integers.) However, as usual, one may not compare an int
|
||||
with a float32 and so on.
|
||||
|
||||
Associated templates
|
||||
|
||||
Each template is named by a string specified when it is created. Also, each
|
||||
template is associated with zero or more other templates that it may invoke by
|
||||
name; such associations are transitive and form a name space of templates.
|
||||
|
||||
A template may use a template invocation to instantiate another associated
|
||||
template; see the explanation of the "template" action above. The name must be
|
||||
that of a template associated with the template that contains the invocation.
|
||||
|
||||
Nested template definitions
|
||||
|
||||
When parsing a template, another template may be defined and associated with the
|
||||
template being parsed. Template definitions must appear at the top level of the
|
||||
template, much like global variables in a Go program.
|
||||
|
||||
The syntax of such definitions is to surround each template declaration with a
|
||||
"define" and "end" action.
|
||||
|
||||
The define action names the template being created by providing a string
|
||||
constant. Here is a simple example:
|
||||
|
||||
`{{define "T1"}}ONE{{end}}
|
||||
{{define "T2"}}TWO{{end}}
|
||||
{{define "T3"}}{{template "T1"}} {{template "T2"}}{{end}}
|
||||
{{template "T3"}}`
|
||||
|
||||
This defines two templates, T1 and T2, and a third T3 that invokes the other two
|
||||
when it is executed. Finally it invokes T3. If executed this template will
|
||||
produce the text
|
||||
|
||||
ONE TWO
|
||||
|
||||
By construction, a template may reside in only one association. If it's
|
||||
necessary to have a template addressable from multiple associations, the
|
||||
template definition must be parsed multiple times to create distinct *Template
|
||||
values, or must be copied with the Clone or AddParseTree method.
|
||||
|
||||
Parse may be called multiple times to assemble the various associated templates;
|
||||
see the ParseFiles and ParseGlob functions and methods for simple ways to parse
|
||||
related templates stored in files.
|
||||
|
||||
A template may be executed directly or through ExecuteTemplate, which executes
|
||||
an associated template identified by name. To invoke our example above, we
|
||||
might write,
|
||||
|
||||
err := tmpl.Execute(os.Stdout, "no data needed")
|
||||
if err != nil {
|
||||
log.Fatalf("execution failed: %s", err)
|
||||
}
|
||||
|
||||
or to invoke a particular template explicitly by name,
|
||||
|
||||
err := tmpl.ExecuteTemplate(os.Stdout, "T2", "no data needed")
|
||||
if err != nil {
|
||||
log.Fatalf("execution failed: %s", err)
|
||||
}
|
||||
|
||||
*/
|
||||
package template
|
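To tie the package documentation above together, here is a small, hedged end-to-end sketch (the template text and data are made up for illustration) that exercises if, range with two variables, with, and a pipeline through this fork's import path:

```
package main

import (
	"log"
	"os"

	"github.com/alecthomas/template"
)

func main() {
	const src = `{{if .Items}}Inventory:
{{range $i, $item := .Items}}  {{$i}}: {{$item | printf "%q"}}
{{end}}{{with .Owner}}Owner: {{.}}
{{end}}{{else}}No items.
{{end}}`

	data := struct {
		Items []string
		Owner string
	}{
		Items: []string{"wool", "linen"},
		Owner: "Josie",
	}

	t := template.Must(template.New("inventory").Parse(src))
	// Prints the header, each item with its index (quoted via the
	// printf pipeline), then the owner line.
	if err := t.Execute(os.Stdout, data); err != nil {
		log.Fatalf("execution failed: %s", err)
	}
}
```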
71
Godeps/_workspace/src/github.com/alecthomas/template/example_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,71 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package template_test
|
||||
|
||||
import (
|
||||
"github.com/alecthomas/template"
|
||||
"log"
|
||||
"os"
|
||||
)
|
||||
|
||||
func ExampleTemplate() {
|
||||
// Define a template.
|
||||
const letter = `
|
||||
Dear {{.Name}},
|
||||
{{if .Attended}}
|
||||
It was a pleasure to see you at the wedding.{{else}}
|
||||
It is a shame you couldn't make it to the wedding.{{end}}
|
||||
{{with .Gift}}Thank you for the lovely {{.}}.
|
||||
{{end}}
|
||||
Best wishes,
|
||||
Josie
|
||||
`
|
||||
|
||||
// Prepare some data to insert into the template.
|
||||
type Recipient struct {
|
||||
Name, Gift string
|
||||
Attended bool
|
||||
}
|
||||
var recipients = []Recipient{
|
||||
{"Aunt Mildred", "bone china tea set", true},
|
||||
{"Uncle John", "moleskin pants", false},
|
||||
{"Cousin Rodney", "", false},
|
||||
}
|
||||
|
||||
// Create a new template and parse the letter into it.
|
||||
t := template.Must(template.New("letter").Parse(letter))
|
||||
|
||||
// Execute the template for each recipient.
|
||||
for _, r := range recipients {
|
||||
err := t.Execute(os.Stdout, r)
|
||||
if err != nil {
|
||||
log.Println("executing template:", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// Dear Aunt Mildred,
|
||||
//
|
||||
// It was a pleasure to see you at the wedding.
|
||||
// Thank you for the lovely bone china tea set.
|
||||
//
|
||||
// Best wishes,
|
||||
// Josie
|
||||
//
|
||||
// Dear Uncle John,
|
||||
//
|
||||
// It is a shame you couldn't make it to the wedding.
|
||||
// Thank you for the lovely moleskin pants.
|
||||
//
|
||||
// Best wishes,
|
||||
// Josie
|
||||
//
|
||||
// Dear Cousin Rodney,
|
||||
//
|
||||
// It is a shame you couldn't make it to the wedding.
|
||||
//
|
||||
// Best wishes,
|
||||
// Josie
|
||||
}
|
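As a small follow-on to the example above: when one *Template holds several associated templates, a specific one can be run by name with ExecuteTemplate, as the package documentation earlier describes. A hedged sketch with made-up template names:

```
package main

import (
	"log"
	"os"

	"github.com/alecthomas/template"
)

func main() {
	const src = `{{define "greet"}}Hello, {{.}}!{{end}}{{define "bye"}}Goodbye, {{.}}.{{end}}`
	t := template.Must(template.New("root").Parse(src))

	// Pick the associated template to execute by its name.
	if err := t.ExecuteTemplate(os.Stdout, "greet", "Aunt Mildred"); err != nil {
		log.Fatalf("execution failed: %s", err)
	}
}
```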
182
Godeps/_workspace/src/github.com/alecthomas/template/examplefiles_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,182 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package template_test
|
||||
|
||||
import (
|
||||
"github.com/alecthomas/template"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// templateFile defines the contents of a template to be stored in a file, for testing.
|
||||
type templateFile struct {
|
||||
name string
|
||||
contents string
|
||||
}
|
||||
|
||||
func createTestDir(files []templateFile) string {
|
||||
dir, err := ioutil.TempDir("", "template")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
for _, file := range files {
|
||||
f, err := os.Create(filepath.Join(dir, file.name))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
defer f.Close()
|
||||
_, err = io.WriteString(f, file.contents)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
return dir
|
||||
}
|
||||
|
||||
// Here we demonstrate loading a set of templates from a directory.
|
||||
func ExampleTemplate_glob() {
|
||||
// Here we create a temporary directory and populate it with our sample
|
||||
// template definition files; usually the template files would already
|
||||
// exist in some location known to the program.
|
||||
dir := createTestDir([]templateFile{
|
||||
// T0.tmpl is a plain template file that just invokes T1.
|
||||
{"T0.tmpl", `T0 invokes T1: ({{template "T1"}})`},
|
||||
// T1.tmpl defines a template, T1 that invokes T2.
|
||||
{"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
|
||||
// T2.tmpl defines a template T2.
|
||||
{"T2.tmpl", `{{define "T2"}}This is T2{{end}}`},
|
||||
})
|
||||
// Clean up after the test; another quirk of running as an example.
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
// pattern is the glob pattern used to find all the template files.
|
||||
pattern := filepath.Join(dir, "*.tmpl")
|
||||
|
||||
// Here starts the example proper.
|
||||
// T0.tmpl is the first name matched, so it becomes the starting template,
|
||||
// the value returned by ParseGlob.
|
||||
tmpl := template.Must(template.ParseGlob(pattern))
|
||||
|
||||
err := tmpl.Execute(os.Stdout, nil)
|
||||
if err != nil {
|
||||
log.Fatalf("template execution: %s", err)
|
||||
}
|
||||
// Output:
|
||||
// T0 invokes T1: (T1 invokes T2: (This is T2))
|
||||
}
|
||||
|
||||
// This example demonstrates one way to share some templates
|
||||
// and use them in different contexts. In this variant we add multiple driver
|
||||
// templates by hand to an existing bundle of templates.
|
||||
func ExampleTemplate_helpers() {
|
||||
// Here we create a temporary directory and populate it with our sample
|
||||
// template definition files; usually the template files would already
|
||||
// exist in some location known to the program.
|
||||
dir := createTestDir([]templateFile{
|
||||
// T1.tmpl defines a template, T1 that invokes T2.
|
||||
{"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
|
||||
// T2.tmpl defines a template T2.
|
||||
{"T2.tmpl", `{{define "T2"}}This is T2{{end}}`},
|
||||
})
|
||||
// Clean up after the test; another quirk of running as an example.
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
// pattern is the glob pattern used to find all the template files.
|
||||
pattern := filepath.Join(dir, "*.tmpl")
|
||||
|
||||
// Here starts the example proper.
|
||||
// Load the helpers.
|
||||
templates := template.Must(template.ParseGlob(pattern))
|
||||
// Add one driver template to the bunch; we do this with an explicit template definition.
|
||||
_, err := templates.Parse("{{define `driver1`}}Driver 1 calls T1: ({{template `T1`}})\n{{end}}")
|
||||
if err != nil {
|
||||
log.Fatal("parsing driver1: ", err)
|
||||
}
|
||||
// Add another driver template.
|
||||
_, err = templates.Parse("{{define `driver2`}}Driver 2 calls T2: ({{template `T2`}})\n{{end}}")
|
||||
if err != nil {
|
||||
log.Fatal("parsing driver2: ", err)
|
||||
}
|
||||
// We load all the templates before execution. This package does not require
|
||||
// that behavior but html/template's escaping does, so it's a good habit.
|
||||
err = templates.ExecuteTemplate(os.Stdout, "driver1", nil)
|
||||
if err != nil {
|
||||
log.Fatalf("driver1 execution: %s", err)
|
||||
}
|
||||
err = templates.ExecuteTemplate(os.Stdout, "driver2", nil)
|
||||
if err != nil {
|
||||
log.Fatalf("driver2 execution: %s", err)
|
||||
}
|
||||
// Output:
|
||||
// Driver 1 calls T1: (T1 invokes T2: (This is T2))
|
||||
// Driver 2 calls T2: (This is T2)
|
||||
}
|
||||
|
||||
// This example demonstrates how to use one group of driver
|
||||
// templates with distinct sets of helper templates.
|
||||
func ExampleTemplate_share() {
|
||||
// Here we create a temporary directory and populate it with our sample
|
||||
// template definition files; usually the template files would already
|
||||
// exist in some location known to the program.
|
||||
dir := createTestDir([]templateFile{
|
||||
// T0.tmpl is a plain template file that just invokes T1.
|
||||
{"T0.tmpl", "T0 ({{.}} version) invokes T1: ({{template `T1`}})\n"},
|
||||
// T1.tmpl defines a template, T1 that invokes T2. Note T2 is not defined
|
||||
{"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
|
||||
})
|
||||
// Clean up after the test; another quirk of running as an example.
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
// pattern is the glob pattern used to find all the template files.
|
||||
pattern := filepath.Join(dir, "*.tmpl")
|
||||
|
||||
// Here starts the example proper.
|
||||
// Load the drivers.
|
||||
drivers := template.Must(template.ParseGlob(pattern))
|
||||
|
||||
// We must define an implementation of the T2 template. First we clone
|
||||
// the drivers, then add a definition of T2 to the template name space.
|
||||
|
||||
// 1. Clone the helper set to create a new name space from which to run them.
|
||||
first, err := drivers.Clone()
|
||||
if err != nil {
|
||||
log.Fatal("cloning helpers: ", err)
|
||||
}
|
||||
// 2. Define T2, version A, and parse it.
|
||||
_, err = first.Parse("{{define `T2`}}T2, version A{{end}}")
|
||||
if err != nil {
|
||||
log.Fatal("parsing T2: ", err)
|
||||
}
|
||||
|
||||
// Now repeat the whole thing, using a different version of T2.
|
||||
// 1. Clone the drivers.
|
||||
second, err := drivers.Clone()
|
||||
if err != nil {
|
||||
log.Fatal("cloning drivers: ", err)
|
||||
}
|
||||
// 2. Define T2, version B, and parse it.
|
||||
_, err = second.Parse("{{define `T2`}}T2, version B{{end}}")
|
||||
if err != nil {
|
||||
log.Fatal("parsing T2: ", err)
|
||||
}
|
||||
|
||||
// Execute the templates in the reverse order to verify the
|
||||
// first is unaffected by the second.
|
||||
err = second.ExecuteTemplate(os.Stdout, "T0.tmpl", "second")
|
||||
if err != nil {
|
||||
log.Fatalf("second execution: %s", err)
|
||||
}
|
||||
err = first.ExecuteTemplate(os.Stdout, "T0.tmpl", "first")
|
||||
if err != nil {
|
||||
log.Fatalf("first: execution: %s", err)
|
||||
}
|
||||
|
||||
// Output:
|
||||
// T0 (second version) invokes T1: (T1 invokes T2: (T2, version B))
|
||||
// T0 (first version) invokes T1: (T1 invokes T2: (T2, version A))
|
||||
}
|
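The examples above load helpers with ParseGlob; ParseFiles, mentioned in the package documentation, is the non-glob equivalent when the file names are known up front. A hedged sketch reusing the same hypothetical T1.tmpl/T2.tmpl layout (the "testdata" directory name is illustrative):

```
package main

import (
	"log"
	"os"
	"path/filepath"

	"github.com/alecthomas/template"
)

func main() {
	// dir is assumed to contain T1.tmpl and T2.tmpl as in the examples above.
	dir := "testdata"
	tmpl, err := template.ParseFiles(
		filepath.Join(dir, "T1.tmpl"),
		filepath.Join(dir, "T2.tmpl"),
	)
	if err != nil {
		log.Fatal(err)
	}
	// Run the T1 template defined in T1.tmpl; it invokes T2 from T2.tmpl.
	if err := tmpl.ExecuteTemplate(os.Stdout, "T1", nil); err != nil {
		log.Fatalf("template execution: %s", err)
	}
}
```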
54
Godeps/_workspace/src/github.com/alecthomas/template/examplefunc_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,54 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package template_test
|
||||
|
||||
import (
|
||||
"github.com/alecthomas/template"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// This example demonstrates a custom function to process template text.
|
||||
// It installs the strings.Title function and uses it to
|
||||
// Make Title Text Look Good In Our Template's Output.
|
||||
func ExampleTemplate_func() {
|
||||
// First we create a FuncMap with which to register the function.
|
||||
funcMap := template.FuncMap{
|
||||
// The name "title" is what the function will be called in the template text.
|
||||
"title": strings.Title,
|
||||
}
|
||||
|
||||
// A simple template definition to test our function.
|
||||
// We print the input text several ways:
|
||||
// - the original
|
||||
// - title-cased
|
||||
// - title-cased and then printed with %q
|
||||
// - printed with %q and then title-cased.
|
||||
const templateText = `
|
||||
Input: {{printf "%q" .}}
|
||||
Output 0: {{title .}}
|
||||
Output 1: {{title . | printf "%q"}}
|
||||
Output 2: {{printf "%q" . | title}}
|
||||
`
|
||||
|
||||
// Create a template, add the function map, and parse the text.
|
||||
tmpl, err := template.New("titleTest").Funcs(funcMap).Parse(templateText)
|
||||
if err != nil {
|
||||
log.Fatalf("parsing: %s", err)
|
||||
}
|
||||
|
||||
// Run the template to verify the output.
|
||||
err = tmpl.Execute(os.Stdout, "the go programming language")
|
||||
if err != nil {
|
||||
log.Fatalf("execution: %s", err)
|
||||
}
|
||||
|
||||
// Output:
|
||||
// Input: "the go programming language"
|
||||
// Output 0: The Go Programming Language
|
||||
// Output 1: "The Go Programming Language"
|
||||
// Output 2: "The Go Programming Language"
|
||||
}
|
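A FuncMap may register more than one helper. Here is a hedged variant of the example above that adds strings.ToUpper alongside strings.Title; the map keys "title" and "upper" are illustrative names, not required by the package:

```
package main

import (
	"log"
	"os"
	"strings"

	"github.com/alecthomas/template"
)

func main() {
	funcMap := template.FuncMap{
		"title": strings.Title,
		"upper": strings.ToUpper,
	}

	const templateText = `{{title .}} / {{upper .}}
`
	tmpl, err := template.New("multiFunc").Funcs(funcMap).Parse(templateText)
	if err != nil {
		log.Fatalf("parsing: %s", err)
	}
	// Prints: The Go Programming Language / THE GO PROGRAMMING LANGUAGE
	if err := tmpl.Execute(os.Stdout, "the go programming language"); err != nil {
		log.Fatalf("execution: %s", err)
	}
}
```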
844
Godeps/_workspace/src/github.com/alecthomas/template/exec.go
generated
vendored
Normal file
|
@@ -0,0 +1,844 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package template
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/alecthomas/template/parse"
|
||||
"io"
|
||||
"reflect"
|
||||
"runtime"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// state represents the state of an execution. It's not part of the
|
||||
// template so that multiple executions of the same template
|
||||
// can execute in parallel.
|
||||
type state struct {
|
||||
tmpl *Template
|
||||
wr io.Writer
|
||||
node parse.Node // current node, for errors
|
||||
vars []variable // push-down stack of variable values.
|
||||
}
|
||||
|
||||
// variable holds the dynamic value of a variable such as $, $x etc.
|
||||
type variable struct {
|
||||
name string
|
||||
value reflect.Value
|
||||
}
|
||||
|
||||
// push pushes a new variable on the stack.
|
||||
func (s *state) push(name string, value reflect.Value) {
|
||||
s.vars = append(s.vars, variable{name, value})
|
||||
}
|
||||
|
||||
// mark returns the length of the variable stack.
|
||||
func (s *state) mark() int {
|
||||
return len(s.vars)
|
||||
}
|
||||
|
||||
// pop pops the variable stack up to the mark.
|
||||
func (s *state) pop(mark int) {
|
||||
s.vars = s.vars[0:mark]
|
||||
}
|
||||
|
||||
// setVar overwrites the top-nth variable on the stack. Used by range iterations.
|
||||
func (s *state) setVar(n int, value reflect.Value) {
|
||||
s.vars[len(s.vars)-n].value = value
|
||||
}
|
||||
|
||||
// varValue returns the value of the named variable.
|
||||
func (s *state) varValue(name string) reflect.Value {
|
||||
for i := s.mark() - 1; i >= 0; i-- {
|
||||
if s.vars[i].name == name {
|
||||
return s.vars[i].value
|
||||
}
|
||||
}
|
||||
s.errorf("undefined variable: %s", name)
|
||||
return zero
|
||||
}
|
||||
|
||||
var zero reflect.Value
|
||||
|
||||
// at marks the state to be on node n, for error reporting.
|
||||
func (s *state) at(node parse.Node) {
|
||||
s.node = node
|
||||
}
|
||||
|
||||
// doublePercent returns the string with %'s replaced by %%, if necessary,
|
||||
// so it can be used safely inside a Printf format string.
|
||||
func doublePercent(str string) string {
|
||||
if strings.Contains(str, "%") {
|
||||
str = strings.Replace(str, "%", "%%", -1)
|
||||
}
|
||||
return str
|
||||
}
|
||||
|
||||
// errorf formats the error and terminates processing.
|
||||
func (s *state) errorf(format string, args ...interface{}) {
|
||||
name := doublePercent(s.tmpl.Name())
|
||||
if s.node == nil {
|
||||
format = fmt.Sprintf("template: %s: %s", name, format)
|
||||
} else {
|
||||
location, context := s.tmpl.ErrorContext(s.node)
|
||||
format = fmt.Sprintf("template: %s: executing %q at <%s>: %s", location, name, doublePercent(context), format)
|
||||
}
|
||||
panic(fmt.Errorf(format, args...))
|
||||
}
|
||||
|
||||
// errRecover is the handler that turns panics into returns from the top
|
||||
// level of Parse.
|
||||
func errRecover(errp *error) {
|
||||
e := recover()
|
||||
if e != nil {
|
||||
switch err := e.(type) {
|
||||
case runtime.Error:
|
||||
panic(e)
|
||||
case error:
|
||||
*errp = err
|
||||
default:
|
||||
panic(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ExecuteTemplate applies the template associated with t that has the given name
|
||||
// to the specified data object and writes the output to wr.
|
||||
// If an error occurs executing the template or writing its output,
|
||||
// execution stops, but partial results may already have been written to
|
||||
// the output writer.
|
||||
// A template may be executed safely in parallel.
|
||||
func (t *Template) ExecuteTemplate(wr io.Writer, name string, data interface{}) error {
|
||||
tmpl := t.tmpl[name]
|
||||
if tmpl == nil {
|
||||
return fmt.Errorf("template: no template %q associated with template %q", name, t.name)
|
||||
}
|
||||
return tmpl.Execute(wr, data)
|
||||
}
|
||||
|
||||
// Execute applies a parsed template to the specified data object,
|
||||
// and writes the output to wr.
|
||||
// If an error occurs executing the template or writing its output,
|
||||
// execution stops, but partial results may already have been written to
|
||||
// the output writer.
|
||||
// A template may be executed safely in parallel.
|
||||
func (t *Template) Execute(wr io.Writer, data interface{}) (err error) {
|
||||
defer errRecover(&err)
|
||||
value := reflect.ValueOf(data)
|
||||
state := &state{
|
||||
tmpl: t,
|
||||
wr: wr,
|
||||
vars: []variable{{"$", value}},
|
||||
}
|
||||
t.init()
|
||||
if t.Tree == nil || t.Root == nil {
|
||||
var b bytes.Buffer
|
||||
for name, tmpl := range t.tmpl {
|
||||
if tmpl.Tree == nil || tmpl.Root == nil {
|
||||
continue
|
||||
}
|
||||
if b.Len() > 0 {
|
||||
b.WriteString(", ")
|
||||
}
|
||||
fmt.Fprintf(&b, "%q", name)
|
||||
}
|
||||
var s string
|
||||
if b.Len() > 0 {
|
||||
s = "; defined templates are: " + b.String()
|
||||
}
|
||||
state.errorf("%q is an incomplete or empty template%s", t.Name(), s)
|
||||
}
|
||||
state.walk(value, t.Root)
|
||||
return
|
||||
}
|
||||
|
||||
// Walk functions step through the major pieces of the template structure,
|
||||
// generating output as they go.
|
||||
func (s *state) walk(dot reflect.Value, node parse.Node) {
|
||||
s.at(node)
|
||||
switch node := node.(type) {
|
||||
case *parse.ActionNode:
|
||||
// Do not pop variables so they persist until next end.
|
||||
// Also, if the action declares variables, don't print the result.
|
||||
val := s.evalPipeline(dot, node.Pipe)
|
||||
if len(node.Pipe.Decl) == 0 {
|
||||
s.printValue(node, val)
|
||||
}
|
||||
case *parse.IfNode:
|
||||
s.walkIfOrWith(parse.NodeIf, dot, node.Pipe, node.List, node.ElseList)
|
||||
case *parse.ListNode:
|
||||
for _, node := range node.Nodes {
|
||||
s.walk(dot, node)
|
||||
}
|
||||
case *parse.RangeNode:
|
||||
s.walkRange(dot, node)
|
||||
case *parse.TemplateNode:
|
||||
s.walkTemplate(dot, node)
|
||||
case *parse.TextNode:
|
||||
if _, err := s.wr.Write(node.Text); err != nil {
|
||||
s.errorf("%s", err)
|
||||
}
|
||||
case *parse.WithNode:
|
||||
s.walkIfOrWith(parse.NodeWith, dot, node.Pipe, node.List, node.ElseList)
|
||||
default:
|
||||
s.errorf("unknown node: %s", node)
|
||||
}
|
||||
}
|
||||
|
||||
// walkIfOrWith walks an 'if' or 'with' node. The two control structures
|
||||
// are identical in behavior except that 'with' sets dot.
|
||||
func (s *state) walkIfOrWith(typ parse.NodeType, dot reflect.Value, pipe *parse.PipeNode, list, elseList *parse.ListNode) {
|
||||
defer s.pop(s.mark())
|
||||
val := s.evalPipeline(dot, pipe)
|
||||
truth, ok := isTrue(val)
|
||||
if !ok {
|
||||
s.errorf("if/with can't use %v", val)
|
||||
}
|
||||
if truth {
|
||||
if typ == parse.NodeWith {
|
||||
s.walk(val, list)
|
||||
} else {
|
||||
s.walk(dot, list)
|
||||
}
|
||||
} else if elseList != nil {
|
||||
s.walk(dot, elseList)
|
||||
}
|
||||
}
|
||||
|
||||
// isTrue reports whether the value is 'true', in the sense of not the zero of its type,
|
||||
// and whether the value has a meaningful truth value.
|
||||
func isTrue(val reflect.Value) (truth, ok bool) {
|
||||
if !val.IsValid() {
|
||||
// Something like var x interface{}, never set. It's a form of nil.
|
||||
return false, true
|
||||
}
|
||||
switch val.Kind() {
|
||||
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
|
||||
truth = val.Len() > 0
|
||||
case reflect.Bool:
|
||||
truth = val.Bool()
|
||||
case reflect.Complex64, reflect.Complex128:
|
||||
truth = val.Complex() != 0
|
||||
case reflect.Chan, reflect.Func, reflect.Ptr, reflect.Interface:
|
||||
truth = !val.IsNil()
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
truth = val.Int() != 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
truth = val.Float() != 0
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
truth = val.Uint() != 0
|
||||
case reflect.Struct:
|
||||
truth = true // Struct values are always true.
|
||||
default:
|
||||
return
|
||||
}
|
||||
return truth, true
|
||||
}
|
||||
|
||||
func (s *state) walkRange(dot reflect.Value, r *parse.RangeNode) {
|
||||
s.at(r)
|
||||
defer s.pop(s.mark())
|
||||
val, _ := indirect(s.evalPipeline(dot, r.Pipe))
|
||||
// mark top of stack before any variables in the body are pushed.
|
||||
mark := s.mark()
|
||||
oneIteration := func(index, elem reflect.Value) {
|
||||
// Set top var (lexically the second if there are two) to the element.
|
||||
if len(r.Pipe.Decl) > 0 {
|
||||
s.setVar(1, elem)
|
||||
}
|
||||
// Set next var (lexically the first if there are two) to the index.
|
||||
if len(r.Pipe.Decl) > 1 {
|
||||
s.setVar(2, index)
|
||||
}
|
||||
s.walk(elem, r.List)
|
||||
s.pop(mark)
|
||||
}
|
||||
switch val.Kind() {
|
||||
case reflect.Array, reflect.Slice:
|
||||
if val.Len() == 0 {
|
||||
break
|
||||
}
|
||||
for i := 0; i < val.Len(); i++ {
|
||||
oneIteration(reflect.ValueOf(i), val.Index(i))
|
||||
}
|
||||
return
|
||||
case reflect.Map:
|
||||
if val.Len() == 0 {
|
||||
break
|
||||
}
|
||||
for _, key := range sortKeys(val.MapKeys()) {
|
||||
oneIteration(key, val.MapIndex(key))
|
||||
}
|
||||
return
|
||||
case reflect.Chan:
|
||||
if val.IsNil() {
|
||||
break
|
||||
}
|
||||
i := 0
|
||||
for ; ; i++ {
|
||||
elem, ok := val.Recv()
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
oneIteration(reflect.ValueOf(i), elem)
|
||||
}
|
||||
if i == 0 {
|
||||
break
|
||||
}
|
||||
return
|
||||
case reflect.Invalid:
|
||||
break // An invalid value is likely a nil map, etc. and acts like an empty map.
|
||||
default:
|
||||
s.errorf("range can't iterate over %v", val)
|
||||
}
|
||||
if r.ElseList != nil {
|
||||
s.walk(dot, r.ElseList)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *state) walkTemplate(dot reflect.Value, t *parse.TemplateNode) {
|
||||
s.at(t)
|
||||
tmpl := s.tmpl.tmpl[t.Name]
|
||||
if tmpl == nil {
|
||||
s.errorf("template %q not defined", t.Name)
|
||||
}
|
||||
// Variables declared by the pipeline persist.
|
||||
dot = s.evalPipeline(dot, t.Pipe)
|
||||
newState := *s
|
||||
newState.tmpl = tmpl
|
||||
// No dynamic scoping: template invocations inherit no variables.
|
||||
newState.vars = []variable{{"$", dot}}
|
||||
newState.walk(dot, tmpl.Root)
|
||||
}
|
||||
|
||||
// Eval functions evaluate pipelines, commands, and their elements and extract
|
||||
// values from the data structure by examining fields, calling methods, and so on.
|
||||
// The printing of those values happens only through walk functions.
|
||||
|
||||
// evalPipeline returns the value acquired by evaluating a pipeline. If the
|
||||
// pipeline has a variable declaration, the variable will be pushed on the
|
||||
// stack. Callers should therefore pop the stack after they are finished
|
||||
// executing commands depending on the pipeline value.
|
||||
func (s *state) evalPipeline(dot reflect.Value, pipe *parse.PipeNode) (value reflect.Value) {
|
||||
if pipe == nil {
|
||||
return
|
||||
}
|
||||
s.at(pipe)
|
||||
for _, cmd := range pipe.Cmds {
|
||||
value = s.evalCommand(dot, cmd, value) // previous value is this one's final arg.
|
||||
// If the object has type interface{}, dig down one level to the thing inside.
|
||||
if value.Kind() == reflect.Interface && value.Type().NumMethod() == 0 {
|
||||
value = reflect.ValueOf(value.Interface()) // lovely!
|
||||
}
|
||||
}
|
||||
for _, variable := range pipe.Decl {
|
||||
s.push(variable.Ident[0], value)
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
func (s *state) notAFunction(args []parse.Node, final reflect.Value) {
|
||||
if len(args) > 1 || final.IsValid() {
|
||||
s.errorf("can't give argument to non-function %s", args[0])
|
||||
}
|
||||
}
|
||||
|
||||
func (s *state) evalCommand(dot reflect.Value, cmd *parse.CommandNode, final reflect.Value) reflect.Value {
|
||||
firstWord := cmd.Args[0]
|
||||
switch n := firstWord.(type) {
|
||||
case *parse.FieldNode:
|
||||
return s.evalFieldNode(dot, n, cmd.Args, final)
|
||||
case *parse.ChainNode:
|
||||
return s.evalChainNode(dot, n, cmd.Args, final)
|
||||
case *parse.IdentifierNode:
|
||||
// Must be a function.
|
||||
return s.evalFunction(dot, n, cmd, cmd.Args, final)
|
||||
case *parse.PipeNode:
|
||||
// Parenthesized pipeline. The arguments are all inside the pipeline; final is ignored.
|
||||
return s.evalPipeline(dot, n)
|
||||
case *parse.VariableNode:
|
||||
return s.evalVariableNode(dot, n, cmd.Args, final)
|
||||
}
|
||||
s.at(firstWord)
|
||||
s.notAFunction(cmd.Args, final)
|
||||
switch word := firstWord.(type) {
|
||||
case *parse.BoolNode:
|
||||
return reflect.ValueOf(word.True)
|
||||
case *parse.DotNode:
|
||||
return dot
|
||||
case *parse.NilNode:
|
||||
s.errorf("nil is not a command")
|
||||
case *parse.NumberNode:
|
||||
return s.idealConstant(word)
|
||||
case *parse.StringNode:
|
||||
return reflect.ValueOf(word.Text)
|
||||
}
|
||||
s.errorf("can't evaluate command %q", firstWord)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
// idealConstant is called to return the value of a number in a context where
|
||||
// we don't know the type. In that case, the syntax of the number tells us
|
||||
// its type, and we use Go rules to resolve. Note there is no such thing as
|
||||
// a uint ideal constant in this situation - the value must be of int type.
|
||||
func (s *state) idealConstant(constant *parse.NumberNode) reflect.Value {
|
||||
// These are ideal constants but we don't know the type
|
||||
// and we have no context. (If it was a method argument,
|
||||
// we'd know what we need.) The syntax guides us to some extent.
|
||||
s.at(constant)
|
||||
switch {
|
||||
case constant.IsComplex:
|
||||
return reflect.ValueOf(constant.Complex128) // incontrovertible.
|
||||
case constant.IsFloat && !isHexConstant(constant.Text) && strings.IndexAny(constant.Text, ".eE") >= 0:
|
||||
return reflect.ValueOf(constant.Float64)
|
||||
case constant.IsInt:
|
||||
n := int(constant.Int64)
|
||||
if int64(n) != constant.Int64 {
|
||||
s.errorf("%s overflows int", constant.Text)
|
||||
}
|
||||
return reflect.ValueOf(n)
|
||||
case constant.IsUint:
|
||||
s.errorf("%s overflows int", constant.Text)
|
||||
}
|
||||
return zero
|
||||
}
|
||||
|
||||
func isHexConstant(s string) bool {
|
||||
return len(s) > 2 && s[0] == '0' && (s[1] == 'x' || s[1] == 'X')
|
||||
}
|
||||
|
||||
func (s *state) evalFieldNode(dot reflect.Value, field *parse.FieldNode, args []parse.Node, final reflect.Value) reflect.Value {
|
||||
s.at(field)
|
||||
return s.evalFieldChain(dot, dot, field, field.Ident, args, final)
|
||||
}
|
||||
|
||||
func (s *state) evalChainNode(dot reflect.Value, chain *parse.ChainNode, args []parse.Node, final reflect.Value) reflect.Value {
|
||||
s.at(chain)
|
||||
// (pipe).Field1.Field2 has pipe as .Node, fields as .Field. Eval the pipeline, then the fields.
|
||||
pipe := s.evalArg(dot, nil, chain.Node)
|
||||
if len(chain.Field) == 0 {
|
||||
s.errorf("internal error: no fields in evalChainNode")
|
||||
}
|
||||
return s.evalFieldChain(dot, pipe, chain, chain.Field, args, final)
|
||||
}
|
||||
|
||||
func (s *state) evalVariableNode(dot reflect.Value, variable *parse.VariableNode, args []parse.Node, final reflect.Value) reflect.Value {
|
||||
// $x.Field has $x as the first ident, Field as the second. Eval the var, then the fields.
|
||||
s.at(variable)
|
||||
value := s.varValue(variable.Ident[0])
|
||||
if len(variable.Ident) == 1 {
|
||||
s.notAFunction(args, final)
|
||||
return value
|
||||
}
|
||||
return s.evalFieldChain(dot, value, variable, variable.Ident[1:], args, final)
|
||||
}
|
||||
|
||||
// evalFieldChain evaluates .X.Y.Z possibly followed by arguments.
|
||||
// dot is the environment in which to evaluate arguments, while
|
||||
// receiver is the value being walked along the chain.
|
||||
func (s *state) evalFieldChain(dot, receiver reflect.Value, node parse.Node, ident []string, args []parse.Node, final reflect.Value) reflect.Value {
|
||||
n := len(ident)
|
||||
for i := 0; i < n-1; i++ {
|
||||
receiver = s.evalField(dot, ident[i], node, nil, zero, receiver)
|
||||
}
|
||||
// Now if it's a method, it gets the arguments.
|
||||
return s.evalField(dot, ident[n-1], node, args, final, receiver)
|
||||
}
|
||||
|
||||
func (s *state) evalFunction(dot reflect.Value, node *parse.IdentifierNode, cmd parse.Node, args []parse.Node, final reflect.Value) reflect.Value {
|
||||
s.at(node)
|
||||
name := node.Ident
|
||||
function, ok := findFunction(name, s.tmpl)
|
||||
if !ok {
|
||||
s.errorf("%q is not a defined function", name)
|
||||
}
|
||||
return s.evalCall(dot, function, cmd, name, args, final)
|
||||
}
|
||||
|
||||
// evalField evaluates an expression like (.Field) or (.Field arg1 arg2).
|
||||
// The 'final' argument represents the return value from the preceding
|
||||
// value of the pipeline, if any.
|
||||
func (s *state) evalField(dot reflect.Value, fieldName string, node parse.Node, args []parse.Node, final, receiver reflect.Value) reflect.Value {
|
||||
if !receiver.IsValid() {
|
||||
return zero
|
||||
}
|
||||
typ := receiver.Type()
|
||||
receiver, _ = indirect(receiver)
|
||||
// Unless it's an interface, need to get to a value of type *T to guarantee
|
||||
// we see all methods of T and *T.
|
||||
ptr := receiver
|
||||
if ptr.Kind() != reflect.Interface && ptr.CanAddr() {
|
||||
ptr = ptr.Addr()
|
||||
}
|
||||
if method := ptr.MethodByName(fieldName); method.IsValid() {
|
||||
return s.evalCall(dot, method, node, fieldName, args, final)
|
||||
}
|
||||
hasArgs := len(args) > 1 || final.IsValid()
|
||||
// It's not a method; must be a field of a struct or an element of a map. The receiver must not be nil.
|
||||
receiver, isNil := indirect(receiver)
|
||||
if isNil {
|
||||
s.errorf("nil pointer evaluating %s.%s", typ, fieldName)
|
||||
}
|
||||
switch receiver.Kind() {
|
||||
case reflect.Struct:
|
||||
tField, ok := receiver.Type().FieldByName(fieldName)
|
||||
if ok {
|
||||
field := receiver.FieldByIndex(tField.Index)
|
||||
if tField.PkgPath != "" { // field is unexported
|
||||
s.errorf("%s is an unexported field of struct type %s", fieldName, typ)
|
||||
}
|
||||
// If it's a function, we must call it.
|
||||
if hasArgs {
|
||||
s.errorf("%s has arguments but cannot be invoked as function", fieldName)
|
||||
}
|
||||
return field
|
||||
}
|
||||
s.errorf("%s is not a field of struct type %s", fieldName, typ)
|
||||
case reflect.Map:
|
||||
// If it's a map, attempt to use the field name as a key.
|
||||
nameVal := reflect.ValueOf(fieldName)
|
||||
if nameVal.Type().AssignableTo(receiver.Type().Key()) {
|
||||
if hasArgs {
|
||||
s.errorf("%s is not a method but has arguments", fieldName)
|
||||
}
|
||||
return receiver.MapIndex(nameVal)
|
||||
}
|
||||
}
|
||||
s.errorf("can't evaluate field %s in type %s", fieldName, typ)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
var (
|
||||
errorType = reflect.TypeOf((*error)(nil)).Elem()
|
||||
fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem()
|
||||
)
|
||||
|
||||
// evalCall executes a function or method call. If it's a method, fun already has the receiver bound, so
|
||||
// it looks just like a function call. The arg list, if non-nil, includes (in the manner of the shell), arg[0]
|
||||
// as the function itself.
|
||||
func (s *state) evalCall(dot, fun reflect.Value, node parse.Node, name string, args []parse.Node, final reflect.Value) reflect.Value {
|
||||
if args != nil {
|
||||
args = args[1:] // Zeroth arg is function name/node; not passed to function.
|
||||
}
|
||||
typ := fun.Type()
|
||||
numIn := len(args)
|
||||
if final.IsValid() {
|
||||
numIn++
|
||||
}
|
||||
numFixed := len(args)
|
||||
if typ.IsVariadic() {
|
||||
numFixed = typ.NumIn() - 1 // last arg is the variadic one.
|
||||
if numIn < numFixed {
|
||||
s.errorf("wrong number of args for %s: want at least %d got %d", name, typ.NumIn()-1, len(args))
|
||||
}
|
||||
} else if numIn < typ.NumIn()-1 || !typ.IsVariadic() && numIn != typ.NumIn() {
|
||||
s.errorf("wrong number of args for %s: want %d got %d", name, typ.NumIn(), len(args))
|
||||
}
|
||||
if !goodFunc(typ) {
|
||||
// TODO: This could still be a confusing error; maybe goodFunc should provide info.
|
||||
s.errorf("can't call method/function %q with %d results", name, typ.NumOut())
|
||||
}
|
||||
// Build the arg list.
|
||||
argv := make([]reflect.Value, numIn)
|
||||
// Args must be evaluated. Fixed args first.
|
||||
i := 0
|
||||
for ; i < numFixed && i < len(args); i++ {
|
||||
argv[i] = s.evalArg(dot, typ.In(i), args[i])
|
||||
}
|
||||
// Now the ... args.
|
||||
if typ.IsVariadic() {
|
||||
argType := typ.In(typ.NumIn() - 1).Elem() // Argument is a slice.
|
||||
for ; i < len(args); i++ {
|
||||
argv[i] = s.evalArg(dot, argType, args[i])
|
||||
}
|
||||
}
|
||||
// Add final value if necessary.
|
||||
if final.IsValid() {
|
||||
t := typ.In(typ.NumIn() - 1)
|
||||
if typ.IsVariadic() {
|
||||
t = t.Elem()
|
||||
}
|
||||
argv[i] = s.validateType(final, t)
|
||||
}
|
||||
result := fun.Call(argv)
|
||||
// If we have an error that is not nil, stop execution and return that error to the caller.
|
||||
if len(result) == 2 && !result[1].IsNil() {
|
||||
s.at(node)
|
||||
s.errorf("error calling %s: %s", name, result[1].Interface().(error))
|
||||
}
|
||||
return result[0]
|
||||
}
|
||||
|
||||
// canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero.
|
||||
func canBeNil(typ reflect.Type) bool {
|
||||
switch typ.Kind() {
|
||||
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// validateType guarantees that the value is valid and assignable to the type.
|
||||
func (s *state) validateType(value reflect.Value, typ reflect.Type) reflect.Value {
|
||||
if !value.IsValid() {
|
||||
if typ == nil || canBeNil(typ) {
|
||||
// An untyped nil interface{}. Accept as a proper nil value.
|
||||
return reflect.Zero(typ)
|
||||
}
|
||||
s.errorf("invalid value; expected %s", typ)
|
||||
}
|
||||
if typ != nil && !value.Type().AssignableTo(typ) {
|
||||
if value.Kind() == reflect.Interface && !value.IsNil() {
|
||||
value = value.Elem()
|
||||
if value.Type().AssignableTo(typ) {
|
||||
return value
|
||||
}
|
||||
// fallthrough
|
||||
}
|
||||
// Does one dereference or indirection work? We could do more, as we
|
||||
// do with method receivers, but that gets messy and method receivers
|
||||
// are much more constrained, so it makes more sense there than here.
|
||||
// Besides, one is almost always all you need.
|
||||
switch {
|
||||
case value.Kind() == reflect.Ptr && value.Type().Elem().AssignableTo(typ):
|
||||
value = value.Elem()
|
||||
if !value.IsValid() {
|
||||
s.errorf("dereference of nil pointer of type %s", typ)
|
||||
}
|
||||
case reflect.PtrTo(value.Type()).AssignableTo(typ) && value.CanAddr():
|
||||
value = value.Addr()
|
||||
default:
|
||||
s.errorf("wrong type for value; expected %s; got %s", typ, value.Type())
|
||||
}
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
func (s *state) evalArg(dot reflect.Value, typ reflect.Type, n parse.Node) reflect.Value {
|
||||
s.at(n)
|
||||
switch arg := n.(type) {
|
||||
case *parse.DotNode:
|
||||
return s.validateType(dot, typ)
|
||||
case *parse.NilNode:
|
||||
if canBeNil(typ) {
|
||||
return reflect.Zero(typ)
|
||||
}
|
||||
s.errorf("cannot assign nil to %s", typ)
|
||||
case *parse.FieldNode:
|
||||
return s.validateType(s.evalFieldNode(dot, arg, []parse.Node{n}, zero), typ)
|
||||
case *parse.VariableNode:
|
||||
return s.validateType(s.evalVariableNode(dot, arg, nil, zero), typ)
|
||||
case *parse.PipeNode:
|
||||
return s.validateType(s.evalPipeline(dot, arg), typ)
|
||||
case *parse.IdentifierNode:
|
||||
return s.evalFunction(dot, arg, arg, nil, zero)
|
||||
case *parse.ChainNode:
|
||||
return s.validateType(s.evalChainNode(dot, arg, nil, zero), typ)
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Bool:
|
||||
return s.evalBool(typ, n)
|
||||
case reflect.Complex64, reflect.Complex128:
|
||||
return s.evalComplex(typ, n)
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return s.evalFloat(typ, n)
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return s.evalInteger(typ, n)
|
||||
case reflect.Interface:
|
||||
if typ.NumMethod() == 0 {
|
||||
return s.evalEmptyInterface(dot, n)
|
||||
}
|
||||
case reflect.String:
|
||||
return s.evalString(typ, n)
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return s.evalUnsignedInteger(typ, n)
|
||||
}
|
||||
s.errorf("can't handle %s for arg of type %s", n, typ)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
func (s *state) evalBool(typ reflect.Type, n parse.Node) reflect.Value {
|
||||
s.at(n)
|
||||
if n, ok := n.(*parse.BoolNode); ok {
|
||||
value := reflect.New(typ).Elem()
|
||||
value.SetBool(n.True)
|
||||
return value
|
||||
}
|
||||
s.errorf("expected bool; found %s", n)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
func (s *state) evalString(typ reflect.Type, n parse.Node) reflect.Value {
|
||||
s.at(n)
|
||||
if n, ok := n.(*parse.StringNode); ok {
|
||||
value := reflect.New(typ).Elem()
|
||||
value.SetString(n.Text)
|
||||
return value
|
||||
}
|
||||
s.errorf("expected string; found %s", n)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
func (s *state) evalInteger(typ reflect.Type, n parse.Node) reflect.Value {
|
||||
s.at(n)
|
||||
if n, ok := n.(*parse.NumberNode); ok && n.IsInt {
|
||||
value := reflect.New(typ).Elem()
|
||||
value.SetInt(n.Int64)
|
||||
return value
|
||||
}
|
||||
s.errorf("expected integer; found %s", n)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
func (s *state) evalUnsignedInteger(typ reflect.Type, n parse.Node) reflect.Value {
|
||||
s.at(n)
|
||||
if n, ok := n.(*parse.NumberNode); ok && n.IsUint {
|
||||
value := reflect.New(typ).Elem()
|
||||
value.SetUint(n.Uint64)
|
||||
return value
|
||||
}
|
||||
s.errorf("expected unsigned integer; found %s", n)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
func (s *state) evalFloat(typ reflect.Type, n parse.Node) reflect.Value {
|
||||
s.at(n)
|
||||
if n, ok := n.(*parse.NumberNode); ok && n.IsFloat {
|
||||
value := reflect.New(typ).Elem()
|
||||
value.SetFloat(n.Float64)
|
||||
return value
|
||||
}
|
||||
s.errorf("expected float; found %s", n)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
func (s *state) evalComplex(typ reflect.Type, n parse.Node) reflect.Value {
|
||||
if n, ok := n.(*parse.NumberNode); ok && n.IsComplex {
|
||||
value := reflect.New(typ).Elem()
|
||||
value.SetComplex(n.Complex128)
|
||||
return value
|
||||
}
|
||||
s.errorf("expected complex; found %s", n)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
func (s *state) evalEmptyInterface(dot reflect.Value, n parse.Node) reflect.Value {
|
||||
s.at(n)
|
||||
switch n := n.(type) {
|
||||
case *parse.BoolNode:
|
||||
return reflect.ValueOf(n.True)
|
||||
case *parse.DotNode:
|
||||
return dot
|
||||
case *parse.FieldNode:
|
||||
return s.evalFieldNode(dot, n, nil, zero)
|
||||
case *parse.IdentifierNode:
|
||||
return s.evalFunction(dot, n, n, nil, zero)
|
||||
case *parse.NilNode:
|
||||
// NilNode is handled in evalArg, the only place that calls here.
|
||||
s.errorf("evalEmptyInterface: nil (can't happen)")
|
||||
case *parse.NumberNode:
|
||||
return s.idealConstant(n)
|
||||
case *parse.StringNode:
|
||||
return reflect.ValueOf(n.Text)
|
||||
case *parse.VariableNode:
|
||||
return s.evalVariableNode(dot, n, nil, zero)
|
||||
case *parse.PipeNode:
|
||||
return s.evalPipeline(dot, n)
|
||||
}
|
||||
s.errorf("can't handle assignment of %s to empty interface argument", n)
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
// indirect returns the item at the end of indirection, and a bool to indicate if it's nil.
|
||||
// We indirect through pointers and empty interfaces (only) because
|
||||
// non-empty interfaces have methods we might need.
|
||||
func indirect(v reflect.Value) (rv reflect.Value, isNil bool) {
|
||||
for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() {
|
||||
if v.IsNil() {
|
||||
return v, true
|
||||
}
|
||||
if v.Kind() == reflect.Interface && v.NumMethod() > 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
return v, false
|
||||
}
|
||||
|
||||
// printValue writes the textual representation of the value to the output of
|
||||
// the template.
|
||||
func (s *state) printValue(n parse.Node, v reflect.Value) {
|
||||
s.at(n)
|
||||
iface, ok := printableValue(v)
|
||||
if !ok {
|
||||
s.errorf("can't print %s of type %s", n, v.Type())
|
||||
}
|
||||
fmt.Fprint(s.wr, iface)
|
||||
}
|
||||
|
||||
// printableValue returns the, possibly indirected, interface value inside v that
|
||||
// is best for a call to a formatted printer.
|
||||
func printableValue(v reflect.Value) (interface{}, bool) {
|
||||
if v.Kind() == reflect.Ptr {
|
||||
v, _ = indirect(v) // fmt.Fprint handles nil.
|
||||
}
|
||||
if !v.IsValid() {
|
||||
return "<no value>", true
|
||||
}
|
||||
|
||||
if !v.Type().Implements(errorType) && !v.Type().Implements(fmtStringerType) {
|
||||
if v.CanAddr() && (reflect.PtrTo(v.Type()).Implements(errorType) || reflect.PtrTo(v.Type()).Implements(fmtStringerType)) {
|
||||
v = v.Addr()
|
||||
} else {
|
||||
switch v.Kind() {
|
||||
case reflect.Chan, reflect.Func:
|
||||
return nil, false
|
||||
}
|
||||
}
|
||||
}
|
||||
return v.Interface(), true
|
||||
}
|
||||
|
||||
// Types to help sort the keys in a map for reproducible output.
|
||||
|
||||
type rvs []reflect.Value
|
||||
|
||||
func (x rvs) Len() int { return len(x) }
|
||||
func (x rvs) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
type rvInts struct{ rvs }
|
||||
|
||||
func (x rvInts) Less(i, j int) bool { return x.rvs[i].Int() < x.rvs[j].Int() }
|
||||
|
||||
type rvUints struct{ rvs }
|
||||
|
||||
func (x rvUints) Less(i, j int) bool { return x.rvs[i].Uint() < x.rvs[j].Uint() }
|
||||
|
||||
type rvFloats struct{ rvs }
|
||||
|
||||
func (x rvFloats) Less(i, j int) bool { return x.rvs[i].Float() < x.rvs[j].Float() }
|
||||
|
||||
type rvStrings struct{ rvs }
|
||||
|
||||
func (x rvStrings) Less(i, j int) bool { return x.rvs[i].String() < x.rvs[j].String() }
|
||||
|
||||
// sortKeys sorts (if it can) the slice of reflect.Values, which is a slice of map keys.
|
||||
func sortKeys(v []reflect.Value) []reflect.Value {
|
||||
if len(v) <= 1 {
|
||||
return v
|
||||
}
|
||||
switch v[0].Kind() {
|
||||
case reflect.Float32, reflect.Float64:
|
||||
sort.Sort(rvFloats{v})
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
sort.Sort(rvInts{v})
|
||||
case reflect.String:
|
||||
sort.Sort(rvStrings{v})
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
sort.Sort(rvUints{v})
|
||||
}
|
||||
return v
|
||||
}
|
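A minimal sketch of the behavior the sortKeys helper above guarantees: ranging over a map in a template visits the keys in sorted order, so output is reproducible. It assumes the vendored package is imported by the path used throughout this commit, github.com/alecthomas/template.

package main

import (
	"os"

	"github.com/alecthomas/template"
)

func main() {
	// Map iteration order is random in Go, but sortKeys makes {{range}}
	// visit the keys in sorted order, so this always prints "a=1 b=2 c=3 ".
	t := template.Must(template.New("demo").Parse(
		`{{range $k, $v := .}}{{$k}}={{$v}} {{end}}`))
	data := map[string]int{"b": 2, "a": 1, "c": 3}
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}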
1044
Godeps/_workspace/src/github.com/alecthomas/template/exec_test.go
generated
vendored
Normal file
File diff suppressed because it is too large
598
Godeps/_workspace/src/github.com/alecthomas/template/funcs.go
generated
vendored
Normal file
|
@@ -0,0 +1,598 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package template
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"reflect"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// FuncMap is the type of the map defining the mapping from names to functions.
|
||||
// Each function must have either a single return value, or two return values of
|
||||
// which the second has type error. In that case, if the second (error)
|
||||
// return value evaluates to non-nil during execution, execution terminates and
|
||||
// Execute returns that error.
|
||||
type FuncMap map[string]interface{}
|
||||
|
||||
var builtins = FuncMap{
|
||||
"and": and,
|
||||
"call": call,
|
||||
"html": HTMLEscaper,
|
||||
"index": index,
|
||||
"js": JSEscaper,
|
||||
"len": length,
|
||||
"not": not,
|
||||
"or": or,
|
||||
"print": fmt.Sprint,
|
||||
"printf": fmt.Sprintf,
|
||||
"println": fmt.Sprintln,
|
||||
"urlquery": URLQueryEscaper,
|
||||
|
||||
// Comparisons
|
||||
"eq": eq, // ==
|
||||
"ge": ge, // >=
|
||||
"gt": gt, // >
|
||||
"le": le, // <=
|
||||
"lt": lt, // <
|
||||
"ne": ne, // !=
|
||||
}
|
||||
|
||||
var builtinFuncs = createValueFuncs(builtins)
|
||||
|
||||
// createValueFuncs turns a FuncMap into a map[string]reflect.Value
|
||||
func createValueFuncs(funcMap FuncMap) map[string]reflect.Value {
|
||||
m := make(map[string]reflect.Value)
|
||||
addValueFuncs(m, funcMap)
|
||||
return m
|
||||
}
|
||||
|
||||
// addValueFuncs adds to values the functions in funcs, converting them to reflect.Values.
|
||||
func addValueFuncs(out map[string]reflect.Value, in FuncMap) {
|
||||
for name, fn := range in {
|
||||
v := reflect.ValueOf(fn)
|
||||
if v.Kind() != reflect.Func {
|
||||
panic("value for " + name + " not a function")
|
||||
}
|
||||
if !goodFunc(v.Type()) {
|
||||
panic(fmt.Errorf("can't install method/function %q with %d results", name, v.Type().NumOut()))
|
||||
}
|
||||
out[name] = v
|
||||
}
|
||||
}
|
||||
|
||||
// addFuncs adds to values the functions in funcs. It does no checking of the input -
|
||||
// call addValueFuncs first.
|
||||
func addFuncs(out, in FuncMap) {
|
||||
for name, fn := range in {
|
||||
out[name] = fn
|
||||
}
|
||||
}
|
||||
|
||||
// goodFunc checks that the function or method has the right result signature.
|
||||
func goodFunc(typ reflect.Type) bool {
|
||||
// We allow functions with 1 result or 2 results where the second is an error.
|
||||
switch {
|
||||
case typ.NumOut() == 1:
|
||||
return true
|
||||
case typ.NumOut() == 2 && typ.Out(1) == errorType:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// findFunction looks for a function in the template, and global map.
|
||||
func findFunction(name string, tmpl *Template) (reflect.Value, bool) {
|
||||
if tmpl != nil && tmpl.common != nil {
|
||||
if fn := tmpl.execFuncs[name]; fn.IsValid() {
|
||||
return fn, true
|
||||
}
|
||||
}
|
||||
if fn := builtinFuncs[name]; fn.IsValid() {
|
||||
return fn, true
|
||||
}
|
||||
return reflect.Value{}, false
|
||||
}
|
||||
|
||||
// Indexing.
|
||||
|
||||
// index returns the result of indexing its first argument by the following
|
||||
// arguments. Thus "index x 1 2 3" is, in Go syntax, x[1][2][3]. Each
|
||||
// indexed item must be a map, slice, or array.
|
||||
func index(item interface{}, indices ...interface{}) (interface{}, error) {
|
||||
v := reflect.ValueOf(item)
|
||||
for _, i := range indices {
|
||||
index := reflect.ValueOf(i)
|
||||
var isNil bool
|
||||
if v, isNil = indirect(v); isNil {
|
||||
return nil, fmt.Errorf("index of nil pointer")
|
||||
}
|
||||
switch v.Kind() {
|
||||
case reflect.Array, reflect.Slice, reflect.String:
|
||||
var x int64
|
||||
switch index.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
x = index.Int()
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
x = int64(index.Uint())
|
||||
default:
|
||||
return nil, fmt.Errorf("cannot index slice/array with type %s", index.Type())
|
||||
}
|
||||
if x < 0 || x >= int64(v.Len()) {
|
||||
return nil, fmt.Errorf("index out of range: %d", x)
|
||||
}
|
||||
v = v.Index(int(x))
|
||||
case reflect.Map:
|
||||
if !index.IsValid() {
|
||||
index = reflect.Zero(v.Type().Key())
|
||||
}
|
||||
if !index.Type().AssignableTo(v.Type().Key()) {
|
||||
return nil, fmt.Errorf("%s is not index type for %s", index.Type(), v.Type())
|
||||
}
|
||||
if x := v.MapIndex(index); x.IsValid() {
|
||||
v = x
|
||||
} else {
|
||||
v = reflect.Zero(v.Type().Elem())
|
||||
}
|
||||
default:
|
||||
return nil, fmt.Errorf("can't index item of type %s", v.Type())
|
||||
}
|
||||
}
|
||||
return v.Interface(), nil
|
||||
}
|
||||
|
||||
// Length
|
||||
|
||||
// length returns the length of the item, with an error if it has no defined length.
|
||||
func length(item interface{}) (int, error) {
|
||||
v, isNil := indirect(reflect.ValueOf(item))
|
||||
if isNil {
|
||||
return 0, fmt.Errorf("len of nil pointer")
|
||||
}
|
||||
switch v.Kind() {
|
||||
case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
|
||||
return v.Len(), nil
|
||||
}
|
||||
return 0, fmt.Errorf("len of type %s", v.Type())
|
||||
}
|
||||
|
||||
// Function invocation
|
||||
|
||||
// call returns the result of evaluating the first argument as a function.
|
||||
// The function must return 1 result, or 2 results, the second of which is an error.
|
||||
func call(fn interface{}, args ...interface{}) (interface{}, error) {
|
||||
v := reflect.ValueOf(fn)
|
||||
typ := v.Type()
|
||||
if typ.Kind() != reflect.Func {
|
||||
return nil, fmt.Errorf("non-function of type %s", typ)
|
||||
}
|
||||
if !goodFunc(typ) {
|
||||
return nil, fmt.Errorf("function called with %d args; should be 1 or 2", typ.NumOut())
|
||||
}
|
||||
numIn := typ.NumIn()
|
||||
var dddType reflect.Type
|
||||
if typ.IsVariadic() {
|
||||
if len(args) < numIn-1 {
|
||||
return nil, fmt.Errorf("wrong number of args: got %d want at least %d", len(args), numIn-1)
|
||||
}
|
||||
dddType = typ.In(numIn - 1).Elem()
|
||||
} else {
|
||||
if len(args) != numIn {
|
||||
return nil, fmt.Errorf("wrong number of args: got %d want %d", len(args), numIn)
|
||||
}
|
||||
}
|
||||
argv := make([]reflect.Value, len(args))
|
||||
for i, arg := range args {
|
||||
value := reflect.ValueOf(arg)
|
||||
// Compute the expected type. Clumsy because of variadics.
|
||||
var argType reflect.Type
|
||||
if !typ.IsVariadic() || i < numIn-1 {
|
||||
argType = typ.In(i)
|
||||
} else {
|
||||
argType = dddType
|
||||
}
|
||||
if !value.IsValid() && canBeNil(argType) {
|
||||
value = reflect.Zero(argType)
|
||||
}
|
||||
if !value.Type().AssignableTo(argType) {
|
||||
return nil, fmt.Errorf("arg %d has type %s; should be %s", i, value.Type(), argType)
|
||||
}
|
||||
argv[i] = value
|
||||
}
|
||||
result := v.Call(argv)
|
||||
if len(result) == 2 && !result[1].IsNil() {
|
||||
return result[0].Interface(), result[1].Interface().(error)
|
||||
}
|
||||
return result[0].Interface(), nil
|
||||
}
|
||||
|
||||
// Boolean logic.
|
||||
|
||||
func truth(a interface{}) bool {
|
||||
t, _ := isTrue(reflect.ValueOf(a))
|
||||
return t
|
||||
}
|
||||
|
||||
// and computes the Boolean AND of its arguments, returning
|
||||
// the first false argument it encounters, or the last argument.
|
||||
func and(arg0 interface{}, args ...interface{}) interface{} {
|
||||
if !truth(arg0) {
|
||||
return arg0
|
||||
}
|
||||
for i := range args {
|
||||
arg0 = args[i]
|
||||
if !truth(arg0) {
|
||||
break
|
||||
}
|
||||
}
|
||||
return arg0
|
||||
}
|
||||
|
||||
// or computes the Boolean OR of its arguments, returning
|
||||
// the first true argument it encounters, or the last argument.
|
||||
func or(arg0 interface{}, args ...interface{}) interface{} {
|
||||
if truth(arg0) {
|
||||
return arg0
|
||||
}
|
||||
for i := range args {
|
||||
arg0 = args[i]
|
||||
if truth(arg0) {
|
||||
break
|
||||
}
|
||||
}
|
||||
return arg0
|
||||
}
|
||||
|
||||
// not returns the Boolean negation of its argument.
|
||||
func not(arg interface{}) (truth bool) {
|
||||
truth, _ = isTrue(reflect.ValueOf(arg))
|
||||
return !truth
|
||||
}
|
||||
|
||||
// Comparison.
|
||||
|
||||
// TODO: Perhaps allow comparison between signed and unsigned integers.
|
||||
|
||||
var (
|
||||
errBadComparisonType = errors.New("invalid type for comparison")
|
||||
errBadComparison = errors.New("incompatible types for comparison")
|
||||
errNoComparison = errors.New("missing argument for comparison")
|
||||
)
|
||||
|
||||
type kind int
|
||||
|
||||
const (
|
||||
invalidKind kind = iota
|
||||
boolKind
|
||||
complexKind
|
||||
intKind
|
||||
floatKind
|
||||
integerKind
|
||||
stringKind
|
||||
uintKind
|
||||
)
|
||||
|
||||
func basicKind(v reflect.Value) (kind, error) {
|
||||
switch v.Kind() {
|
||||
case reflect.Bool:
|
||||
return boolKind, nil
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return intKind, nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return uintKind, nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return floatKind, nil
|
||||
case reflect.Complex64, reflect.Complex128:
|
||||
return complexKind, nil
|
||||
case reflect.String:
|
||||
return stringKind, nil
|
||||
}
|
||||
return invalidKind, errBadComparisonType
|
||||
}
|
||||
|
||||
// eq evaluates the comparison a == b || a == c || ...
|
||||
func eq(arg1 interface{}, arg2 ...interface{}) (bool, error) {
|
||||
v1 := reflect.ValueOf(arg1)
|
||||
k1, err := basicKind(v1)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if len(arg2) == 0 {
|
||||
return false, errNoComparison
|
||||
}
|
||||
for _, arg := range arg2 {
|
||||
v2 := reflect.ValueOf(arg)
|
||||
k2, err := basicKind(v2)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
truth := false
|
||||
if k1 != k2 {
|
||||
// Special case: Can compare integer values regardless of type's sign.
|
||||
switch {
|
||||
case k1 == intKind && k2 == uintKind:
|
||||
truth = v1.Int() >= 0 && uint64(v1.Int()) == v2.Uint()
|
||||
case k1 == uintKind && k2 == intKind:
|
||||
truth = v2.Int() >= 0 && v1.Uint() == uint64(v2.Int())
|
||||
default:
|
||||
return false, errBadComparison
|
||||
}
|
||||
} else {
|
||||
switch k1 {
|
||||
case boolKind:
|
||||
truth = v1.Bool() == v2.Bool()
|
||||
case complexKind:
|
||||
truth = v1.Complex() == v2.Complex()
|
||||
case floatKind:
|
||||
truth = v1.Float() == v2.Float()
|
||||
case intKind:
|
||||
truth = v1.Int() == v2.Int()
|
||||
case stringKind:
|
||||
truth = v1.String() == v2.String()
|
||||
case uintKind:
|
||||
truth = v1.Uint() == v2.Uint()
|
||||
default:
|
||||
panic("invalid kind")
|
||||
}
|
||||
}
|
||||
if truth {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// ne evaluates the comparison a != b.
|
||||
func ne(arg1, arg2 interface{}) (bool, error) {
|
||||
// != is the inverse of ==.
|
||||
equal, err := eq(arg1, arg2)
|
||||
return !equal, err
|
||||
}
|
||||
|
||||
// lt evaluates the comparison a < b.
|
||||
func lt(arg1, arg2 interface{}) (bool, error) {
|
||||
v1 := reflect.ValueOf(arg1)
|
||||
k1, err := basicKind(v1)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
v2 := reflect.ValueOf(arg2)
|
||||
k2, err := basicKind(v2)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
truth := false
|
||||
if k1 != k2 {
|
||||
// Special case: Can compare integer values regardless of type's sign.
|
||||
switch {
|
||||
case k1 == intKind && k2 == uintKind:
|
||||
truth = v1.Int() < 0 || uint64(v1.Int()) < v2.Uint()
|
||||
case k1 == uintKind && k2 == intKind:
|
||||
truth = v2.Int() >= 0 && v1.Uint() < uint64(v2.Int())
|
||||
default:
|
||||
return false, errBadComparison
|
||||
}
|
||||
} else {
|
||||
switch k1 {
|
||||
case boolKind, complexKind:
|
||||
return false, errBadComparisonType
|
||||
case floatKind:
|
||||
truth = v1.Float() < v2.Float()
|
||||
case intKind:
|
||||
truth = v1.Int() < v2.Int()
|
||||
case stringKind:
|
||||
truth = v1.String() < v2.String()
|
||||
case uintKind:
|
||||
truth = v1.Uint() < v2.Uint()
|
||||
default:
|
||||
panic("invalid kind")
|
||||
}
|
||||
}
|
||||
return truth, nil
|
||||
}
|
||||
|
||||
// le evaluates the comparison a <= b.
|
||||
func le(arg1, arg2 interface{}) (bool, error) {
|
||||
// <= is < or ==.
|
||||
lessThan, err := lt(arg1, arg2)
|
||||
if lessThan || err != nil {
|
||||
return lessThan, err
|
||||
}
|
||||
return eq(arg1, arg2)
|
||||
}
|
||||
|
||||
// gt evaluates the comparison a > b.
|
||||
func gt(arg1, arg2 interface{}) (bool, error) {
|
||||
// > is the inverse of <=.
|
||||
lessOrEqual, err := le(arg1, arg2)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return !lessOrEqual, nil
|
||||
}
|
||||
|
||||
// ge evaluates the comparison a >= b.
|
||||
func ge(arg1, arg2 interface{}) (bool, error) {
|
||||
// >= is the inverse of <.
|
||||
lessThan, err := lt(arg1, arg2)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return !lessThan, nil
|
||||
}
|
||||
|
||||
// HTML escaping.
|
||||
|
||||
var (
|
||||
htmlQuot = []byte("&#34;") // shorter than "&quot;"
|
||||
htmlApos = []byte("&#39;") // shorter than "&apos;" and apos was not in HTML until HTML5
|
||||
htmlAmp = []byte("&amp;")
|
||||
htmlLt = []byte("&lt;")
|
||||
htmlGt = []byte("&gt;")
|
||||
)
|
||||
|
||||
// HTMLEscape writes to w the escaped HTML equivalent of the plain text data b.
|
||||
func HTMLEscape(w io.Writer, b []byte) {
|
||||
last := 0
|
||||
for i, c := range b {
|
||||
var html []byte
|
||||
switch c {
|
||||
case '"':
|
||||
html = htmlQuot
|
||||
case '\'':
|
||||
html = htmlApos
|
||||
case '&':
|
||||
html = htmlAmp
|
||||
case '<':
|
||||
html = htmlLt
|
||||
case '>':
|
||||
html = htmlGt
|
||||
default:
|
||||
continue
|
||||
}
|
||||
w.Write(b[last:i])
|
||||
w.Write(html)
|
||||
last = i + 1
|
||||
}
|
||||
w.Write(b[last:])
|
||||
}
|
||||
|
||||
// HTMLEscapeString returns the escaped HTML equivalent of the plain text data s.
|
||||
func HTMLEscapeString(s string) string {
|
||||
// Avoid allocation if we can.
|
||||
if strings.IndexAny(s, `'"&<>`) < 0 {
|
||||
return s
|
||||
}
|
||||
var b bytes.Buffer
|
||||
HTMLEscape(&b, []byte(s))
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// HTMLEscaper returns the escaped HTML equivalent of the textual
|
||||
// representation of its arguments.
|
||||
func HTMLEscaper(args ...interface{}) string {
|
||||
return HTMLEscapeString(evalArgs(args))
|
||||
}
|
||||
|
||||
// JavaScript escaping.
|
||||
|
||||
var (
|
||||
jsLowUni = []byte(`\u00`)
|
||||
hex = []byte("0123456789ABCDEF")
|
||||
|
||||
jsBackslash = []byte(`\\`)
|
||||
jsApos = []byte(`\'`)
|
||||
jsQuot = []byte(`\"`)
|
||||
jsLt = []byte(`\x3C`)
|
||||
jsGt = []byte(`\x3E`)
|
||||
)
|
||||
|
||||
// JSEscape writes to w the escaped JavaScript equivalent of the plain text data b.
|
||||
func JSEscape(w io.Writer, b []byte) {
|
||||
last := 0
|
||||
for i := 0; i < len(b); i++ {
|
||||
c := b[i]
|
||||
|
||||
if !jsIsSpecial(rune(c)) {
|
||||
// fast path: nothing to do
|
||||
continue
|
||||
}
|
||||
w.Write(b[last:i])
|
||||
|
||||
if c < utf8.RuneSelf {
|
||||
// Quotes, slashes and angle brackets get quoted.
|
||||
// Control characters get written as \u00XX.
|
||||
switch c {
|
||||
case '\\':
|
||||
w.Write(jsBackslash)
|
||||
case '\'':
|
||||
w.Write(jsApos)
|
||||
case '"':
|
||||
w.Write(jsQuot)
|
||||
case '<':
|
||||
w.Write(jsLt)
|
||||
case '>':
|
||||
w.Write(jsGt)
|
||||
default:
|
||||
w.Write(jsLowUni)
|
||||
t, b := c>>4, c&0x0f
|
||||
w.Write(hex[t : t+1])
|
||||
w.Write(hex[b : b+1])
|
||||
}
|
||||
} else {
|
||||
// Unicode rune.
|
||||
r, size := utf8.DecodeRune(b[i:])
|
||||
if unicode.IsPrint(r) {
|
||||
w.Write(b[i : i+size])
|
||||
} else {
|
||||
fmt.Fprintf(w, "\\u%04X", r)
|
||||
}
|
||||
i += size - 1
|
||||
}
|
||||
last = i + 1
|
||||
}
|
||||
w.Write(b[last:])
|
||||
}
|
||||
|
||||
// JSEscapeString returns the escaped JavaScript equivalent of the plain text data s.
|
||||
func JSEscapeString(s string) string {
|
||||
// Avoid allocation if we can.
|
||||
if strings.IndexFunc(s, jsIsSpecial) < 0 {
|
||||
return s
|
||||
}
|
||||
var b bytes.Buffer
|
||||
JSEscape(&b, []byte(s))
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func jsIsSpecial(r rune) bool {
|
||||
switch r {
|
||||
case '\\', '\'', '"', '<', '>':
|
||||
return true
|
||||
}
|
||||
return r < ' ' || utf8.RuneSelf <= r
|
||||
}
|
||||
|
||||
// JSEscaper returns the escaped JavaScript equivalent of the textual
|
||||
// representation of its arguments.
|
||||
func JSEscaper(args ...interface{}) string {
|
||||
return JSEscapeString(evalArgs(args))
|
||||
}
|
||||
|
||||
// URLQueryEscaper returns the escaped value of the textual representation of
|
||||
// its arguments in a form suitable for embedding in a URL query.
|
||||
func URLQueryEscaper(args ...interface{}) string {
|
||||
return url.QueryEscape(evalArgs(args))
|
||||
}
|
||||
|
||||
// evalArgs formats the list of arguments into a string. It is therefore equivalent to
|
||||
// fmt.Sprint(args...)
|
||||
// except that each argument is indirected (if a pointer), as required,
|
||||
// using the same rules as the default string evaluation during template
|
||||
// execution.
|
||||
func evalArgs(args []interface{}) string {
|
||||
ok := false
|
||||
var s string
|
||||
// Fast path for simple common case.
|
||||
if len(args) == 1 {
|
||||
s, ok = args[0].(string)
|
||||
}
|
||||
if !ok {
|
||||
for i, arg := range args {
|
||||
a, ok := printableValue(reflect.ValueOf(arg))
|
||||
if ok {
|
||||
args[i] = a
|
||||
} // else let fmt do its thing
|
||||
}
|
||||
s = fmt.Sprint(args...)
|
||||
}
|
||||
return s
|
||||
}
|
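A short sketch of the FuncMap contract documented in funcs.go above: an installed function returns either a single value, or a value plus an error that aborts execution when non-nil. The function names here are made up for illustration; only the vendored import path is taken from this commit.

package main

import (
	"fmt"
	"os"

	"github.com/alecthomas/template"
)

func main() {
	funcs := template.FuncMap{
		// Single result.
		"add": func(a, b int) int { return a + b },
		// Result plus error: a non-nil error stops Execute and is returned.
		"div": func(a, b int) (int, error) {
			if b == 0 {
				return 0, fmt.Errorf("division by zero")
			}
			return a / b, nil
		},
	}
	t := template.Must(template.New("demo").Funcs(funcs).Parse(
		`{{add 1 2}} {{div 10 2}}`))
	if err := t.Execute(os.Stdout, nil); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}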
108
Godeps/_workspace/src/github.com/alecthomas/template/helper.go
generated
vendored
Normal file
|
@@ -0,0 +1,108 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Helper functions to make constructing templates easier.
|
||||
|
||||
package template
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// Functions and methods to parse templates.
|
||||
|
||||
// Must is a helper that wraps a call to a function returning (*Template, error)
|
||||
// and panics if the error is non-nil. It is intended for use in variable
|
||||
// initializations such as
|
||||
// var t = template.Must(template.New("name").Parse("text"))
|
||||
func Must(t *Template, err error) *Template {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
// ParseFiles creates a new Template and parses the template definitions from
|
||||
// the named files. The returned template's name will have the (base) name and
|
||||
// (parsed) contents of the first file. There must be at least one file.
|
||||
// If an error occurs, parsing stops and the returned *Template is nil.
|
||||
func ParseFiles(filenames ...string) (*Template, error) {
|
||||
return parseFiles(nil, filenames...)
|
||||
}
|
||||
|
||||
// ParseFiles parses the named files and associates the resulting templates with
|
||||
// t. If an error occurs, parsing stops and the returned template is nil;
|
||||
// otherwise it is t. There must be at least one file.
|
||||
func (t *Template) ParseFiles(filenames ...string) (*Template, error) {
|
||||
return parseFiles(t, filenames...)
|
||||
}
|
||||
|
||||
// parseFiles is the helper for the method and function. If the argument
|
||||
// template is nil, it is created from the first file.
|
||||
func parseFiles(t *Template, filenames ...string) (*Template, error) {
|
||||
if len(filenames) == 0 {
|
||||
// Not really a problem, but be consistent.
|
||||
return nil, fmt.Errorf("template: no files named in call to ParseFiles")
|
||||
}
|
||||
for _, filename := range filenames {
|
||||
b, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s := string(b)
|
||||
name := filepath.Base(filename)
|
||||
// First template becomes return value if not already defined,
|
||||
// and we use that one for subsequent New calls to associate
|
||||
// all the templates together. Also, if this file has the same name
|
||||
// as t, this file becomes the contents of t, so
|
||||
// t, err := New(name).Funcs(xxx).ParseFiles(name)
|
||||
// works. Otherwise we create a new template associated with t.
|
||||
var tmpl *Template
|
||||
if t == nil {
|
||||
t = New(name)
|
||||
}
|
||||
if name == t.Name() {
|
||||
tmpl = t
|
||||
} else {
|
||||
tmpl = t.New(name)
|
||||
}
|
||||
_, err = tmpl.Parse(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// ParseGlob creates a new Template and parses the template definitions from the
|
||||
// files identified by the pattern, which must match at least one file. The
|
||||
// returned template will have the (base) name and (parsed) contents of the
|
||||
// first file matched by the pattern. ParseGlob is equivalent to calling
|
||||
// ParseFiles with the list of files matched by the pattern.
|
||||
func ParseGlob(pattern string) (*Template, error) {
|
||||
return parseGlob(nil, pattern)
|
||||
}
|
||||
|
||||
// ParseGlob parses the template definitions in the files identified by the
|
||||
// pattern and associates the resulting templates with t. The pattern is
|
||||
// processed by filepath.Glob and must match at least one file. ParseGlob is
|
||||
// equivalent to calling t.ParseFiles with the list of files matched by the
|
||||
// pattern.
|
||||
func (t *Template) ParseGlob(pattern string) (*Template, error) {
|
||||
return parseGlob(t, pattern)
|
||||
}
|
||||
|
||||
// parseGlob is the implementation of the function and method ParseGlob.
|
||||
func parseGlob(t *Template, pattern string) (*Template, error) {
|
||||
filenames, err := filepath.Glob(pattern)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(filenames) == 0 {
|
||||
return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern)
|
||||
}
|
||||
return parseFiles(t, filenames...)
|
||||
}
|
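How the helpers above are commonly combined, as a hedged sketch: Must turns a parse error into a panic, which suits package-level initialization, and ParseGlob associates every matched file with one template set. The glob pattern and template name below are hypothetical.

package main

import (
	"os"

	"github.com/alecthomas/template"
)

func main() {
	// Panics if the pattern matches no files or any file fails to parse.
	t := template.Must(template.ParseGlob("templates/*.tmpl"))
	// Each parsed file is addressable by its base name.
	if err := t.ExecuteTemplate(os.Stdout, "index.tmpl", nil); err != nil {
		panic(err)
	}
}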
292
Godeps/_workspace/src/github.com/alecthomas/template/multi_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,292 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package template
|
||||
|
||||
// Tests for multiple-template parsing and execution.
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/alecthomas/template/parse"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
const (
|
||||
noError = true
|
||||
hasError = false
|
||||
)
|
||||
|
||||
type multiParseTest struct {
|
||||
name string
|
||||
input string
|
||||
ok bool
|
||||
names []string
|
||||
results []string
|
||||
}
|
||||
|
||||
var multiParseTests = []multiParseTest{
|
||||
{"empty", "", noError,
|
||||
nil,
|
||||
nil},
|
||||
{"one", `{{define "foo"}} FOO {{end}}`, noError,
|
||||
[]string{"foo"},
|
||||
[]string{" FOO "}},
|
||||
{"two", `{{define "foo"}} FOO {{end}}{{define "bar"}} BAR {{end}}`, noError,
|
||||
[]string{"foo", "bar"},
|
||||
[]string{" FOO ", " BAR "}},
|
||||
// errors
|
||||
{"missing end", `{{define "foo"}} FOO `, hasError,
|
||||
nil,
|
||||
nil},
|
||||
{"malformed name", `{{define "foo}} FOO `, hasError,
|
||||
nil,
|
||||
nil},
|
||||
}
|
||||
|
||||
func TestMultiParse(t *testing.T) {
|
||||
for _, test := range multiParseTests {
|
||||
template, err := New("root").Parse(test.input)
|
||||
switch {
|
||||
case err == nil && !test.ok:
|
||||
t.Errorf("%q: expected error; got none", test.name)
|
||||
continue
|
||||
case err != nil && test.ok:
|
||||
t.Errorf("%q: unexpected error: %v", test.name, err)
|
||||
continue
|
||||
case err != nil && !test.ok:
|
||||
// expected error, got one
|
||||
if *debug {
|
||||
fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if template == nil {
|
||||
continue
|
||||
}
|
||||
if len(template.tmpl) != len(test.names)+1 { // +1 for root
|
||||
t.Errorf("%s: wrong number of templates; wanted %d got %d", test.name, len(test.names), len(template.tmpl))
|
||||
continue
|
||||
}
|
||||
for i, name := range test.names {
|
||||
tmpl, ok := template.tmpl[name]
|
||||
if !ok {
|
||||
t.Errorf("%s: can't find template %q", test.name, name)
|
||||
continue
|
||||
}
|
||||
result := tmpl.Root.String()
|
||||
if result != test.results[i] {
|
||||
t.Errorf("%s=(%q): got\n\t%v\nexpected\n\t%v", test.name, test.input, result, test.results[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var multiExecTests = []execTest{
|
||||
{"empty", "", "", nil, true},
|
||||
{"text", "some text", "some text", nil, true},
|
||||
{"invoke x", `{{template "x" .SI}}`, "TEXT", tVal, true},
|
||||
{"invoke x no args", `{{template "x"}}`, "TEXT", tVal, true},
|
||||
{"invoke dot int", `{{template "dot" .I}}`, "17", tVal, true},
|
||||
{"invoke dot []int", `{{template "dot" .SI}}`, "[3 4 5]", tVal, true},
|
||||
{"invoke dotV", `{{template "dotV" .U}}`, "v", tVal, true},
|
||||
{"invoke nested int", `{{template "nested" .I}}`, "17", tVal, true},
|
||||
{"variable declared by template", `{{template "nested" $x:=.SI}},{{index $x 1}}`, "[3 4 5],4", tVal, true},
|
||||
|
||||
// User-defined function: test argument evaluator.
|
||||
{"testFunc literal", `{{oneArg "joe"}}`, "oneArg=joe", tVal, true},
|
||||
{"testFunc .", `{{oneArg .}}`, "oneArg=joe", "joe", true},
|
||||
}
|
||||
|
||||
// These strings are also in testdata/*.
|
||||
const multiText1 = `
|
||||
{{define "x"}}TEXT{{end}}
|
||||
{{define "dotV"}}{{.V}}{{end}}
|
||||
`
|
||||
|
||||
const multiText2 = `
|
||||
{{define "dot"}}{{.}}{{end}}
|
||||
{{define "nested"}}{{template "dot" .}}{{end}}
|
||||
`
|
||||
|
||||
func TestMultiExecute(t *testing.T) {
|
||||
// Declare a couple of templates first.
|
||||
template, err := New("root").Parse(multiText1)
|
||||
if err != nil {
|
||||
t.Fatalf("parse error for 1: %s", err)
|
||||
}
|
||||
_, err = template.Parse(multiText2)
|
||||
if err != nil {
|
||||
t.Fatalf("parse error for 2: %s", err)
|
||||
}
|
||||
testExecute(multiExecTests, template, t)
|
||||
}
|
||||
|
||||
func TestParseFiles(t *testing.T) {
|
||||
_, err := ParseFiles("DOES NOT EXIST")
|
||||
if err == nil {
|
||||
t.Error("expected error for non-existent file; got none")
|
||||
}
|
||||
template := New("root")
|
||||
_, err = template.ParseFiles("testdata/file1.tmpl", "testdata/file2.tmpl")
|
||||
if err != nil {
|
||||
t.Fatalf("error parsing files: %v", err)
|
||||
}
|
||||
testExecute(multiExecTests, template, t)
|
||||
}
|
||||
|
||||
func TestParseGlob(t *testing.T) {
|
||||
_, err := ParseGlob("DOES NOT EXIST")
|
||||
if err == nil {
|
||||
t.Error("expected error for non-existent file; got none")
|
||||
}
|
||||
_, err = New("error").ParseGlob("[x")
|
||||
if err == nil {
|
||||
t.Error("expected error for bad pattern; got none")
|
||||
}
|
||||
template := New("root")
|
||||
_, err = template.ParseGlob("testdata/file*.tmpl")
|
||||
if err != nil {
|
||||
t.Fatalf("error parsing files: %v", err)
|
||||
}
|
||||
testExecute(multiExecTests, template, t)
|
||||
}
|
||||
|
||||
// In these tests, actual content (not just template definitions) comes from the parsed files.
|
||||
|
||||
var templateFileExecTests = []execTest{
|
||||
{"test", `{{template "tmpl1.tmpl"}}{{template "tmpl2.tmpl"}}`, "template1\n\ny\ntemplate2\n\nx\n", 0, true},
|
||||
}
|
||||
|
||||
func TestParseFilesWithData(t *testing.T) {
|
||||
template, err := New("root").ParseFiles("testdata/tmpl1.tmpl", "testdata/tmpl2.tmpl")
|
||||
if err != nil {
|
||||
t.Fatalf("error parsing files: %v", err)
|
||||
}
|
||||
testExecute(templateFileExecTests, template, t)
|
||||
}
|
||||
|
||||
func TestParseGlobWithData(t *testing.T) {
|
||||
template, err := New("root").ParseGlob("testdata/tmpl*.tmpl")
|
||||
if err != nil {
|
||||
t.Fatalf("error parsing files: %v", err)
|
||||
}
|
||||
testExecute(templateFileExecTests, template, t)
|
||||
}
|
||||
|
||||
const (
|
||||
cloneText1 = `{{define "a"}}{{template "b"}}{{template "c"}}{{end}}`
|
||||
cloneText2 = `{{define "b"}}b{{end}}`
|
||||
cloneText3 = `{{define "c"}}root{{end}}`
|
||||
cloneText4 = `{{define "c"}}clone{{end}}`
|
||||
)
|
||||
|
||||
func TestClone(t *testing.T) {
|
||||
// Create some templates and clone the root.
|
||||
root, err := New("root").Parse(cloneText1)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
_, err = root.Parse(cloneText2)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
clone := Must(root.Clone())
|
||||
// Add variants to both.
|
||||
_, err = root.Parse(cloneText3)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
_, err = clone.Parse(cloneText4)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
// Verify that the clone is self-consistent.
|
||||
for k, v := range clone.tmpl {
|
||||
if k == clone.name && v.tmpl[k] != clone {
|
||||
t.Error("clone does not contain root")
|
||||
}
|
||||
if v != v.tmpl[v.name] {
|
||||
t.Errorf("clone does not contain self for %q", k)
|
||||
}
|
||||
}
|
||||
// Execute root.
|
||||
var b bytes.Buffer
|
||||
err = root.ExecuteTemplate(&b, "a", 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if b.String() != "broot" {
|
||||
t.Errorf("expected %q got %q", "broot", b.String())
|
||||
}
|
||||
// Execute copy.
|
||||
b.Reset()
|
||||
err = clone.ExecuteTemplate(&b, "a", 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if b.String() != "bclone" {
|
||||
t.Errorf("expected %q got %q", "bclone", b.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestAddParseTree(t *testing.T) {
|
||||
// Create some templates.
|
||||
root, err := New("root").Parse(cloneText1)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
_, err = root.Parse(cloneText2)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
// Add a new parse tree.
|
||||
tree, err := parse.Parse("cloneText3", cloneText3, "", "", nil, builtins)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
added, err := root.AddParseTree("c", tree["c"])
|
||||
// Execute.
|
||||
var b bytes.Buffer
|
||||
err = added.ExecuteTemplate(&b, "a", 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if b.String() != "broot" {
|
||||
t.Errorf("expected %q got %q", "broot", b.String())
|
||||
}
|
||||
}
|
||||
|
||||
// Issue 7032
|
||||
func TestAddParseTreeToUnparsedTemplate(t *testing.T) {
|
||||
master := "{{define \"master\"}}{{end}}"
|
||||
tmpl := New("master")
|
||||
tree, err := parse.Parse("master", master, "", "", nil)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected parse err: %v", err)
|
||||
}
|
||||
masterTree := tree["master"]
|
||||
tmpl.AddParseTree("master", masterTree) // used to panic
|
||||
}
|
||||
|
||||
func TestRedefinition(t *testing.T) {
|
||||
var tmpl *Template
|
||||
var err error
|
||||
if tmpl, err = New("tmpl1").Parse(`{{define "test"}}foo{{end}}`); err != nil {
|
||||
t.Fatalf("parse 1: %v", err)
|
||||
}
|
||||
if _, err = tmpl.Parse(`{{define "test"}}bar{{end}}`); err == nil {
|
||||
t.Fatal("expected error")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "redefinition") {
|
||||
t.Fatalf("expected redefinition error; got %v", err)
|
||||
}
|
||||
if _, err = tmpl.New("tmpl2").Parse(`{{define "test"}}bar{{end}}`); err == nil {
|
||||
t.Fatal("expected error")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "redefinition") {
|
||||
t.Fatalf("expected redefinition error; got %v", err)
|
||||
}
|
||||
}
|
556
Godeps/_workspace/src/github.com/alecthomas/template/parse/lex.go
generated
vendored
Normal file
|
@@ -0,0 +1,556 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package parse
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// item represents a token or text string returned from the scanner.
|
||||
type item struct {
|
||||
typ itemType // The type of this item.
|
||||
pos Pos // The starting position, in bytes, of this item in the input string.
|
||||
val string // The value of this item.
|
||||
}
|
||||
|
||||
func (i item) String() string {
|
||||
switch {
|
||||
case i.typ == itemEOF:
|
||||
return "EOF"
|
||||
case i.typ == itemError:
|
||||
return i.val
|
||||
case i.typ > itemKeyword:
|
||||
return fmt.Sprintf("<%s>", i.val)
|
||||
case len(i.val) > 10:
|
||||
return fmt.Sprintf("%.10q...", i.val)
|
||||
}
|
||||
return fmt.Sprintf("%q", i.val)
|
||||
}
|
||||
|
||||
// itemType identifies the type of lex items.
|
||||
type itemType int
|
||||
|
||||
const (
|
||||
itemError itemType = iota // error occurred; value is text of error
|
||||
itemBool // boolean constant
|
||||
itemChar // printable ASCII character; grab bag for comma etc.
|
||||
itemCharConstant // character constant
|
||||
itemComplex // complex constant (1+2i); imaginary is just a number
|
||||
itemColonEquals // colon-equals (':=') introducing a declaration
|
||||
itemEOF
|
||||
itemField // alphanumeric identifier starting with '.'
|
||||
itemIdentifier // alphanumeric identifier not starting with '.'
|
||||
itemLeftDelim // left action delimiter
|
||||
itemLeftParen // '(' inside action
|
||||
itemNumber // simple number, including imaginary
|
||||
itemPipe // pipe symbol
|
||||
itemRawString // raw quoted string (includes quotes)
|
||||
itemRightDelim // right action delimiter
|
||||
itemElideNewline // elide newline after right delim
|
||||
itemRightParen // ')' inside action
|
||||
itemSpace // run of spaces separating arguments
|
||||
itemString // quoted string (includes quotes)
|
||||
itemText // plain text
|
||||
itemVariable // variable starting with '$', such as '$' or '$1' or '$hello'
|
||||
// Keywords appear after all the rest.
|
||||
itemKeyword // used only to delimit the keywords
|
||||
itemDot // the cursor, spelled '.'
|
||||
itemDefine // define keyword
|
||||
itemElse // else keyword
|
||||
itemEnd // end keyword
|
||||
itemIf // if keyword
|
||||
itemNil // the untyped nil constant, easiest to treat as a keyword
|
||||
itemRange // range keyword
|
||||
itemTemplate // template keyword
|
||||
itemWith // with keyword
|
||||
)
|
||||
|
||||
var key = map[string]itemType{
|
||||
".": itemDot,
|
||||
"define": itemDefine,
|
||||
"else": itemElse,
|
||||
"end": itemEnd,
|
||||
"if": itemIf,
|
||||
"range": itemRange,
|
||||
"nil": itemNil,
|
||||
"template": itemTemplate,
|
||||
"with": itemWith,
|
||||
}
|
||||
|
||||
const eof = -1
|
||||
|
||||
// stateFn represents the state of the scanner as a function that returns the next state.
|
||||
type stateFn func(*lexer) stateFn
|
||||
|
||||
// lexer holds the state of the scanner.
|
||||
type lexer struct {
|
||||
name string // the name of the input; used only for error reports
|
||||
input string // the string being scanned
|
||||
leftDelim string // start of action
|
||||
rightDelim string // end of action
|
||||
state stateFn // the next lexing function to enter
|
||||
pos Pos // current position in the input
|
||||
start Pos // start position of this item
|
||||
width Pos // width of last rune read from input
|
||||
lastPos Pos // position of most recent item returned by nextItem
|
||||
items chan item // channel of scanned items
|
||||
parenDepth int // nesting depth of ( ) exprs
|
||||
}
|
||||
|
||||
// next returns the next rune in the input.
|
||||
func (l *lexer) next() rune {
|
||||
if int(l.pos) >= len(l.input) {
|
||||
l.width = 0
|
||||
return eof
|
||||
}
|
||||
r, w := utf8.DecodeRuneInString(l.input[l.pos:])
|
||||
l.width = Pos(w)
|
||||
l.pos += l.width
|
||||
return r
|
||||
}
|
||||
|
||||
// peek returns but does not consume the next rune in the input.
|
||||
func (l *lexer) peek() rune {
|
||||
r := l.next()
|
||||
l.backup()
|
||||
return r
|
||||
}
|
||||
|
||||
// backup steps back one rune. Can only be called once per call of next.
|
||||
func (l *lexer) backup() {
|
||||
l.pos -= l.width
|
||||
}
|
||||
|
||||
// emit passes an item back to the client.
|
||||
func (l *lexer) emit(t itemType) {
|
||||
l.items <- item{t, l.start, l.input[l.start:l.pos]}
|
||||
l.start = l.pos
|
||||
}
|
||||
|
||||
// ignore skips over the pending input before this point.
|
||||
func (l *lexer) ignore() {
|
||||
l.start = l.pos
|
||||
}
|
||||
|
||||
// accept consumes the next rune if it's from the valid set.
|
||||
func (l *lexer) accept(valid string) bool {
|
||||
if strings.IndexRune(valid, l.next()) >= 0 {
|
||||
return true
|
||||
}
|
||||
l.backup()
|
||||
return false
|
||||
}
|
||||
|
||||
// acceptRun consumes a run of runes from the valid set.
|
||||
func (l *lexer) acceptRun(valid string) {
|
||||
for strings.IndexRune(valid, l.next()) >= 0 {
|
||||
}
|
||||
l.backup()
|
||||
}
|
||||
|
||||
// lineNumber reports which line we're on, based on the position of
|
||||
// the previous item returned by nextItem. Doing it this way
|
||||
// means we don't have to worry about peek double counting.
|
||||
func (l *lexer) lineNumber() int {
|
||||
return 1 + strings.Count(l.input[:l.lastPos], "\n")
|
||||
}
|
||||
|
||||
// errorf returns an error token and terminates the scan by passing
|
||||
// back a nil pointer that will be the next state, terminating l.nextItem.
|
||||
func (l *lexer) errorf(format string, args ...interface{}) stateFn {
|
||||
l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)}
|
||||
return nil
|
||||
}
|
||||
|
||||
// nextItem returns the next item from the input.
|
||||
func (l *lexer) nextItem() item {
|
||||
item := <-l.items
|
||||
l.lastPos = item.pos
|
||||
return item
|
||||
}
|
||||
|
||||
// lex creates a new scanner for the input string.
|
||||
func lex(name, input, left, right string) *lexer {
|
||||
if left == "" {
|
||||
left = leftDelim
|
||||
}
|
||||
if right == "" {
|
||||
right = rightDelim
|
||||
}
|
||||
l := &lexer{
|
||||
name: name,
|
||||
input: input,
|
||||
leftDelim: left,
|
||||
rightDelim: right,
|
||||
items: make(chan item),
|
||||
}
|
||||
go l.run()
|
||||
return l
|
||||
}
|
||||
|
||||
// run runs the state machine for the lexer.
|
||||
func (l *lexer) run() {
|
||||
for l.state = lexText; l.state != nil; {
|
||||
l.state = l.state(l)
|
||||
}
|
||||
}
|
||||
|
||||
// state functions
|
||||
|
||||
const (
|
||||
leftDelim = "{{"
|
||||
rightDelim = "}}"
|
||||
leftComment = "/*"
|
||||
rightComment = "*/"
|
||||
)
|
||||
|
||||
// lexText scans until an opening action delimiter, "{{".
|
||||
func lexText(l *lexer) stateFn {
|
||||
for {
|
||||
if strings.HasPrefix(l.input[l.pos:], l.leftDelim) {
|
||||
if l.pos > l.start {
|
||||
l.emit(itemText)
|
||||
}
|
||||
return lexLeftDelim
|
||||
}
|
||||
if l.next() == eof {
|
||||
break
|
||||
}
|
||||
}
|
||||
// Correctly reached EOF.
|
||||
if l.pos > l.start {
|
||||
l.emit(itemText)
|
||||
}
|
||||
l.emit(itemEOF)
|
||||
return nil
|
||||
}
|
||||
|
||||
// lexLeftDelim scans the left delimiter, which is known to be present.
|
||||
func lexLeftDelim(l *lexer) stateFn {
|
||||
l.pos += Pos(len(l.leftDelim))
|
||||
if strings.HasPrefix(l.input[l.pos:], leftComment) {
|
||||
return lexComment
|
||||
}
|
||||
l.emit(itemLeftDelim)
|
||||
l.parenDepth = 0
|
||||
return lexInsideAction
|
||||
}
|
||||
|
||||
// lexComment scans a comment. The left comment marker is known to be present.
|
||||
func lexComment(l *lexer) stateFn {
|
||||
l.pos += Pos(len(leftComment))
|
||||
i := strings.Index(l.input[l.pos:], rightComment)
|
||||
if i < 0 {
|
||||
return l.errorf("unclosed comment")
|
||||
}
|
||||
l.pos += Pos(i + len(rightComment))
|
||||
if !strings.HasPrefix(l.input[l.pos:], l.rightDelim) {
|
||||
return l.errorf("comment ends before closing delimiter")
|
||||
|
||||
}
|
||||
l.pos += Pos(len(l.rightDelim))
|
||||
l.ignore()
|
||||
return lexText
|
||||
}
|
||||
|
||||
// lexRightDelim scans the right delimiter, which is known to be present.
|
||||
func lexRightDelim(l *lexer) stateFn {
|
||||
l.pos += Pos(len(l.rightDelim))
|
||||
l.emit(itemRightDelim)
|
||||
if l.peek() == '\\' {
|
||||
l.pos++
|
||||
l.emit(itemElideNewline)
|
||||
}
|
||||
return lexText
|
||||
}
|
||||
|
||||
// lexInsideAction scans the elements inside action delimiters.
|
||||
func lexInsideAction(l *lexer) stateFn {
|
||||
// Either number, quoted string, or identifier.
|
||||
// Spaces separate arguments; runs of spaces turn into itemSpace.
|
||||
// Pipe symbols separate and are emitted.
|
||||
if strings.HasPrefix(l.input[l.pos:], l.rightDelim+"\\") || strings.HasPrefix(l.input[l.pos:], l.rightDelim) {
|
||||
if l.parenDepth == 0 {
|
||||
return lexRightDelim
|
||||
}
|
||||
return l.errorf("unclosed left paren")
|
||||
}
|
||||
switch r := l.next(); {
|
||||
case r == eof || isEndOfLine(r):
|
||||
return l.errorf("unclosed action")
|
||||
case isSpace(r):
|
||||
return lexSpace
|
||||
case r == ':':
|
||||
if l.next() != '=' {
|
||||
return l.errorf("expected :=")
|
||||
}
|
||||
l.emit(itemColonEquals)
|
||||
case r == '|':
|
||||
l.emit(itemPipe)
|
||||
case r == '"':
|
||||
return lexQuote
|
||||
case r == '`':
|
||||
return lexRawQuote
|
||||
case r == '$':
|
||||
return lexVariable
|
||||
case r == '\'':
|
||||
return lexChar
|
||||
case r == '.':
|
||||
// special look-ahead for ".field" so we don't break l.backup().
|
||||
if l.pos < Pos(len(l.input)) {
|
||||
r := l.input[l.pos]
|
||||
if r < '0' || '9' < r {
|
||||
return lexField
|
||||
}
|
||||
}
|
||||
fallthrough // '.' can start a number.
|
||||
case r == '+' || r == '-' || ('0' <= r && r <= '9'):
|
||||
l.backup()
|
||||
return lexNumber
|
||||
case isAlphaNumeric(r):
|
||||
l.backup()
|
||||
return lexIdentifier
|
||||
case r == '(':
|
||||
l.emit(itemLeftParen)
|
||||
l.parenDepth++
|
||||
return lexInsideAction
|
||||
case r == ')':
|
||||
l.emit(itemRightParen)
|
||||
l.parenDepth--
|
||||
if l.parenDepth < 0 {
|
||||
return l.errorf("unexpected right paren %#U", r)
|
||||
}
|
||||
return lexInsideAction
|
||||
case r <= unicode.MaxASCII && unicode.IsPrint(r):
|
||||
l.emit(itemChar)
|
||||
return lexInsideAction
|
||||
default:
|
||||
return l.errorf("unrecognized character in action: %#U", r)
|
||||
}
|
||||
return lexInsideAction
|
||||
}
|
||||
|
||||
// lexSpace scans a run of space characters.
|
||||
// One space has already been seen.
|
||||
func lexSpace(l *lexer) stateFn {
|
||||
for isSpace(l.peek()) {
|
||||
l.next()
|
||||
}
|
||||
l.emit(itemSpace)
|
||||
return lexInsideAction
|
||||
}
|
||||
|
||||
// lexIdentifier scans an alphanumeric.
|
||||
func lexIdentifier(l *lexer) stateFn {
|
||||
Loop:
|
||||
for {
|
||||
switch r := l.next(); {
|
||||
case isAlphaNumeric(r):
|
||||
// absorb.
|
||||
default:
|
||||
l.backup()
|
||||
word := l.input[l.start:l.pos]
|
||||
if !l.atTerminator() {
|
||||
return l.errorf("bad character %#U", r)
|
||||
}
|
||||
switch {
|
||||
case key[word] > itemKeyword:
|
||||
l.emit(key[word])
|
||||
case word[0] == '.':
|
||||
l.emit(itemField)
|
||||
case word == "true", word == "false":
|
||||
l.emit(itemBool)
|
||||
default:
|
||||
l.emit(itemIdentifier)
|
||||
}
|
||||
break Loop
|
||||
}
|
||||
}
|
||||
return lexInsideAction
|
||||
}
|
||||
|
||||
// lexField scans a field: .Alphanumeric.
|
||||
// The . has been scanned.
|
||||
func lexField(l *lexer) stateFn {
|
||||
return lexFieldOrVariable(l, itemField)
|
||||
}
|
||||
|
||||
// lexVariable scans a Variable: $Alphanumeric.
|
||||
// The $ has been scanned.
|
||||
func lexVariable(l *lexer) stateFn {
|
||||
if l.atTerminator() { // Nothing interesting follows -> "$".
|
||||
l.emit(itemVariable)
|
||||
return lexInsideAction
|
||||
}
|
||||
return lexFieldOrVariable(l, itemVariable)
|
||||
}
|
||||
|
||||
// lexFieldOrVariable scans a field or variable: [.$]Alphanumeric.
|
||||
// The . or $ has been scanned.
|
||||
func lexFieldOrVariable(l *lexer, typ itemType) stateFn {
|
||||
if l.atTerminator() { // Nothing interesting follows -> "." or "$".
|
||||
if typ == itemVariable {
|
||||
l.emit(itemVariable)
|
||||
} else {
|
||||
l.emit(itemDot)
|
||||
}
|
||||
return lexInsideAction
|
||||
}
|
||||
var r rune
|
||||
for {
|
||||
r = l.next()
|
||||
if !isAlphaNumeric(r) {
|
||||
l.backup()
|
||||
break
|
||||
}
|
||||
}
|
||||
if !l.atTerminator() {
|
||||
return l.errorf("bad character %#U", r)
|
||||
}
|
||||
l.emit(typ)
|
||||
return lexInsideAction
|
||||
}
|
||||
|
||||
// atTerminator reports whether the input is at valid termination character to
|
||||
// appear after an identifier. Breaks .X.Y into two pieces. Also catches cases
|
||||
// like "$x+2" not being acceptable without a space, in case we decide one
|
||||
// day to implement arithmetic.
|
||||
func (l *lexer) atTerminator() bool {
|
||||
r := l.peek()
|
||||
if isSpace(r) || isEndOfLine(r) {
|
||||
return true
|
||||
}
|
||||
switch r {
|
||||
case eof, '.', ',', '|', ':', ')', '(':
|
||||
return true
|
||||
}
|
||||
// Does r start the delimiter? This can be ambiguous (with delim=="//", $x/2 will
|
||||
// succeed but should fail) but only in extremely rare cases caused by willfully
|
||||
// bad choice of delimiter.
|
||||
if rd, _ := utf8.DecodeRuneInString(l.rightDelim); rd == r {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// lexChar scans a character constant. The initial quote is already
|
||||
// scanned. Syntax checking is done by the parser.
|
||||
func lexChar(l *lexer) stateFn {
|
||||
Loop:
|
||||
for {
|
||||
switch l.next() {
|
||||
case '\\':
|
||||
if r := l.next(); r != eof && r != '\n' {
|
||||
break
|
||||
}
|
||||
fallthrough
|
||||
case eof, '\n':
|
||||
return l.errorf("unterminated character constant")
|
||||
case '\'':
|
||||
break Loop
|
||||
}
|
||||
}
|
||||
l.emit(itemCharConstant)
|
||||
return lexInsideAction
|
||||
}
|
||||
|
||||
// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This
|
||||
// isn't a perfect number scanner - for instance it accepts "." and "0x0.2"
|
||||
// and "089" - but when it's wrong the input is invalid and the parser (via
|
||||
// strconv) will notice.
|
||||
func lexNumber(l *lexer) stateFn {
|
||||
if !l.scanNumber() {
|
||||
return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
|
||||
}
|
||||
if sign := l.peek(); sign == '+' || sign == '-' {
|
||||
// Complex: 1+2i. No spaces, must end in 'i'.
|
||||
if !l.scanNumber() || l.input[l.pos-1] != 'i' {
|
||||
return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
|
||||
}
|
||||
l.emit(itemComplex)
|
||||
} else {
|
||||
l.emit(itemNumber)
|
||||
}
|
||||
return lexInsideAction
|
||||
}
|
||||
|
||||
func (l *lexer) scanNumber() bool {
|
||||
// Optional leading sign.
|
||||
l.accept("+-")
|
||||
// Is it hex?
|
||||
digits := "0123456789"
|
||||
if l.accept("0") && l.accept("xX") {
|
||||
digits = "0123456789abcdefABCDEF"
|
||||
}
|
||||
l.acceptRun(digits)
|
||||
if l.accept(".") {
|
||||
l.acceptRun(digits)
|
||||
}
|
||||
if l.accept("eE") {
|
||||
l.accept("+-")
|
||||
l.acceptRun("0123456789")
|
||||
}
|
||||
// Is it imaginary?
|
||||
l.accept("i")
|
||||
// Next thing mustn't be alphanumeric.
|
||||
if isAlphaNumeric(l.peek()) {
|
||||
l.next()
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// lexQuote scans a quoted string.
|
||||
func lexQuote(l *lexer) stateFn {
|
||||
Loop:
|
||||
for {
|
||||
switch l.next() {
|
||||
case '\\':
|
||||
if r := l.next(); r != eof && r != '\n' {
|
||||
break
|
||||
}
|
||||
fallthrough
|
||||
case eof, '\n':
|
||||
return l.errorf("unterminated quoted string")
|
||||
case '"':
|
||||
break Loop
|
||||
}
|
||||
}
|
||||
l.emit(itemString)
|
||||
return lexInsideAction
|
||||
}
|
||||
|
||||
// lexRawQuote scans a raw quoted string.
|
||||
func lexRawQuote(l *lexer) stateFn {
|
||||
Loop:
|
||||
for {
|
||||
switch l.next() {
|
||||
case eof, '\n':
|
||||
return l.errorf("unterminated raw quoted string")
|
||||
case '`':
|
||||
break Loop
|
||||
}
|
||||
}
|
||||
l.emit(itemRawString)
|
||||
return lexInsideAction
|
||||
}
|
||||
|
||||
// isSpace reports whether r is a space character.
|
||||
func isSpace(r rune) bool {
|
||||
return r == ' ' || r == '\t'
|
||||
}
|
||||
|
||||
// isEndOfLine reports whether r is an end-of-line character.
|
||||
func isEndOfLine(r rune) bool {
|
||||
return r == '\r' || r == '\n'
|
||||
}
|
||||
|
||||
// isAlphaNumeric reports whether r is an alphabetic, digit, or underscore.
|
||||
func isAlphaNumeric(r rune) bool {
|
||||
return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
|
||||
}
|
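The lex function above takes the left and right action delimiters as parameters; at the public API level they are configured with Delims, which this fork keeps from text/template (assumed unchanged here). A small sketch:

package main

import (
	"os"

	"github.com/alecthomas/template"
)

func main() {
	// Switch the action delimiters from "{{"/"}}" to "[["/"]]".
	t := template.Must(template.New("demo").Delims("[[", "]]").Parse("Hello, [[.]]!\n"))
	if err := t.Execute(os.Stdout, "world"); err != nil {
		panic(err)
	}
}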
468
Godeps/_workspace/src/github.com/alecthomas/template/parse/lex_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,468 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package parse
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Make the types prettyprint.
|
||||
var itemName = map[itemType]string{
|
||||
itemError: "error",
|
||||
itemBool: "bool",
|
||||
itemChar: "char",
|
||||
itemCharConstant: "charconst",
|
||||
itemComplex: "complex",
|
||||
itemColonEquals: ":=",
|
||||
itemEOF: "EOF",
|
||||
itemField: "field",
|
||||
itemIdentifier: "identifier",
|
||||
itemLeftDelim: "left delim",
|
||||
itemLeftParen: "(",
|
||||
itemNumber: "number",
|
||||
itemPipe: "pipe",
|
||||
itemRawString: "raw string",
|
||||
itemRightDelim: "right delim",
|
||||
itemElideNewline: "elide newline",
|
||||
itemRightParen: ")",
|
||||
itemSpace: "space",
|
||||
itemString: "string",
|
||||
itemVariable: "variable",
|
||||
|
||||
// keywords
|
||||
itemDot: ".",
|
||||
itemDefine: "define",
|
||||
itemElse: "else",
|
||||
itemIf: "if",
|
||||
itemEnd: "end",
|
||||
itemNil: "nil",
|
||||
itemRange: "range",
|
||||
itemTemplate: "template",
|
||||
itemWith: "with",
|
||||
}
|
||||
|
||||
func (i itemType) String() string {
|
||||
s := itemName[i]
|
||||
if s == "" {
|
||||
return fmt.Sprintf("item%d", int(i))
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type lexTest struct {
|
||||
name string
|
||||
input string
|
||||
items []item
|
||||
}
|
||||
|
||||
var (
|
||||
tEOF = item{itemEOF, 0, ""}
|
||||
tFor = item{itemIdentifier, 0, "for"}
|
||||
tLeft = item{itemLeftDelim, 0, "{{"}
|
||||
tLpar = item{itemLeftParen, 0, "("}
|
||||
tPipe = item{itemPipe, 0, "|"}
|
||||
tQuote = item{itemString, 0, `"abc \n\t\" "`}
|
||||
tRange = item{itemRange, 0, "range"}
|
||||
tRight = item{itemRightDelim, 0, "}}"}
|
||||
tElideNewline = item{itemElideNewline, 0, "\\"}
|
||||
tRpar = item{itemRightParen, 0, ")"}
|
||||
tSpace = item{itemSpace, 0, " "}
|
||||
raw = "`" + `abc\n\t\" ` + "`"
|
||||
tRawQuote = item{itemRawString, 0, raw}
|
||||
)
|
||||
|
||||
var lexTests = []lexTest{
|
||||
{"empty", "", []item{tEOF}},
|
||||
{"spaces", " \t\n", []item{{itemText, 0, " \t\n"}, tEOF}},
|
||||
{"text", `now is the time`, []item{{itemText, 0, "now is the time"}, tEOF}},
|
||||
{"elide newline", "{{}}\\", []item{tLeft, tRight, tElideNewline, tEOF}},
|
||||
{"text with comment", "hello-{{/* this is a comment */}}-world", []item{
|
||||
{itemText, 0, "hello-"},
|
||||
{itemText, 0, "-world"},
|
||||
tEOF,
|
||||
}},
|
||||
{"punctuation", "{{,@% }}", []item{
|
||||
tLeft,
|
||||
{itemChar, 0, ","},
|
||||
{itemChar, 0, "@"},
|
||||
{itemChar, 0, "%"},
|
||||
tSpace,
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"parens", "{{((3))}}", []item{
|
||||
tLeft,
|
||||
tLpar,
|
||||
tLpar,
|
||||
{itemNumber, 0, "3"},
|
||||
tRpar,
|
||||
tRpar,
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"empty action", `{{}}`, []item{tLeft, tRight, tEOF}},
|
||||
{"for", `{{for}}`, []item{tLeft, tFor, tRight, tEOF}},
|
||||
{"quote", `{{"abc \n\t\" "}}`, []item{tLeft, tQuote, tRight, tEOF}},
|
||||
{"raw quote", "{{" + raw + "}}", []item{tLeft, tRawQuote, tRight, tEOF}},
|
||||
{"numbers", "{{1 02 0x14 -7.2i 1e3 +1.2e-4 4.2i 1+2i}}", []item{
|
||||
tLeft,
|
||||
{itemNumber, 0, "1"},
|
||||
tSpace,
|
||||
{itemNumber, 0, "02"},
|
||||
tSpace,
|
||||
{itemNumber, 0, "0x14"},
|
||||
tSpace,
|
||||
{itemNumber, 0, "-7.2i"},
|
||||
tSpace,
|
||||
{itemNumber, 0, "1e3"},
|
||||
tSpace,
|
||||
{itemNumber, 0, "+1.2e-4"},
|
||||
tSpace,
|
||||
{itemNumber, 0, "4.2i"},
|
||||
tSpace,
|
||||
{itemComplex, 0, "1+2i"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"characters", `{{'a' '\n' '\'' '\\' '\u00FF' '\xFF' '本'}}`, []item{
|
||||
tLeft,
|
||||
{itemCharConstant, 0, `'a'`},
|
||||
tSpace,
|
||||
{itemCharConstant, 0, `'\n'`},
|
||||
tSpace,
|
||||
{itemCharConstant, 0, `'\''`},
|
||||
tSpace,
|
||||
{itemCharConstant, 0, `'\\'`},
|
||||
tSpace,
|
||||
{itemCharConstant, 0, `'\u00FF'`},
|
||||
tSpace,
|
||||
{itemCharConstant, 0, `'\xFF'`},
|
||||
tSpace,
|
||||
{itemCharConstant, 0, `'本'`},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"bools", "{{true false}}", []item{
|
||||
tLeft,
|
||||
{itemBool, 0, "true"},
|
||||
tSpace,
|
||||
{itemBool, 0, "false"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"dot", "{{.}}", []item{
|
||||
tLeft,
|
||||
{itemDot, 0, "."},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"nil", "{{nil}}", []item{
|
||||
tLeft,
|
||||
{itemNil, 0, "nil"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"dots", "{{.x . .2 .x.y.z}}", []item{
|
||||
tLeft,
|
||||
{itemField, 0, ".x"},
|
||||
tSpace,
|
||||
{itemDot, 0, "."},
|
||||
tSpace,
|
||||
{itemNumber, 0, ".2"},
|
||||
tSpace,
|
||||
{itemField, 0, ".x"},
|
||||
{itemField, 0, ".y"},
|
||||
{itemField, 0, ".z"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"keywords", "{{range if else end with}}", []item{
|
||||
tLeft,
|
||||
{itemRange, 0, "range"},
|
||||
tSpace,
|
||||
{itemIf, 0, "if"},
|
||||
tSpace,
|
||||
{itemElse, 0, "else"},
|
||||
tSpace,
|
||||
{itemEnd, 0, "end"},
|
||||
tSpace,
|
||||
{itemWith, 0, "with"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"variables", "{{$c := printf $ $hello $23 $ $var.Field .Method}}", []item{
|
||||
tLeft,
|
||||
{itemVariable, 0, "$c"},
|
||||
tSpace,
|
||||
{itemColonEquals, 0, ":="},
|
||||
tSpace,
|
||||
{itemIdentifier, 0, "printf"},
|
||||
tSpace,
|
||||
{itemVariable, 0, "$"},
|
||||
tSpace,
|
||||
{itemVariable, 0, "$hello"},
|
||||
tSpace,
|
||||
{itemVariable, 0, "$23"},
|
||||
tSpace,
|
||||
{itemVariable, 0, "$"},
|
||||
tSpace,
|
||||
{itemVariable, 0, "$var"},
|
||||
{itemField, 0, ".Field"},
|
||||
tSpace,
|
||||
{itemField, 0, ".Method"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"variable invocation", "{{$x 23}}", []item{
|
||||
tLeft,
|
||||
{itemVariable, 0, "$x"},
|
||||
tSpace,
|
||||
{itemNumber, 0, "23"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"pipeline", `intro {{echo hi 1.2 |noargs|args 1 "hi"}} outro`, []item{
|
||||
{itemText, 0, "intro "},
|
||||
tLeft,
|
||||
{itemIdentifier, 0, "echo"},
|
||||
tSpace,
|
||||
{itemIdentifier, 0, "hi"},
|
||||
tSpace,
|
||||
{itemNumber, 0, "1.2"},
|
||||
tSpace,
|
||||
tPipe,
|
||||
{itemIdentifier, 0, "noargs"},
|
||||
tPipe,
|
||||
{itemIdentifier, 0, "args"},
|
||||
tSpace,
|
||||
{itemNumber, 0, "1"},
|
||||
tSpace,
|
||||
{itemString, 0, `"hi"`},
|
||||
tRight,
|
||||
{itemText, 0, " outro"},
|
||||
tEOF,
|
||||
}},
|
||||
{"declaration", "{{$v := 3}}", []item{
|
||||
tLeft,
|
||||
{itemVariable, 0, "$v"},
|
||||
tSpace,
|
||||
{itemColonEquals, 0, ":="},
|
||||
tSpace,
|
||||
{itemNumber, 0, "3"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"2 declarations", "{{$v , $w := 3}}", []item{
|
||||
tLeft,
|
||||
{itemVariable, 0, "$v"},
|
||||
tSpace,
|
||||
{itemChar, 0, ","},
|
||||
tSpace,
|
||||
{itemVariable, 0, "$w"},
|
||||
tSpace,
|
||||
{itemColonEquals, 0, ":="},
|
||||
tSpace,
|
||||
{itemNumber, 0, "3"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"field of parenthesized expression", "{{(.X).Y}}", []item{
|
||||
tLeft,
|
||||
tLpar,
|
||||
{itemField, 0, ".X"},
|
||||
tRpar,
|
||||
{itemField, 0, ".Y"},
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
// errors
|
||||
{"badchar", "#{{\x01}}", []item{
|
||||
{itemText, 0, "#"},
|
||||
tLeft,
|
||||
{itemError, 0, "unrecognized character in action: U+0001"},
|
||||
}},
|
||||
{"unclosed action", "{{\n}}", []item{
|
||||
tLeft,
|
||||
{itemError, 0, "unclosed action"},
|
||||
}},
|
||||
{"EOF in action", "{{range", []item{
|
||||
tLeft,
|
||||
tRange,
|
||||
{itemError, 0, "unclosed action"},
|
||||
}},
|
||||
{"unclosed quote", "{{\"\n\"}}", []item{
|
||||
tLeft,
|
||||
{itemError, 0, "unterminated quoted string"},
|
||||
}},
|
||||
{"unclosed raw quote", "{{`xx\n`}}", []item{
|
||||
tLeft,
|
||||
{itemError, 0, "unterminated raw quoted string"},
|
||||
}},
|
||||
{"unclosed char constant", "{{'\n}}", []item{
|
||||
tLeft,
|
||||
{itemError, 0, "unterminated character constant"},
|
||||
}},
|
||||
{"bad number", "{{3k}}", []item{
|
||||
tLeft,
|
||||
{itemError, 0, `bad number syntax: "3k"`},
|
||||
}},
|
||||
{"unclosed paren", "{{(3}}", []item{
|
||||
tLeft,
|
||||
tLpar,
|
||||
{itemNumber, 0, "3"},
|
||||
{itemError, 0, `unclosed left paren`},
|
||||
}},
|
||||
{"extra right paren", "{{3)}}", []item{
|
||||
tLeft,
|
||||
{itemNumber, 0, "3"},
|
||||
tRpar,
|
||||
{itemError, 0, `unexpected right paren U+0029 ')'`},
|
||||
}},
|
||||
|
||||
// Fixed bugs
|
||||
// Many elements in an action blew the lookahead until
|
||||
// we made lexInsideAction not loop.
|
||||
{"long pipeline deadlock", "{{|||||}}", []item{
|
||||
tLeft,
|
||||
tPipe,
|
||||
tPipe,
|
||||
tPipe,
|
||||
tPipe,
|
||||
tPipe,
|
||||
tRight,
|
||||
tEOF,
|
||||
}},
|
||||
{"text with bad comment", "hello-{{/*/}}-world", []item{
|
||||
{itemText, 0, "hello-"},
|
||||
{itemError, 0, `unclosed comment`},
|
||||
}},
|
||||
{"text with comment close separted from delim", "hello-{{/* */ }}-world", []item{
|
||||
{itemText, 0, "hello-"},
|
||||
{itemError, 0, `comment ends before closing delimiter`},
|
||||
}},
|
||||
// This one is an error that we can't catch because it breaks templates with
|
||||
// minimized JavaScript. Should have fixed it before Go 1.1.
|
||||
{"unmatched right delimiter", "hello-{.}}-world", []item{
|
||||
{itemText, 0, "hello-{.}}-world"},
|
||||
tEOF,
|
||||
}},
|
||||
}
|
||||
|
||||
// collect gathers the emitted items into a slice.
|
||||
func collect(t *lexTest, left, right string) (items []item) {
|
||||
l := lex(t.name, t.input, left, right)
|
||||
for {
|
||||
item := l.nextItem()
|
||||
items = append(items, item)
|
||||
if item.typ == itemEOF || item.typ == itemError {
|
||||
break
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func equal(i1, i2 []item, checkPos bool) bool {
|
||||
if len(i1) != len(i2) {
|
||||
return false
|
||||
}
|
||||
for k := range i1 {
|
||||
if i1[k].typ != i2[k].typ {
|
||||
return false
|
||||
}
|
||||
if i1[k].val != i2[k].val {
|
||||
return false
|
||||
}
|
||||
if checkPos && i1[k].pos != i2[k].pos {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func TestLex(t *testing.T) {
|
||||
for _, test := range lexTests {
|
||||
items := collect(&test, "", "")
|
||||
if !equal(items, test.items, false) {
|
||||
t.Errorf("%s: got\n\t%+v\nexpected\n\t%v", test.name, items, test.items)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Some easy cases from above, but with delimiters $$ and @@
|
||||
var lexDelimTests = []lexTest{
|
||||
{"punctuation", "$$,@%{{}}@@", []item{
|
||||
tLeftDelim,
|
||||
{itemChar, 0, ","},
|
||||
{itemChar, 0, "@"},
|
||||
{itemChar, 0, "%"},
|
||||
{itemChar, 0, "{"},
|
||||
{itemChar, 0, "{"},
|
||||
{itemChar, 0, "}"},
|
||||
{itemChar, 0, "}"},
|
||||
tRightDelim,
|
||||
tEOF,
|
||||
}},
|
||||
{"empty action", `$$@@`, []item{tLeftDelim, tRightDelim, tEOF}},
|
||||
{"for", `$$for@@`, []item{tLeftDelim, tFor, tRightDelim, tEOF}},
|
||||
{"quote", `$$"abc \n\t\" "@@`, []item{tLeftDelim, tQuote, tRightDelim, tEOF}},
|
||||
{"raw quote", "$$" + raw + "@@", []item{tLeftDelim, tRawQuote, tRightDelim, tEOF}},
|
||||
}
|
||||
|
||||
var (
|
||||
tLeftDelim = item{itemLeftDelim, 0, "$$"}
|
||||
tRightDelim = item{itemRightDelim, 0, "@@"}
|
||||
)
|
||||
|
||||
func TestDelims(t *testing.T) {
|
||||
for _, test := range lexDelimTests {
|
||||
items := collect(&test, "$$", "@@")
|
||||
if !equal(items, test.items, false) {
|
||||
t.Errorf("%s: got\n\t%v\nexpected\n\t%v", test.name, items, test.items)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var lexPosTests = []lexTest{
|
||||
{"empty", "", []item{tEOF}},
|
||||
{"punctuation", "{{,@%#}}", []item{
|
||||
{itemLeftDelim, 0, "{{"},
|
||||
{itemChar, 2, ","},
|
||||
{itemChar, 3, "@"},
|
||||
{itemChar, 4, "%"},
|
||||
{itemChar, 5, "#"},
|
||||
{itemRightDelim, 6, "}}"},
|
||||
{itemEOF, 8, ""},
|
||||
}},
|
||||
{"sample", "0123{{hello}}xyz", []item{
|
||||
{itemText, 0, "0123"},
|
||||
{itemLeftDelim, 4, "{{"},
|
||||
{itemIdentifier, 6, "hello"},
|
||||
{itemRightDelim, 11, "}}"},
|
||||
{itemText, 13, "xyz"},
|
||||
{itemEOF, 16, ""},
|
||||
}},
|
||||
}
|
||||
|
||||
// The other tests don't check position, to make the test cases easier to construct.
|
||||
// This one does.
|
||||
func TestPos(t *testing.T) {
|
||||
for _, test := range lexPosTests {
|
||||
items := collect(&test, "", "")
|
||||
if !equal(items, test.items, true) {
|
||||
t.Errorf("%s: got\n\t%v\nexpected\n\t%v", test.name, items, test.items)
|
||||
if len(items) == len(test.items) {
|
||||
// Detailed print; avoid item.String() to expose the position value.
|
||||
for i := range items {
|
||||
if !equal(items[i:i+1], test.items[i:i+1], true) {
|
||||
i1 := items[i]
|
||||
i2 := test.items[i]
|
||||
t.Errorf("\t#%d: got {%v %d %q} expected {%v %d %q}", i, i1.typ, i1.pos, i1.val, i2.typ, i2.pos, i2.val)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
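Aside (not part of the vendored file): the tests above are table-driven; each case pairs an input with the exact item sequence the lexer should emit. A hypothetical extra case, written against the same helpers (lexTest, collect, equal) and assuming it lives in this same test file and package:

// extraTest follows the same pattern as lexTests; a chained field lexes as
// one itemField per component, as in the "dots" case above.
var extraTest = lexTest{"field chain", "{{.A.B}}", []item{
	tLeft,
	{itemField, 0, ".A"},
	{itemField, 0, ".B"},
	tRight,
	tEOF,
}}

func TestExtraCase(t *testing.T) {
	items := collect(&extraTest, "", "")
	if !equal(items, extraTest.items, false) {
		t.Errorf("field chain: got\n\t%+v\nexpected\n\t%v", items, extraTest.items)
	}
}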
834 Godeps/_workspace/src/github.com/alecthomas/template/parse/node.go (generated, vendored, new file)
@ -0,0 +1,834 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Parse nodes.
|
||||
|
||||
package parse
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var textFormat = "%s" // Changed to "%q" in tests for better error messages.
|
||||
|
||||
// A Node is an element in the parse tree. The interface is trivial.
|
||||
// The interface contains an unexported method so that only
|
||||
// types local to this package can satisfy it.
|
||||
type Node interface {
|
||||
Type() NodeType
|
||||
String() string
|
||||
// Copy does a deep copy of the Node and all its components.
|
||||
// To avoid type assertions, some XxxNodes also have specialized
|
||||
// CopyXxx methods that return *XxxNode.
|
||||
Copy() Node
|
||||
Position() Pos // byte position of start of node in full original input string
|
||||
// tree returns the containing *Tree.
|
||||
// It is unexported so all implementations of Node are in this package.
|
||||
tree() *Tree
|
||||
}
|
||||
|
||||
// NodeType identifies the type of a parse tree node.
|
||||
type NodeType int
|
||||
|
||||
// Pos represents a byte position in the original input text from which
|
||||
// this template was parsed.
|
||||
type Pos int
|
||||
|
||||
func (p Pos) Position() Pos {
|
||||
return p
|
||||
}
|
||||
|
||||
// Type returns itself and provides an easy default implementation
|
||||
// for embedding in a Node. Embedded in all non-trivial Nodes.
|
||||
func (t NodeType) Type() NodeType {
|
||||
return t
|
||||
}
|
||||
|
||||
const (
|
||||
NodeText NodeType = iota // Plain text.
|
||||
NodeAction // A non-control action such as a field evaluation.
|
||||
NodeBool // A boolean constant.
|
||||
NodeChain // A sequence of field accesses.
|
||||
NodeCommand // An element of a pipeline.
|
||||
NodeDot // The cursor, dot.
|
||||
nodeElse // An else action. Not added to tree.
|
||||
nodeEnd // An end action. Not added to tree.
|
||||
NodeField // A field or method name.
|
||||
NodeIdentifier // An identifier; always a function name.
|
||||
NodeIf // An if action.
|
||||
NodeList // A list of Nodes.
|
||||
NodeNil // An untyped nil constant.
|
||||
NodeNumber // A numerical constant.
|
||||
NodePipe // A pipeline of commands.
|
||||
NodeRange // A range action.
|
||||
NodeString // A string constant.
|
||||
NodeTemplate // A template invocation action.
|
||||
NodeVariable // A $ variable.
|
||||
NodeWith // A with action.
|
||||
)
|
||||
|
||||
// Nodes.
|
||||
|
||||
// ListNode holds a sequence of nodes.
|
||||
type ListNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Nodes []Node // The element nodes in lexical order.
|
||||
}
|
||||
|
||||
func (t *Tree) newList(pos Pos) *ListNode {
|
||||
return &ListNode{tr: t, NodeType: NodeList, Pos: pos}
|
||||
}
|
||||
|
||||
func (l *ListNode) append(n Node) {
|
||||
l.Nodes = append(l.Nodes, n)
|
||||
}
|
||||
|
||||
func (l *ListNode) tree() *Tree {
|
||||
return l.tr
|
||||
}
|
||||
|
||||
func (l *ListNode) String() string {
|
||||
b := new(bytes.Buffer)
|
||||
for _, n := range l.Nodes {
|
||||
fmt.Fprint(b, n)
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func (l *ListNode) CopyList() *ListNode {
|
||||
if l == nil {
|
||||
return l
|
||||
}
|
||||
n := l.tr.newList(l.Pos)
|
||||
for _, elem := range l.Nodes {
|
||||
n.append(elem.Copy())
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func (l *ListNode) Copy() Node {
|
||||
return l.CopyList()
|
||||
}
|
||||
|
||||
// TextNode holds plain text.
|
||||
type TextNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Text []byte // The text; may span newlines.
|
||||
}
|
||||
|
||||
func (t *Tree) newText(pos Pos, text string) *TextNode {
|
||||
return &TextNode{tr: t, NodeType: NodeText, Pos: pos, Text: []byte(text)}
|
||||
}
|
||||
|
||||
func (t *TextNode) String() string {
|
||||
return fmt.Sprintf(textFormat, t.Text)
|
||||
}
|
||||
|
||||
func (t *TextNode) tree() *Tree {
|
||||
return t.tr
|
||||
}
|
||||
|
||||
func (t *TextNode) Copy() Node {
|
||||
return &TextNode{tr: t.tr, NodeType: NodeText, Pos: t.Pos, Text: append([]byte{}, t.Text...)}
|
||||
}
|
||||
|
||||
// PipeNode holds a pipeline with optional declaration
|
||||
type PipeNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
||||
Decl []*VariableNode // Variable declarations in lexical order.
|
||||
Cmds []*CommandNode // The commands in lexical order.
|
||||
}
|
||||
|
||||
func (t *Tree) newPipeline(pos Pos, line int, decl []*VariableNode) *PipeNode {
|
||||
return &PipeNode{tr: t, NodeType: NodePipe, Pos: pos, Line: line, Decl: decl}
|
||||
}
|
||||
|
||||
func (p *PipeNode) append(command *CommandNode) {
|
||||
p.Cmds = append(p.Cmds, command)
|
||||
}
|
||||
|
||||
func (p *PipeNode) String() string {
|
||||
s := ""
|
||||
if len(p.Decl) > 0 {
|
||||
for i, v := range p.Decl {
|
||||
if i > 0 {
|
||||
s += ", "
|
||||
}
|
||||
s += v.String()
|
||||
}
|
||||
s += " := "
|
||||
}
|
||||
for i, c := range p.Cmds {
|
||||
if i > 0 {
|
||||
s += " | "
|
||||
}
|
||||
s += c.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (p *PipeNode) tree() *Tree {
|
||||
return p.tr
|
||||
}
|
||||
|
||||
func (p *PipeNode) CopyPipe() *PipeNode {
|
||||
if p == nil {
|
||||
return p
|
||||
}
|
||||
var decl []*VariableNode
|
||||
for _, d := range p.Decl {
|
||||
decl = append(decl, d.Copy().(*VariableNode))
|
||||
}
|
||||
n := p.tr.newPipeline(p.Pos, p.Line, decl)
|
||||
for _, c := range p.Cmds {
|
||||
n.append(c.Copy().(*CommandNode))
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *PipeNode) Copy() Node {
|
||||
return p.CopyPipe()
|
||||
}
|
||||
|
||||
// ActionNode holds an action (something bounded by delimiters).
|
||||
// Control actions have their own nodes; ActionNode represents simple
|
||||
// ones such as field evaluations and parenthesized pipelines.
|
||||
type ActionNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
||||
Pipe *PipeNode // The pipeline in the action.
|
||||
}
|
||||
|
||||
func (t *Tree) newAction(pos Pos, line int, pipe *PipeNode) *ActionNode {
|
||||
return &ActionNode{tr: t, NodeType: NodeAction, Pos: pos, Line: line, Pipe: pipe}
|
||||
}
|
||||
|
||||
func (a *ActionNode) String() string {
|
||||
return fmt.Sprintf("{{%s}}", a.Pipe)
|
||||
|
||||
}
|
||||
|
||||
func (a *ActionNode) tree() *Tree {
|
||||
return a.tr
|
||||
}
|
||||
|
||||
func (a *ActionNode) Copy() Node {
|
||||
return a.tr.newAction(a.Pos, a.Line, a.Pipe.CopyPipe())
|
||||
|
||||
}
|
||||
|
||||
// CommandNode holds a command (a pipeline inside an evaluating action).
|
||||
type CommandNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Args []Node // Arguments in lexical order: Identifier, field, or constant.
|
||||
}
|
||||
|
||||
func (t *Tree) newCommand(pos Pos) *CommandNode {
|
||||
return &CommandNode{tr: t, NodeType: NodeCommand, Pos: pos}
|
||||
}
|
||||
|
||||
func (c *CommandNode) append(arg Node) {
|
||||
c.Args = append(c.Args, arg)
|
||||
}
|
||||
|
||||
func (c *CommandNode) String() string {
|
||||
s := ""
|
||||
for i, arg := range c.Args {
|
||||
if i > 0 {
|
||||
s += " "
|
||||
}
|
||||
if arg, ok := arg.(*PipeNode); ok {
|
||||
s += "(" + arg.String() + ")"
|
||||
continue
|
||||
}
|
||||
s += arg.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (c *CommandNode) tree() *Tree {
|
||||
return c.tr
|
||||
}
|
||||
|
||||
func (c *CommandNode) Copy() Node {
|
||||
if c == nil {
|
||||
return c
|
||||
}
|
||||
n := c.tr.newCommand(c.Pos)
|
||||
for _, c := range c.Args {
|
||||
n.append(c.Copy())
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// IdentifierNode holds an identifier.
|
||||
type IdentifierNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Ident string // The identifier's name.
|
||||
}
|
||||
|
||||
// NewIdentifier returns a new IdentifierNode with the given identifier name.
|
||||
func NewIdentifier(ident string) *IdentifierNode {
|
||||
return &IdentifierNode{NodeType: NodeIdentifier, Ident: ident}
|
||||
}
|
||||
|
||||
// SetPos sets the position. NewIdentifier is a public method so we can't modify its signature.
|
||||
// Chained for convenience.
|
||||
// TODO: fix one day?
|
||||
func (i *IdentifierNode) SetPos(pos Pos) *IdentifierNode {
|
||||
i.Pos = pos
|
||||
return i
|
||||
}
|
||||
|
||||
// SetTree sets the parent tree for the node. NewIdentifier is a public method so we can't modify its signature.
|
||||
// Chained for convenience.
|
||||
// TODO: fix one day?
|
||||
func (i *IdentifierNode) SetTree(t *Tree) *IdentifierNode {
|
||||
i.tr = t
|
||||
return i
|
||||
}
|
||||
|
||||
func (i *IdentifierNode) String() string {
|
||||
return i.Ident
|
||||
}
|
||||
|
||||
func (i *IdentifierNode) tree() *Tree {
|
||||
return i.tr
|
||||
}
|
||||
|
||||
func (i *IdentifierNode) Copy() Node {
|
||||
return NewIdentifier(i.Ident).SetTree(i.tr).SetPos(i.Pos)
|
||||
}
|
||||
|
||||
// VariableNode holds a list of variable names, possibly with chained field
|
||||
// accesses. The dollar sign is part of the (first) name.
|
||||
type VariableNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Ident []string // Variable name and fields in lexical order.
|
||||
}
|
||||
|
||||
func (t *Tree) newVariable(pos Pos, ident string) *VariableNode {
|
||||
return &VariableNode{tr: t, NodeType: NodeVariable, Pos: pos, Ident: strings.Split(ident, ".")}
|
||||
}
|
||||
|
||||
func (v *VariableNode) String() string {
|
||||
s := ""
|
||||
for i, id := range v.Ident {
|
||||
if i > 0 {
|
||||
s += "."
|
||||
}
|
||||
s += id
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (v *VariableNode) tree() *Tree {
|
||||
return v.tr
|
||||
}
|
||||
|
||||
func (v *VariableNode) Copy() Node {
|
||||
return &VariableNode{tr: v.tr, NodeType: NodeVariable, Pos: v.Pos, Ident: append([]string{}, v.Ident...)}
|
||||
}
|
||||
|
||||
// DotNode holds the special identifier '.'.
|
||||
type DotNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
}
|
||||
|
||||
func (t *Tree) newDot(pos Pos) *DotNode {
|
||||
return &DotNode{tr: t, NodeType: NodeDot, Pos: pos}
|
||||
}
|
||||
|
||||
func (d *DotNode) Type() NodeType {
|
||||
// Override method on embedded NodeType for API compatibility.
|
||||
// TODO: Not really a problem; could change API without effect but
|
||||
// api tool complains.
|
||||
return NodeDot
|
||||
}
|
||||
|
||||
func (d *DotNode) String() string {
|
||||
return "."
|
||||
}
|
||||
|
||||
func (d *DotNode) tree() *Tree {
|
||||
return d.tr
|
||||
}
|
||||
|
||||
func (d *DotNode) Copy() Node {
|
||||
return d.tr.newDot(d.Pos)
|
||||
}
|
||||
|
||||
// NilNode holds the special identifier 'nil' representing an untyped nil constant.
|
||||
type NilNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
}
|
||||
|
||||
func (t *Tree) newNil(pos Pos) *NilNode {
|
||||
return &NilNode{tr: t, NodeType: NodeNil, Pos: pos}
|
||||
}
|
||||
|
||||
func (n *NilNode) Type() NodeType {
|
||||
// Override method on embedded NodeType for API compatibility.
|
||||
// TODO: Not really a problem; could change API without effect but
|
||||
// api tool complains.
|
||||
return NodeNil
|
||||
}
|
||||
|
||||
func (n *NilNode) String() string {
|
||||
return "nil"
|
||||
}
|
||||
|
||||
func (n *NilNode) tree() *Tree {
|
||||
return n.tr
|
||||
}
|
||||
|
||||
func (n *NilNode) Copy() Node {
|
||||
return n.tr.newNil(n.Pos)
|
||||
}
|
||||
|
||||
// FieldNode holds a field (identifier starting with '.').
|
||||
// The names may be chained ('.x.y').
|
||||
// The period is dropped from each ident.
|
||||
type FieldNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Ident []string // The identifiers in lexical order.
|
||||
}
|
||||
|
||||
func (t *Tree) newField(pos Pos, ident string) *FieldNode {
|
||||
return &FieldNode{tr: t, NodeType: NodeField, Pos: pos, Ident: strings.Split(ident[1:], ".")} // [1:] to drop leading period
|
||||
}
|
||||
|
||||
func (f *FieldNode) String() string {
|
||||
s := ""
|
||||
for _, id := range f.Ident {
|
||||
s += "." + id
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (f *FieldNode) tree() *Tree {
|
||||
return f.tr
|
||||
}
|
||||
|
||||
func (f *FieldNode) Copy() Node {
|
||||
return &FieldNode{tr: f.tr, NodeType: NodeField, Pos: f.Pos, Ident: append([]string{}, f.Ident...)}
|
||||
}
|
||||
|
||||
// ChainNode holds a term followed by a chain of field accesses (identifier starting with '.').
|
||||
// The names may be chained ('.x.y').
|
||||
// The periods are dropped from each ident.
|
||||
type ChainNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Node Node
|
||||
Field []string // The identifiers in lexical order.
|
||||
}
|
||||
|
||||
func (t *Tree) newChain(pos Pos, node Node) *ChainNode {
|
||||
return &ChainNode{tr: t, NodeType: NodeChain, Pos: pos, Node: node}
|
||||
}
|
||||
|
||||
// Add adds the named field (which should start with a period) to the end of the chain.
|
||||
func (c *ChainNode) Add(field string) {
|
||||
if len(field) == 0 || field[0] != '.' {
|
||||
panic("no dot in field")
|
||||
}
|
||||
field = field[1:] // Remove leading dot.
|
||||
if field == "" {
|
||||
panic("empty field")
|
||||
}
|
||||
c.Field = append(c.Field, field)
|
||||
}
|
||||
|
||||
func (c *ChainNode) String() string {
|
||||
s := c.Node.String()
|
||||
if _, ok := c.Node.(*PipeNode); ok {
|
||||
s = "(" + s + ")"
|
||||
}
|
||||
for _, field := range c.Field {
|
||||
s += "." + field
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (c *ChainNode) tree() *Tree {
|
||||
return c.tr
|
||||
}
|
||||
|
||||
func (c *ChainNode) Copy() Node {
|
||||
return &ChainNode{tr: c.tr, NodeType: NodeChain, Pos: c.Pos, Node: c.Node, Field: append([]string{}, c.Field...)}
|
||||
}
|
||||
|
||||
// BoolNode holds a boolean constant.
|
||||
type BoolNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
True bool // The value of the boolean constant.
|
||||
}
|
||||
|
||||
func (t *Tree) newBool(pos Pos, true bool) *BoolNode {
|
||||
return &BoolNode{tr: t, NodeType: NodeBool, Pos: pos, True: true}
|
||||
}
|
||||
|
||||
func (b *BoolNode) String() string {
|
||||
if b.True {
|
||||
return "true"
|
||||
}
|
||||
return "false"
|
||||
}
|
||||
|
||||
func (b *BoolNode) tree() *Tree {
|
||||
return b.tr
|
||||
}
|
||||
|
||||
func (b *BoolNode) Copy() Node {
|
||||
return b.tr.newBool(b.Pos, b.True)
|
||||
}
|
||||
|
||||
// NumberNode holds a number: signed or unsigned integer, float, or complex.
|
||||
// The value is parsed and stored under all the types that can represent the value.
|
||||
// This simulates in a small amount of code the behavior of Go's ideal constants.
|
||||
type NumberNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
IsInt bool // Number has an integral value.
|
||||
IsUint bool // Number has an unsigned integral value.
|
||||
IsFloat bool // Number has a floating-point value.
|
||||
IsComplex bool // Number is complex.
|
||||
Int64 int64 // The signed integer value.
|
||||
Uint64 uint64 // The unsigned integer value.
|
||||
Float64 float64 // The floating-point value.
|
||||
Complex128 complex128 // The complex value.
|
||||
Text string // The original textual representation from the input.
|
||||
}
|
||||
|
||||
func (t *Tree) newNumber(pos Pos, text string, typ itemType) (*NumberNode, error) {
|
||||
n := &NumberNode{tr: t, NodeType: NodeNumber, Pos: pos, Text: text}
|
||||
switch typ {
|
||||
case itemCharConstant:
|
||||
rune, _, tail, err := strconv.UnquoteChar(text[1:], text[0])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if tail != "'" {
|
||||
return nil, fmt.Errorf("malformed character constant: %s", text)
|
||||
}
|
||||
n.Int64 = int64(rune)
|
||||
n.IsInt = true
|
||||
n.Uint64 = uint64(rune)
|
||||
n.IsUint = true
|
||||
n.Float64 = float64(rune) // odd but those are the rules.
|
||||
n.IsFloat = true
|
||||
return n, nil
|
||||
case itemComplex:
|
||||
// fmt.Sscan can parse the pair, so let it do the work.
|
||||
if _, err := fmt.Sscan(text, &n.Complex128); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
n.IsComplex = true
|
||||
n.simplifyComplex()
|
||||
return n, nil
|
||||
}
|
||||
// Imaginary constants can only be complex unless they are zero.
|
||||
if len(text) > 0 && text[len(text)-1] == 'i' {
|
||||
f, err := strconv.ParseFloat(text[:len(text)-1], 64)
|
||||
if err == nil {
|
||||
n.IsComplex = true
|
||||
n.Complex128 = complex(0, f)
|
||||
n.simplifyComplex()
|
||||
return n, nil
|
||||
}
|
||||
}
|
||||
// Do integer test first so we get 0x123 etc.
|
||||
u, err := strconv.ParseUint(text, 0, 64) // will fail for -0; fixed below.
|
||||
if err == nil {
|
||||
n.IsUint = true
|
||||
n.Uint64 = u
|
||||
}
|
||||
i, err := strconv.ParseInt(text, 0, 64)
|
||||
if err == nil {
|
||||
n.IsInt = true
|
||||
n.Int64 = i
|
||||
if i == 0 {
|
||||
n.IsUint = true // in case of -0.
|
||||
n.Uint64 = u
|
||||
}
|
||||
}
|
||||
// If an integer extraction succeeded, promote the float.
|
||||
if n.IsInt {
|
||||
n.IsFloat = true
|
||||
n.Float64 = float64(n.Int64)
|
||||
} else if n.IsUint {
|
||||
n.IsFloat = true
|
||||
n.Float64 = float64(n.Uint64)
|
||||
} else {
|
||||
f, err := strconv.ParseFloat(text, 64)
|
||||
if err == nil {
|
||||
n.IsFloat = true
|
||||
n.Float64 = f
|
||||
// If a floating-point extraction succeeded, extract the int if needed.
|
||||
if !n.IsInt && float64(int64(f)) == f {
|
||||
n.IsInt = true
|
||||
n.Int64 = int64(f)
|
||||
}
|
||||
if !n.IsUint && float64(uint64(f)) == f {
|
||||
n.IsUint = true
|
||||
n.Uint64 = uint64(f)
|
||||
}
|
||||
}
|
||||
}
|
||||
if !n.IsInt && !n.IsUint && !n.IsFloat {
|
||||
return nil, fmt.Errorf("illegal number syntax: %q", text)
|
||||
}
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// simplifyComplex pulls out any other types that are represented by the complex number.
|
||||
// These all require that the imaginary part be zero.
|
||||
func (n *NumberNode) simplifyComplex() {
|
||||
n.IsFloat = imag(n.Complex128) == 0
|
||||
if n.IsFloat {
|
||||
n.Float64 = real(n.Complex128)
|
||||
n.IsInt = float64(int64(n.Float64)) == n.Float64
|
||||
if n.IsInt {
|
||||
n.Int64 = int64(n.Float64)
|
||||
}
|
||||
n.IsUint = float64(uint64(n.Float64)) == n.Float64
|
||||
if n.IsUint {
|
||||
n.Uint64 = uint64(n.Float64)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (n *NumberNode) String() string {
|
||||
return n.Text
|
||||
}
|
||||
|
||||
func (n *NumberNode) tree() *Tree {
|
||||
return n.tr
|
||||
}
|
||||
|
||||
func (n *NumberNode) Copy() Node {
|
||||
nn := new(NumberNode)
|
||||
*nn = *n // Easy, fast, correct.
|
||||
return nn
|
||||
}
|
||||
|
||||
// StringNode holds a string constant. The value has been "unquoted".
|
||||
type StringNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Quoted string // The original text of the string, with quotes.
|
||||
Text string // The string, after quote processing.
|
||||
}
|
||||
|
||||
func (t *Tree) newString(pos Pos, orig, text string) *StringNode {
|
||||
return &StringNode{tr: t, NodeType: NodeString, Pos: pos, Quoted: orig, Text: text}
|
||||
}
|
||||
|
||||
func (s *StringNode) String() string {
|
||||
return s.Quoted
|
||||
}
|
||||
|
||||
func (s *StringNode) tree() *Tree {
|
||||
return s.tr
|
||||
}
|
||||
|
||||
func (s *StringNode) Copy() Node {
|
||||
return s.tr.newString(s.Pos, s.Quoted, s.Text)
|
||||
}
|
||||
|
||||
// endNode represents an {{end}} action.
|
||||
// It does not appear in the final parse tree.
|
||||
type endNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
}
|
||||
|
||||
func (t *Tree) newEnd(pos Pos) *endNode {
|
||||
return &endNode{tr: t, NodeType: nodeEnd, Pos: pos}
|
||||
}
|
||||
|
||||
func (e *endNode) String() string {
|
||||
return "{{end}}"
|
||||
}
|
||||
|
||||
func (e *endNode) tree() *Tree {
|
||||
return e.tr
|
||||
}
|
||||
|
||||
func (e *endNode) Copy() Node {
|
||||
return e.tr.newEnd(e.Pos)
|
||||
}
|
||||
|
||||
// elseNode represents an {{else}} action. Does not appear in the final tree.
|
||||
type elseNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
||||
}
|
||||
|
||||
func (t *Tree) newElse(pos Pos, line int) *elseNode {
|
||||
return &elseNode{tr: t, NodeType: nodeElse, Pos: pos, Line: line}
|
||||
}
|
||||
|
||||
func (e *elseNode) Type() NodeType {
|
||||
return nodeElse
|
||||
}
|
||||
|
||||
func (e *elseNode) String() string {
|
||||
return "{{else}}"
|
||||
}
|
||||
|
||||
func (e *elseNode) tree() *Tree {
|
||||
return e.tr
|
||||
}
|
||||
|
||||
func (e *elseNode) Copy() Node {
|
||||
return e.tr.newElse(e.Pos, e.Line)
|
||||
}
|
||||
|
||||
// BranchNode is the common representation of if, range, and with.
|
||||
type BranchNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
||||
Pipe *PipeNode // The pipeline to be evaluated.
|
||||
List *ListNode // What to execute if the value is non-empty.
|
||||
ElseList *ListNode // What to execute if the value is empty (nil if absent).
|
||||
}
|
||||
|
||||
func (b *BranchNode) String() string {
|
||||
name := ""
|
||||
switch b.NodeType {
|
||||
case NodeIf:
|
||||
name = "if"
|
||||
case NodeRange:
|
||||
name = "range"
|
||||
case NodeWith:
|
||||
name = "with"
|
||||
default:
|
||||
panic("unknown branch type")
|
||||
}
|
||||
if b.ElseList != nil {
|
||||
return fmt.Sprintf("{{%s %s}}%s{{else}}%s{{end}}", name, b.Pipe, b.List, b.ElseList)
|
||||
}
|
||||
return fmt.Sprintf("{{%s %s}}%s{{end}}", name, b.Pipe, b.List)
|
||||
}
|
||||
|
||||
func (b *BranchNode) tree() *Tree {
|
||||
return b.tr
|
||||
}
|
||||
|
||||
func (b *BranchNode) Copy() Node {
|
||||
switch b.NodeType {
|
||||
case NodeIf:
|
||||
return b.tr.newIf(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
|
||||
case NodeRange:
|
||||
return b.tr.newRange(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
|
||||
case NodeWith:
|
||||
return b.tr.newWith(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
|
||||
default:
|
||||
panic("unknown branch type")
|
||||
}
|
||||
}
|
||||
|
||||
// IfNode represents an {{if}} action and its commands.
|
||||
type IfNode struct {
|
||||
BranchNode
|
||||
}
|
||||
|
||||
func (t *Tree) newIf(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *IfNode {
|
||||
return &IfNode{BranchNode{tr: t, NodeType: NodeIf, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
|
||||
}
|
||||
|
||||
func (i *IfNode) Copy() Node {
|
||||
return i.tr.newIf(i.Pos, i.Line, i.Pipe.CopyPipe(), i.List.CopyList(), i.ElseList.CopyList())
|
||||
}
|
||||
|
||||
// RangeNode represents a {{range}} action and its commands.
|
||||
type RangeNode struct {
|
||||
BranchNode
|
||||
}
|
||||
|
||||
func (t *Tree) newRange(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *RangeNode {
|
||||
return &RangeNode{BranchNode{tr: t, NodeType: NodeRange, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
|
||||
}
|
||||
|
||||
func (r *RangeNode) Copy() Node {
|
||||
return r.tr.newRange(r.Pos, r.Line, r.Pipe.CopyPipe(), r.List.CopyList(), r.ElseList.CopyList())
|
||||
}
|
||||
|
||||
// WithNode represents a {{with}} action and its commands.
|
||||
type WithNode struct {
|
||||
BranchNode
|
||||
}
|
||||
|
||||
func (t *Tree) newWith(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *WithNode {
|
||||
return &WithNode{BranchNode{tr: t, NodeType: NodeWith, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
|
||||
}
|
||||
|
||||
func (w *WithNode) Copy() Node {
|
||||
return w.tr.newWith(w.Pos, w.Line, w.Pipe.CopyPipe(), w.List.CopyList(), w.ElseList.CopyList())
|
||||
}
|
||||
|
||||
// TemplateNode represents a {{template}} action.
|
||||
type TemplateNode struct {
|
||||
NodeType
|
||||
Pos
|
||||
tr *Tree
|
||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
||||
Name string // The name of the template (unquoted).
|
||||
Pipe *PipeNode // The command to evaluate as dot for the template.
|
||||
}
|
||||
|
||||
func (t *Tree) newTemplate(pos Pos, line int, name string, pipe *PipeNode) *TemplateNode {
|
||||
return &TemplateNode{tr: t, NodeType: NodeTemplate, Pos: pos, Line: line, Name: name, Pipe: pipe}
|
||||
}
|
||||
|
||||
func (t *TemplateNode) String() string {
|
||||
if t.Pipe == nil {
|
||||
return fmt.Sprintf("{{template %q}}", t.Name)
|
||||
}
|
||||
return fmt.Sprintf("{{template %q %s}}", t.Name, t.Pipe)
|
||||
}
|
||||
|
||||
func (t *TemplateNode) tree() *Tree {
|
||||
return t.tr
|
||||
}
|
||||
|
||||
func (t *TemplateNode) Copy() Node {
|
||||
return t.tr.newTemplate(t.Pos, t.Line, t.Name, t.Pipe.CopyPipe())
|
||||
}
|
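Aside (not part of the vendored file): newNumber records every representation that fits, trying unsigned, then signed, then float (with integers promoted to float afterwards), which is how it simulates Go's ideal constants. A simplified standalone sketch of that fallback order using only strconv; classify is a hypothetical helper, not the package API:

package main

import (
	"fmt"
	"strconv"
)

// classify prints every strconv representation that accepts the text,
// in the same order newNumber tries them.
func classify(text string) {
	if u, err := strconv.ParseUint(text, 0, 64); err == nil {
		fmt.Printf("%q as uint: %d\n", text, u)
	}
	if i, err := strconv.ParseInt(text, 0, 64); err == nil {
		fmt.Printf("%q as int: %d\n", text, i)
	}
	if f, err := strconv.ParseFloat(text, 64); err == nil {
		fmt.Printf("%q as float: %g\n", text, f)
	}
}

func main() {
	classify("0x14") // uint and int parse; newNumber then promotes the float from the int
	classify("-7")   // int and float
	classify("1.5")  // float only
}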
700 Godeps/_workspace/src/github.com/alecthomas/template/parse/parse.go (generated, vendored, new file)
@ -0,0 +1,700 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package parse builds parse trees for templates as defined by text/template
|
||||
// and html/template. Clients should use those packages to construct templates
|
||||
// rather than this one, which provides shared internal data structures not
|
||||
// intended for general use.
|
||||
package parse
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Tree is the representation of a single parsed template.
|
||||
type Tree struct {
|
||||
Name string // name of the template represented by the tree.
|
||||
ParseName string // name of the top-level template during parsing, for error messages.
|
||||
Root *ListNode // top-level root of the tree.
|
||||
text string // text parsed to create the template (or its parent)
|
||||
// Parsing only; cleared after parse.
|
||||
funcs []map[string]interface{}
|
||||
lex *lexer
|
||||
token [3]item // three-token lookahead for parser.
|
||||
peekCount int
|
||||
vars []string // variables defined at the moment.
|
||||
}
|
||||
|
||||
// Copy returns a copy of the Tree. Any parsing state is discarded.
|
||||
func (t *Tree) Copy() *Tree {
|
||||
if t == nil {
|
||||
return nil
|
||||
}
|
||||
return &Tree{
|
||||
Name: t.Name,
|
||||
ParseName: t.ParseName,
|
||||
Root: t.Root.CopyList(),
|
||||
text: t.text,
|
||||
}
|
||||
}
|
||||
|
||||
// Parse returns a map from template name to parse.Tree, created by parsing the
|
||||
// templates described in the argument string. The top-level template will be
|
||||
// given the specified name. If an error is encountered, parsing stops and an
|
||||
// empty map is returned with the error.
|
||||
func Parse(name, text, leftDelim, rightDelim string, funcs ...map[string]interface{}) (treeSet map[string]*Tree, err error) {
|
||||
treeSet = make(map[string]*Tree)
|
||||
t := New(name)
|
||||
t.text = text
|
||||
_, err = t.Parse(text, leftDelim, rightDelim, treeSet, funcs...)
|
||||
return
|
||||
}
|
||||
|
||||
// next returns the next token.
|
||||
func (t *Tree) next() item {
|
||||
if t.peekCount > 0 {
|
||||
t.peekCount--
|
||||
} else {
|
||||
t.token[0] = t.lex.nextItem()
|
||||
}
|
||||
return t.token[t.peekCount]
|
||||
}
|
||||
|
||||
// backup backs the input stream up one token.
|
||||
func (t *Tree) backup() {
|
||||
t.peekCount++
|
||||
}
|
||||
|
||||
// backup2 backs the input stream up two tokens.
|
||||
// The zeroth token is already there.
|
||||
func (t *Tree) backup2(t1 item) {
|
||||
t.token[1] = t1
|
||||
t.peekCount = 2
|
||||
}
|
||||
|
||||
// backup3 backs the input stream up three tokens
|
||||
// The zeroth token is already there.
|
||||
func (t *Tree) backup3(t2, t1 item) { // Reverse order: we're pushing back.
|
||||
t.token[1] = t1
|
||||
t.token[2] = t2
|
||||
t.peekCount = 3
|
||||
}
|
||||
|
||||
// peek returns but does not consume the next token.
|
||||
func (t *Tree) peek() item {
|
||||
if t.peekCount > 0 {
|
||||
return t.token[t.peekCount-1]
|
||||
}
|
||||
t.peekCount = 1
|
||||
t.token[0] = t.lex.nextItem()
|
||||
return t.token[0]
|
||||
}
|
||||
|
||||
// nextNonSpace returns the next non-space token.
|
||||
func (t *Tree) nextNonSpace() (token item) {
|
||||
for {
|
||||
token = t.next()
|
||||
if token.typ != itemSpace {
|
||||
break
|
||||
}
|
||||
}
|
||||
return token
|
||||
}
|
||||
|
||||
// peekNonSpace returns but does not consume the next non-space token.
|
||||
func (t *Tree) peekNonSpace() (token item) {
|
||||
for {
|
||||
token = t.next()
|
||||
if token.typ != itemSpace {
|
||||
break
|
||||
}
|
||||
}
|
||||
t.backup()
|
||||
return token
|
||||
}
|
||||
|
||||
// Parsing.
|
||||
|
||||
// New allocates a new parse tree with the given name.
|
||||
func New(name string, funcs ...map[string]interface{}) *Tree {
|
||||
return &Tree{
|
||||
Name: name,
|
||||
funcs: funcs,
|
||||
}
|
||||
}
|
||||
|
||||
// ErrorContext returns a textual representation of the location of the node in the input text.
|
||||
// The receiver is only used when the node does not have a pointer to the tree inside,
|
||||
// which can occur in old code.
|
||||
func (t *Tree) ErrorContext(n Node) (location, context string) {
|
||||
pos := int(n.Position())
|
||||
tree := n.tree()
|
||||
if tree == nil {
|
||||
tree = t
|
||||
}
|
||||
text := tree.text[:pos]
|
||||
byteNum := strings.LastIndex(text, "\n")
|
||||
if byteNum == -1 {
|
||||
byteNum = pos // On first line.
|
||||
} else {
|
||||
byteNum++ // After the newline.
|
||||
byteNum = pos - byteNum
|
||||
}
|
||||
lineNum := 1 + strings.Count(text, "\n")
|
||||
context = n.String()
|
||||
if len(context) > 20 {
|
||||
context = fmt.Sprintf("%.20s...", context)
|
||||
}
|
||||
return fmt.Sprintf("%s:%d:%d", tree.ParseName, lineNum, byteNum), context
|
||||
}
|
||||
|
||||
// errorf formats the error and terminates processing.
|
||||
func (t *Tree) errorf(format string, args ...interface{}) {
|
||||
t.Root = nil
|
||||
format = fmt.Sprintf("template: %s:%d: %s", t.ParseName, t.lex.lineNumber(), format)
|
||||
panic(fmt.Errorf(format, args...))
|
||||
}
|
||||
|
||||
// error terminates processing.
|
||||
func (t *Tree) error(err error) {
|
||||
t.errorf("%s", err)
|
||||
}
|
||||
|
||||
// expect consumes the next token and guarantees it has the required type.
|
||||
func (t *Tree) expect(expected itemType, context string) item {
|
||||
token := t.nextNonSpace()
|
||||
if token.typ != expected {
|
||||
t.unexpected(token, context)
|
||||
}
|
||||
return token
|
||||
}
|
||||
|
||||
// expectOneOf consumes the next token and guarantees it has one of the required types.
|
||||
func (t *Tree) expectOneOf(expected1, expected2 itemType, context string) item {
|
||||
token := t.nextNonSpace()
|
||||
if token.typ != expected1 && token.typ != expected2 {
|
||||
t.unexpected(token, context)
|
||||
}
|
||||
return token
|
||||
}
|
||||
|
||||
// unexpected complains about the token and terminates processing.
|
||||
func (t *Tree) unexpected(token item, context string) {
|
||||
t.errorf("unexpected %s in %s", token, context)
|
||||
}
|
||||
|
||||
// recover is the handler that turns panics into returns from the top level of Parse.
|
||||
func (t *Tree) recover(errp *error) {
|
||||
e := recover()
|
||||
if e != nil {
|
||||
if _, ok := e.(runtime.Error); ok {
|
||||
panic(e)
|
||||
}
|
||||
if t != nil {
|
||||
t.stopParse()
|
||||
}
|
||||
*errp = e.(error)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// startParse initializes the parser, using the lexer.
|
||||
func (t *Tree) startParse(funcs []map[string]interface{}, lex *lexer) {
|
||||
t.Root = nil
|
||||
t.lex = lex
|
||||
t.vars = []string{"$"}
|
||||
t.funcs = funcs
|
||||
}
|
||||
|
||||
// stopParse terminates parsing.
|
||||
func (t *Tree) stopParse() {
|
||||
t.lex = nil
|
||||
t.vars = nil
|
||||
t.funcs = nil
|
||||
}
|
||||
|
||||
// Parse parses the template definition string to construct a representation of
|
||||
// the template for execution. If either action delimiter string is empty, the
|
||||
// default ("{{" or "}}") is used. Embedded template definitions are added to
|
||||
// the treeSet map.
|
||||
func (t *Tree) Parse(text, leftDelim, rightDelim string, treeSet map[string]*Tree, funcs ...map[string]interface{}) (tree *Tree, err error) {
|
||||
defer t.recover(&err)
|
||||
t.ParseName = t.Name
|
||||
t.startParse(funcs, lex(t.Name, text, leftDelim, rightDelim))
|
||||
t.text = text
|
||||
t.parse(treeSet)
|
||||
t.add(treeSet)
|
||||
t.stopParse()
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// add adds tree to the treeSet.
|
||||
func (t *Tree) add(treeSet map[string]*Tree) {
|
||||
tree := treeSet[t.Name]
|
||||
if tree == nil || IsEmptyTree(tree.Root) {
|
||||
treeSet[t.Name] = t
|
||||
return
|
||||
}
|
||||
if !IsEmptyTree(t.Root) {
|
||||
t.errorf("template: multiple definition of template %q", t.Name)
|
||||
}
|
||||
}
|
||||
|
||||
// IsEmptyTree reports whether this tree (node) is empty of everything but space.
|
||||
func IsEmptyTree(n Node) bool {
|
||||
switch n := n.(type) {
|
||||
case nil:
|
||||
return true
|
||||
case *ActionNode:
|
||||
case *IfNode:
|
||||
case *ListNode:
|
||||
for _, node := range n.Nodes {
|
||||
if !IsEmptyTree(node) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
case *RangeNode:
|
||||
case *TemplateNode:
|
||||
case *TextNode:
|
||||
return len(bytes.TrimSpace(n.Text)) == 0
|
||||
case *WithNode:
|
||||
default:
|
||||
panic("unknown node: " + n.String())
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// parse is the top-level parser for a template, essentially the same
|
||||
// as itemList except it also parses {{define}} actions.
|
||||
// It runs to EOF.
|
||||
func (t *Tree) parse(treeSet map[string]*Tree) (next Node) {
|
||||
t.Root = t.newList(t.peek().pos)
|
||||
for t.peek().typ != itemEOF {
|
||||
if t.peek().typ == itemLeftDelim {
|
||||
delim := t.next()
|
||||
if t.nextNonSpace().typ == itemDefine {
|
||||
newT := New("definition") // name will be updated once we know it.
|
||||
newT.text = t.text
|
||||
newT.ParseName = t.ParseName
|
||||
newT.startParse(t.funcs, t.lex)
|
||||
newT.parseDefinition(treeSet)
|
||||
continue
|
||||
}
|
||||
t.backup2(delim)
|
||||
}
|
||||
n := t.textOrAction()
|
||||
if n.Type() == nodeEnd {
|
||||
t.errorf("unexpected %s", n)
|
||||
}
|
||||
t.Root.append(n)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// parseDefinition parses a {{define}} ... {{end}} template definition and
|
||||
// installs the definition in the treeSet map. The "define" keyword has already
|
||||
// been scanned.
|
||||
func (t *Tree) parseDefinition(treeSet map[string]*Tree) {
|
||||
const context = "define clause"
|
||||
name := t.expectOneOf(itemString, itemRawString, context)
|
||||
var err error
|
||||
t.Name, err = strconv.Unquote(name.val)
|
||||
if err != nil {
|
||||
t.error(err)
|
||||
}
|
||||
t.expect(itemRightDelim, context)
|
||||
var end Node
|
||||
t.Root, end = t.itemList()
|
||||
if end.Type() != nodeEnd {
|
||||
t.errorf("unexpected %s in %s", end, context)
|
||||
}
|
||||
t.add(treeSet)
|
||||
t.stopParse()
|
||||
}
|
||||
|
||||
// itemList:
|
||||
// textOrAction*
|
||||
// Terminates at {{end}} or {{else}}, returned separately.
|
||||
func (t *Tree) itemList() (list *ListNode, next Node) {
|
||||
list = t.newList(t.peekNonSpace().pos)
|
||||
for t.peekNonSpace().typ != itemEOF {
|
||||
n := t.textOrAction()
|
||||
switch n.Type() {
|
||||
case nodeEnd, nodeElse:
|
||||
return list, n
|
||||
}
|
||||
list.append(n)
|
||||
}
|
||||
t.errorf("unexpected EOF")
|
||||
return
|
||||
}
|
||||
|
||||
// textOrAction:
|
||||
// text | action
|
||||
func (t *Tree) textOrAction() Node {
|
||||
switch token := t.nextNonSpace(); token.typ {
|
||||
case itemElideNewline:
|
||||
return t.elideNewline()
|
||||
case itemText:
|
||||
return t.newText(token.pos, token.val)
|
||||
case itemLeftDelim:
|
||||
return t.action()
|
||||
default:
|
||||
t.unexpected(token, "input")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// elideNewline:
|
||||
// Remove newlines trailing rightDelim if \\ is present.
|
||||
func (t *Tree) elideNewline() Node {
|
||||
token := t.peek()
|
||||
if token.typ != itemText {
|
||||
t.unexpected(token, "input")
|
||||
return nil
|
||||
}
|
||||
|
||||
t.next()
|
||||
stripped := strings.TrimLeft(token.val, "\n\r")
|
||||
diff := len(token.val) - len(stripped)
|
||||
if diff > 0 {
|
||||
// This is a bit nasty. We mutate the token in-place to remove
|
||||
// preceding newlines.
|
||||
token.pos += Pos(diff)
|
||||
token.val = stripped
|
||||
}
|
||||
return t.newText(token.pos, token.val)
|
||||
}
|
||||
|
||||
// Action:
|
||||
// control
|
||||
// command ("|" command)*
|
||||
// Left delim is past. Now get actions.
|
||||
// First word could be a keyword such as range.
|
||||
func (t *Tree) action() (n Node) {
|
||||
switch token := t.nextNonSpace(); token.typ {
|
||||
case itemElse:
|
||||
return t.elseControl()
|
||||
case itemEnd:
|
||||
return t.endControl()
|
||||
case itemIf:
|
||||
return t.ifControl()
|
||||
case itemRange:
|
||||
return t.rangeControl()
|
||||
case itemTemplate:
|
||||
return t.templateControl()
|
||||
case itemWith:
|
||||
return t.withControl()
|
||||
}
|
||||
t.backup()
|
||||
// Do not pop variables; they persist until "end".
|
||||
return t.newAction(t.peek().pos, t.lex.lineNumber(), t.pipeline("command"))
|
||||
}
|
||||
|
||||
// Pipeline:
|
||||
// declarations? command ('|' command)*
|
||||
func (t *Tree) pipeline(context string) (pipe *PipeNode) {
|
||||
var decl []*VariableNode
|
||||
pos := t.peekNonSpace().pos
|
||||
// Are there declarations?
|
||||
for {
|
||||
if v := t.peekNonSpace(); v.typ == itemVariable {
|
||||
t.next()
|
||||
// Since space is a token, we need 3-token look-ahead here in the worst case:
|
||||
// in "$x foo" we need to read "foo" (as opposed to ":=") to know that $x is an
|
||||
// argument variable rather than a declaration. So remember the token
|
||||
// adjacent to the variable so we can push it back if necessary.
|
||||
tokenAfterVariable := t.peek()
|
||||
if next := t.peekNonSpace(); next.typ == itemColonEquals || (next.typ == itemChar && next.val == ",") {
|
||||
t.nextNonSpace()
|
||||
variable := t.newVariable(v.pos, v.val)
|
||||
decl = append(decl, variable)
|
||||
t.vars = append(t.vars, v.val)
|
||||
if next.typ == itemChar && next.val == "," {
|
||||
if context == "range" && len(decl) < 2 {
|
||||
continue
|
||||
}
|
||||
t.errorf("too many declarations in %s", context)
|
||||
}
|
||||
} else if tokenAfterVariable.typ == itemSpace {
|
||||
t.backup3(v, tokenAfterVariable)
|
||||
} else {
|
||||
t.backup2(v)
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
pipe = t.newPipeline(pos, t.lex.lineNumber(), decl)
|
||||
for {
|
||||
switch token := t.nextNonSpace(); token.typ {
|
||||
case itemRightDelim, itemRightParen:
|
||||
if len(pipe.Cmds) == 0 {
|
||||
t.errorf("missing value for %s", context)
|
||||
}
|
||||
if token.typ == itemRightParen {
|
||||
t.backup()
|
||||
}
|
||||
return
|
||||
case itemBool, itemCharConstant, itemComplex, itemDot, itemField, itemIdentifier,
|
||||
itemNumber, itemNil, itemRawString, itemString, itemVariable, itemLeftParen:
|
||||
t.backup()
|
||||
pipe.append(t.command())
|
||||
default:
|
||||
t.unexpected(token, context)
|
||||
}
|
||||
}
|
||||
}
|
||||
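Aside (not part of the vendored file): pipeline accepts an optional declaration list followed by '|'-separated commands, and PipeNode.String reproduces that shape. A small sketch, assuming the vendored import path github.com/alecthomas/template/parse and that any identifier used in an action is supplied through the funcs maps:

package main

import (
	"fmt"

	"github.com/alecthomas/template/parse"
)

func main() {
	// "len" only needs to be present (non-nil) for parsing; it is never called here.
	funcs := map[string]interface{}{"len": func(x []int) int { return len(x) }}
	trees, err := parse.Parse("t", "{{$n := .Items | len}}", "", "", funcs)
	if err != nil {
		panic(err)
	}
	action := trees["t"].Root.Nodes[0].(*parse.ActionNode)
	fmt.Println(action.Pipe) // $n := .Items | len
}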
|
||||
func (t *Tree) parseControl(allowElseIf bool, context string) (pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) {
|
||||
defer t.popVars(len(t.vars))
|
||||
line = t.lex.lineNumber()
|
||||
pipe = t.pipeline(context)
|
||||
var next Node
|
||||
list, next = t.itemList()
|
||||
switch next.Type() {
|
||||
case nodeEnd: //done
|
||||
case nodeElse:
|
||||
if allowElseIf {
|
||||
// Special case for "else if". If the "else" is followed immediately by an "if",
|
||||
// the elseControl will have left the "if" token pending. Treat
|
||||
// {{if a}}_{{else if b}}_{{end}}
|
||||
// as
|
||||
// {{if a}}_{{else}}{{if b}}_{{end}}{{end}}.
|
||||
// To do this, parse the if as usual and stop at its {{end}}; the subsequent {{end}}
|
||||
// is assumed. This technique works even for long if-else-if chains.
|
||||
// TODO: Should we allow else-if in with and range?
|
||||
if t.peek().typ == itemIf {
|
||||
t.next() // Consume the "if" token.
|
||||
elseList = t.newList(next.Position())
|
||||
elseList.append(t.ifControl())
|
||||
// Do not consume the next item - only one {{end}} required.
|
||||
break
|
||||
}
|
||||
}
|
||||
elseList, next = t.itemList()
|
||||
if next.Type() != nodeEnd {
|
||||
t.errorf("expected end; found %s", next)
|
||||
}
|
||||
}
|
||||
return pipe.Position(), line, pipe, list, elseList
|
||||
}
|
||||
|
||||
// If:
|
||||
// {{if pipeline}} itemList {{end}}
|
||||
// {{if pipeline}} itemList {{else}} itemList {{end}}
|
||||
// If keyword is past.
|
||||
func (t *Tree) ifControl() Node {
|
||||
return t.newIf(t.parseControl(true, "if"))
|
||||
}
|
||||
|
||||
// Range:
|
||||
// {{range pipeline}} itemList {{end}}
|
||||
// {{range pipeline}} itemList {{else}} itemList {{end}}
|
||||
// Range keyword is past.
|
||||
func (t *Tree) rangeControl() Node {
|
||||
return t.newRange(t.parseControl(false, "range"))
|
||||
}
|
||||
|
||||
// With:
|
||||
// {{with pipeline}} itemList {{end}}
|
||||
// {{with pipeline}} itemList {{else}} itemList {{end}}
|
||||
// If keyword is past.
|
||||
func (t *Tree) withControl() Node {
|
||||
return t.newWith(t.parseControl(false, "with"))
|
||||
}
|
||||
|
||||
// End:
|
||||
// {{end}}
|
||||
// End keyword is past.
|
||||
func (t *Tree) endControl() Node {
|
||||
return t.newEnd(t.expect(itemRightDelim, "end").pos)
|
||||
}
|
||||
|
||||
// Else:
|
||||
// {{else}}
|
||||
// Else keyword is past.
|
||||
func (t *Tree) elseControl() Node {
|
||||
// Special case for "else if".
|
||||
peek := t.peekNonSpace()
|
||||
if peek.typ == itemIf {
|
||||
// We see "{{else if ... " but in effect rewrite it to {{else}}{{if ... ".
|
||||
return t.newElse(peek.pos, t.lex.lineNumber())
|
||||
}
|
||||
return t.newElse(t.expect(itemRightDelim, "else").pos, t.lex.lineNumber())
|
||||
}
|
||||
|
||||
// Template:
|
||||
// {{template stringValue pipeline}}
|
||||
// Template keyword is past. The name must be something that can evaluate
|
||||
// to a string.
|
||||
func (t *Tree) templateControl() Node {
|
||||
var name string
|
||||
token := t.nextNonSpace()
|
||||
switch token.typ {
|
||||
case itemString, itemRawString:
|
||||
s, err := strconv.Unquote(token.val)
|
||||
if err != nil {
|
||||
t.error(err)
|
||||
}
|
||||
name = s
|
||||
default:
|
||||
t.unexpected(token, "template invocation")
|
||||
}
|
||||
var pipe *PipeNode
|
||||
if t.nextNonSpace().typ != itemRightDelim {
|
||||
t.backup()
|
||||
// Do not pop variables; they persist until "end".
|
||||
pipe = t.pipeline("template")
|
||||
}
|
||||
return t.newTemplate(token.pos, t.lex.lineNumber(), name, pipe)
|
||||
}
|
||||
|
||||
// command:
|
||||
// operand (space operand)*
|
||||
// space-separated arguments up to a pipeline character or right delimiter.
|
||||
// we consume the pipe character but leave the right delim to terminate the action.
|
||||
func (t *Tree) command() *CommandNode {
|
||||
cmd := t.newCommand(t.peekNonSpace().pos)
|
||||
for {
|
||||
t.peekNonSpace() // skip leading spaces.
|
||||
operand := t.operand()
|
||||
if operand != nil {
|
||||
cmd.append(operand)
|
||||
}
|
||||
switch token := t.next(); token.typ {
|
||||
case itemSpace:
|
||||
continue
|
||||
case itemError:
|
||||
t.errorf("%s", token.val)
|
||||
case itemRightDelim, itemRightParen:
|
||||
t.backup()
|
||||
case itemPipe:
|
||||
default:
|
||||
t.errorf("unexpected %s in operand; missing space?", token)
|
||||
}
|
||||
break
|
||||
}
|
||||
if len(cmd.Args) == 0 {
|
||||
t.errorf("empty command")
|
||||
}
|
||||
return cmd
|
||||
}
|
||||
|
||||
// operand:
|
||||
// term .Field*
|
||||
// An operand is a space-separated component of a command,
|
||||
// a term possibly followed by field accesses.
|
||||
// A nil return means the next item is not an operand.
|
||||
func (t *Tree) operand() Node {
|
||||
node := t.term()
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
if t.peek().typ == itemField {
|
||||
chain := t.newChain(t.peek().pos, node)
|
||||
for t.peek().typ == itemField {
|
||||
chain.Add(t.next().val)
|
||||
}
|
||||
// Compatibility with original API: If the term is of type NodeField
|
||||
// or NodeVariable, just put more fields on the original.
|
||||
// Otherwise, keep the Chain node.
|
||||
// TODO: Switch to Chains always when we can.
|
||||
switch node.Type() {
|
||||
case NodeField:
|
||||
node = t.newField(chain.Position(), chain.String())
|
||||
case NodeVariable:
|
||||
node = t.newVariable(chain.Position(), chain.String())
|
||||
default:
|
||||
node = chain
|
||||
}
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// term:
|
||||
// literal (number, string, nil, boolean)
|
||||
// function (identifier)
|
||||
// .
|
||||
// .Field
|
||||
// $
|
||||
// '(' pipeline ')'
|
||||
// A term is a simple "expression".
|
||||
// A nil return means the next item is not a term.
|
||||
func (t *Tree) term() Node {
|
||||
switch token := t.nextNonSpace(); token.typ {
|
||||
case itemError:
|
||||
t.errorf("%s", token.val)
|
||||
case itemIdentifier:
|
||||
if !t.hasFunction(token.val) {
|
||||
t.errorf("function %q not defined", token.val)
|
||||
}
|
||||
return NewIdentifier(token.val).SetTree(t).SetPos(token.pos)
|
||||
case itemDot:
|
||||
return t.newDot(token.pos)
|
||||
case itemNil:
|
||||
return t.newNil(token.pos)
|
||||
case itemVariable:
|
||||
return t.useVar(token.pos, token.val)
|
||||
case itemField:
|
||||
return t.newField(token.pos, token.val)
|
||||
case itemBool:
|
||||
return t.newBool(token.pos, token.val == "true")
|
||||
case itemCharConstant, itemComplex, itemNumber:
|
||||
number, err := t.newNumber(token.pos, token.val, token.typ)
|
||||
if err != nil {
|
||||
t.error(err)
|
||||
}
|
||||
return number
|
||||
case itemLeftParen:
|
||||
pipe := t.pipeline("parenthesized pipeline")
|
||||
if token := t.next(); token.typ != itemRightParen {
|
||||
t.errorf("unclosed right paren: unexpected %s", token)
|
||||
}
|
||||
return pipe
|
||||
case itemString, itemRawString:
|
||||
s, err := strconv.Unquote(token.val)
|
||||
if err != nil {
|
||||
t.error(err)
|
||||
}
|
||||
return t.newString(token.pos, token.val, s)
|
||||
}
|
||||
t.backup()
|
||||
return nil
|
||||
}
|
||||
|
||||
// hasFunction reports if a function name exists in the Tree's maps.
|
||||
func (t *Tree) hasFunction(name string) bool {
|
||||
for _, funcMap := range t.funcs {
|
||||
if funcMap == nil {
|
||||
continue
|
||||
}
|
||||
if funcMap[name] != nil {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// popVars trims the variable list to the specified length
|
||||
func (t *Tree) popVars(n int) {
|
||||
t.vars = t.vars[:n]
|
||||
}
|
||||
|
||||
// useVar returns a node for a variable reference. It errors if the
|
||||
// variable is not defined.
|
||||
func (t *Tree) useVar(pos Pos, name string) Node {
|
||||
v := t.newVariable(pos, name)
|
||||
for _, varName := range t.vars {
|
||||
if varName == v.Ident[0] {
|
||||
return v
|
||||
}
|
||||
}
|
||||
t.errorf("undefined variable %q", v.Ident[0])
|
||||
return nil
|
||||
}
|
426
Godeps/_workspace/src/github.com/alecthomas/template/parse/parse_test.go
generated
vendored
Normal file
|
@ -0,0 +1,426 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package parse
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var debug = flag.Bool("debug", false, "show the errors produced by the main tests")
|
||||
|
||||
type numberTest struct {
|
||||
text string
|
||||
isInt bool
|
||||
isUint bool
|
||||
isFloat bool
|
||||
isComplex bool
|
||||
int64
|
||||
uint64
|
||||
float64
|
||||
complex128
|
||||
}
|
||||
|
||||
var numberTests = []numberTest{
|
||||
// basics
|
||||
{"0", true, true, true, false, 0, 0, 0, 0},
|
||||
{"-0", true, true, true, false, 0, 0, 0, 0}, // check that -0 is a uint.
|
||||
{"73", true, true, true, false, 73, 73, 73, 0},
|
||||
{"073", true, true, true, false, 073, 073, 073, 0},
|
||||
{"0x73", true, true, true, false, 0x73, 0x73, 0x73, 0},
|
||||
{"-73", true, false, true, false, -73, 0, -73, 0},
|
||||
{"+73", true, false, true, false, 73, 0, 73, 0},
|
||||
{"100", true, true, true, false, 100, 100, 100, 0},
|
||||
{"1e9", true, true, true, false, 1e9, 1e9, 1e9, 0},
|
||||
{"-1e9", true, false, true, false, -1e9, 0, -1e9, 0},
|
||||
{"-1.2", false, false, true, false, 0, 0, -1.2, 0},
|
||||
{"1e19", false, true, true, false, 0, 1e19, 1e19, 0},
|
||||
{"-1e19", false, false, true, false, 0, 0, -1e19, 0},
|
||||
{"4i", false, false, false, true, 0, 0, 0, 4i},
|
||||
{"-1.2+4.2i", false, false, false, true, 0, 0, 0, -1.2 + 4.2i},
|
||||
{"073i", false, false, false, true, 0, 0, 0, 73i}, // not octal!
|
||||
// complex with 0 imaginary are float (and maybe integer)
|
||||
{"0i", true, true, true, true, 0, 0, 0, 0},
|
||||
{"-1.2+0i", false, false, true, true, 0, 0, -1.2, -1.2},
|
||||
{"-12+0i", true, false, true, true, -12, 0, -12, -12},
|
||||
{"13+0i", true, true, true, true, 13, 13, 13, 13},
|
||||
// funny bases
|
||||
{"0123", true, true, true, false, 0123, 0123, 0123, 0},
|
||||
{"-0x0", true, true, true, false, 0, 0, 0, 0},
|
||||
{"0xdeadbeef", true, true, true, false, 0xdeadbeef, 0xdeadbeef, 0xdeadbeef, 0},
|
||||
// character constants
|
||||
{`'a'`, true, true, true, false, 'a', 'a', 'a', 0},
|
||||
{`'\n'`, true, true, true, false, '\n', '\n', '\n', 0},
|
||||
{`'\\'`, true, true, true, false, '\\', '\\', '\\', 0},
|
||||
{`'\''`, true, true, true, false, '\'', '\'', '\'', 0},
|
||||
{`'\xFF'`, true, true, true, false, 0xFF, 0xFF, 0xFF, 0},
|
||||
{`'パ'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0},
|
||||
{`'\u30d1'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0},
|
||||
{`'\U000030d1'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0},
|
||||
// some broken syntax
|
||||
{text: "+-2"},
|
||||
{text: "0x123."},
|
||||
{text: "1e."},
|
||||
{text: "0xi."},
|
||||
{text: "1+2."},
|
||||
{text: "'x"},
|
||||
{text: "'xx'"},
|
||||
// Issue 8622 - 0xe parsed as floating point. Very embarrassing.
|
||||
{"0xef", true, true, true, false, 0xef, 0xef, 0xef, 0},
|
||||
}
|
||||
|
||||
func TestNumberParse(t *testing.T) {
|
||||
for _, test := range numberTests {
|
||||
// If fmt.Sscan thinks it's complex, it's complex. We can't trust the output
|
||||
// because imaginary comes out as a number.
|
||||
var c complex128
|
||||
typ := itemNumber
|
||||
var tree *Tree
|
||||
if test.text[0] == '\'' {
|
||||
typ = itemCharConstant
|
||||
} else {
|
||||
_, err := fmt.Sscan(test.text, &c)
|
||||
if err == nil {
|
||||
typ = itemComplex
|
||||
}
|
||||
}
|
||||
n, err := tree.newNumber(0, test.text, typ)
|
||||
ok := test.isInt || test.isUint || test.isFloat || test.isComplex
|
||||
if ok && err != nil {
|
||||
t.Errorf("unexpected error for %q: %s", test.text, err)
|
||||
continue
|
||||
}
|
||||
if !ok && err == nil {
|
||||
t.Errorf("expected error for %q", test.text)
|
||||
continue
|
||||
}
|
||||
if !ok {
|
||||
if *debug {
|
||||
fmt.Printf("%s\n\t%s\n", test.text, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if n.IsComplex != test.isComplex {
|
||||
t.Errorf("complex incorrect for %q; should be %t", test.text, test.isComplex)
|
||||
}
|
||||
if test.isInt {
|
||||
if !n.IsInt {
|
||||
t.Errorf("expected integer for %q", test.text)
|
||||
}
|
||||
if n.Int64 != test.int64 {
|
||||
t.Errorf("int64 for %q should be %d Is %d", test.text, test.int64, n.Int64)
|
||||
}
|
||||
} else if n.IsInt {
|
||||
t.Errorf("did not expect integer for %q", test.text)
|
||||
}
|
||||
if test.isUint {
|
||||
if !n.IsUint {
|
||||
t.Errorf("expected unsigned integer for %q", test.text)
|
||||
}
|
||||
if n.Uint64 != test.uint64 {
|
||||
t.Errorf("uint64 for %q should be %d Is %d", test.text, test.uint64, n.Uint64)
|
||||
}
|
||||
} else if n.IsUint {
|
||||
t.Errorf("did not expect unsigned integer for %q", test.text)
|
||||
}
|
||||
if test.isFloat {
|
||||
if !n.IsFloat {
|
||||
t.Errorf("expected float for %q", test.text)
|
||||
}
|
||||
if n.Float64 != test.float64 {
|
||||
t.Errorf("float64 for %q should be %g Is %g", test.text, test.float64, n.Float64)
|
||||
}
|
||||
} else if n.IsFloat {
|
||||
t.Errorf("did not expect float for %q", test.text)
|
||||
}
|
||||
if test.isComplex {
|
||||
if !n.IsComplex {
|
||||
t.Errorf("expected complex for %q", test.text)
|
||||
}
|
||||
if n.Complex128 != test.complex128 {
|
||||
t.Errorf("complex128 for %q should be %g Is %g", test.text, test.complex128, n.Complex128)
|
||||
}
|
||||
} else if n.IsComplex {
|
||||
t.Errorf("did not expect complex for %q", test.text)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type parseTest struct {
|
||||
name string
|
||||
input string
|
||||
ok bool
|
||||
result string // what the user would see in an error message.
|
||||
}
|
||||
|
||||
const (
|
||||
noError = true
|
||||
hasError = false
|
||||
)
|
||||
|
||||
var parseTests = []parseTest{
|
||||
{"empty", "", noError,
|
||||
``},
|
||||
{"comment", "{{/*\n\n\n*/}}", noError,
|
||||
``},
|
||||
{"spaces", " \t\n", noError,
|
||||
`" \t\n"`},
|
||||
{"text", "some text", noError,
|
||||
`"some text"`},
|
||||
{"emptyAction", "{{}}", hasError,
|
||||
`{{}}`},
|
||||
{"field", "{{.X}}", noError,
|
||||
`{{.X}}`},
|
||||
{"simple command", "{{printf}}", noError,
|
||||
`{{printf}}`},
|
||||
{"$ invocation", "{{$}}", noError,
|
||||
"{{$}}"},
|
||||
{"variable invocation", "{{with $x := 3}}{{$x 23}}{{end}}", noError,
|
||||
"{{with $x := 3}}{{$x 23}}{{end}}"},
|
||||
{"variable with fields", "{{$.I}}", noError,
|
||||
"{{$.I}}"},
|
||||
{"multi-word command", "{{printf `%d` 23}}", noError,
|
||||
"{{printf `%d` 23}}"},
|
||||
{"pipeline", "{{.X|.Y}}", noError,
|
||||
`{{.X | .Y}}`},
|
||||
{"pipeline with decl", "{{$x := .X|.Y}}", noError,
|
||||
`{{$x := .X | .Y}}`},
|
||||
{"nested pipeline", "{{.X (.Y .Z) (.A | .B .C) (.E)}}", noError,
|
||||
`{{.X (.Y .Z) (.A | .B .C) (.E)}}`},
|
||||
{"field applied to parentheses", "{{(.Y .Z).Field}}", noError,
|
||||
`{{(.Y .Z).Field}}`},
|
||||
{"simple if", "{{if .X}}hello{{end}}", noError,
|
||||
`{{if .X}}"hello"{{end}}`},
|
||||
{"if with else", "{{if .X}}true{{else}}false{{end}}", noError,
|
||||
`{{if .X}}"true"{{else}}"false"{{end}}`},
|
||||
{"if with else if", "{{if .X}}true{{else if .Y}}false{{end}}", noError,
|
||||
`{{if .X}}"true"{{else}}{{if .Y}}"false"{{end}}{{end}}`},
|
||||
{"if else chain", "+{{if .X}}X{{else if .Y}}Y{{else if .Z}}Z{{end}}+", noError,
|
||||
`"+"{{if .X}}"X"{{else}}{{if .Y}}"Y"{{else}}{{if .Z}}"Z"{{end}}{{end}}{{end}}"+"`},
|
||||
{"simple range", "{{range .X}}hello{{end}}", noError,
|
||||
`{{range .X}}"hello"{{end}}`},
|
||||
{"chained field range", "{{range .X.Y.Z}}hello{{end}}", noError,
|
||||
`{{range .X.Y.Z}}"hello"{{end}}`},
|
||||
{"nested range", "{{range .X}}hello{{range .Y}}goodbye{{end}}{{end}}", noError,
|
||||
`{{range .X}}"hello"{{range .Y}}"goodbye"{{end}}{{end}}`},
|
||||
{"range with else", "{{range .X}}true{{else}}false{{end}}", noError,
|
||||
`{{range .X}}"true"{{else}}"false"{{end}}`},
|
||||
{"range over pipeline", "{{range .X|.M}}true{{else}}false{{end}}", noError,
|
||||
`{{range .X | .M}}"true"{{else}}"false"{{end}}`},
|
||||
{"range []int", "{{range .SI}}{{.}}{{end}}", noError,
|
||||
`{{range .SI}}{{.}}{{end}}`},
|
||||
{"range 1 var", "{{range $x := .SI}}{{.}}{{end}}", noError,
|
||||
`{{range $x := .SI}}{{.}}{{end}}`},
|
||||
{"range 2 vars", "{{range $x, $y := .SI}}{{.}}{{end}}", noError,
|
||||
`{{range $x, $y := .SI}}{{.}}{{end}}`},
|
||||
{"constants", "{{range .SI 1 -3.2i true false 'a' nil}}{{end}}", noError,
|
||||
`{{range .SI 1 -3.2i true false 'a' nil}}{{end}}`},
|
||||
{"template", "{{template `x`}}", noError,
|
||||
`{{template "x"}}`},
|
||||
{"template with arg", "{{template `x` .Y}}", noError,
|
||||
`{{template "x" .Y}}`},
|
||||
{"with", "{{with .X}}hello{{end}}", noError,
|
||||
`{{with .X}}"hello"{{end}}`},
|
||||
{"with with else", "{{with .X}}hello{{else}}goodbye{{end}}", noError,
|
||||
`{{with .X}}"hello"{{else}}"goodbye"{{end}}`},
|
||||
{"elide newline", "{{true}}\\\n ", noError,
|
||||
`{{true}}" "`},
|
||||
// Errors.
|
||||
{"unclosed action", "hello{{range", hasError, ""},
|
||||
{"unmatched end", "{{end}}", hasError, ""},
|
||||
{"missing end", "hello{{range .x}}", hasError, ""},
|
||||
{"missing end after else", "hello{{range .x}}{{else}}", hasError, ""},
|
||||
{"undefined function", "hello{{undefined}}", hasError, ""},
|
||||
{"undefined variable", "{{$x}}", hasError, ""},
|
||||
{"variable undefined after end", "{{with $x := 4}}{{end}}{{$x}}", hasError, ""},
|
||||
{"variable undefined in template", "{{template $v}}", hasError, ""},
|
||||
{"declare with field", "{{with $x.Y := 4}}{{end}}", hasError, ""},
|
||||
{"template with field ref", "{{template .X}}", hasError, ""},
|
||||
{"template with var", "{{template $v}}", hasError, ""},
|
||||
{"invalid punctuation", "{{printf 3, 4}}", hasError, ""},
|
||||
{"multidecl outside range", "{{with $v, $u := 3}}{{end}}", hasError, ""},
|
||||
{"too many decls in range", "{{range $u, $v, $w := 3}}{{end}}", hasError, ""},
|
||||
{"dot applied to parentheses", "{{printf (printf .).}}", hasError, ""},
|
||||
{"adjacent args", "{{printf 3`x`}}", hasError, ""},
|
||||
{"adjacent args with .", "{{printf `x`.}}", hasError, ""},
|
||||
{"extra end after if", "{{if .X}}a{{else if .Y}}b{{end}}{{end}}", hasError, ""},
|
||||
{"invalid newline elision", "{{true}}\\{{true}}", hasError, ""},
|
||||
// Equals (and other chars) do not assignments make (yet).
|
||||
{"bug0a", "{{$x := 0}}{{$x}}", noError, "{{$x := 0}}{{$x}}"},
|
||||
{"bug0b", "{{$x = 1}}{{$x}}", hasError, ""},
|
||||
{"bug0c", "{{$x ! 2}}{{$x}}", hasError, ""},
|
||||
{"bug0d", "{{$x % 3}}{{$x}}", hasError, ""},
|
||||
// Check the parse fails for := rather than comma.
|
||||
{"bug0e", "{{range $x := $y := 3}}{{end}}", hasError, ""},
|
||||
// Another bug: variable read must ignore following punctuation.
|
||||
{"bug1a", "{{$x:=.}}{{$x!2}}", hasError, ""}, // ! is just illegal here.
|
||||
{"bug1b", "{{$x:=.}}{{$x+2}}", hasError, ""}, // $x+2 should not parse as ($x) (+2).
|
||||
{"bug1c", "{{$x:=.}}{{$x +2}}", noError, "{{$x := .}}{{$x +2}}"}, // It's OK with a space.
|
||||
}
|
||||
|
||||
var builtins = map[string]interface{}{
|
||||
"printf": fmt.Sprintf,
|
||||
}
|
||||
|
||||
func testParse(doCopy bool, t *testing.T) {
|
||||
textFormat = "%q"
|
||||
defer func() { textFormat = "%s" }()
|
||||
for _, test := range parseTests {
|
||||
tmpl, err := New(test.name).Parse(test.input, "", "", make(map[string]*Tree), builtins)
|
||||
switch {
|
||||
case err == nil && !test.ok:
|
||||
t.Errorf("%q: expected error; got none", test.name)
|
||||
continue
|
||||
case err != nil && test.ok:
|
||||
t.Errorf("%q: unexpected error: %v", test.name, err)
|
||||
continue
|
||||
case err != nil && !test.ok:
|
||||
// expected error, got one
|
||||
if *debug {
|
||||
fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
var result string
|
||||
if doCopy {
|
||||
result = tmpl.Root.Copy().String()
|
||||
} else {
|
||||
result = tmpl.Root.String()
|
||||
}
|
||||
if result != test.result {
|
||||
t.Errorf("%s=(%q): got\n\t%v\nexpected\n\t%v", test.name, test.input, result, test.result)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
testParse(false, t)
|
||||
}
|
||||
|
||||
// Same as TestParse, but we copy the node first
|
||||
func TestParseCopy(t *testing.T) {
|
||||
testParse(true, t)
|
||||
}
|
||||
|
||||
type isEmptyTest struct {
|
||||
name string
|
||||
input string
|
||||
empty bool
|
||||
}
|
||||
|
||||
var isEmptyTests = []isEmptyTest{
|
||||
{"empty", ``, true},
|
||||
{"nonempty", `hello`, false},
|
||||
{"spaces only", " \t\n \t\n", true},
|
||||
{"definition", `{{define "x"}}something{{end}}`, true},
|
||||
{"definitions and space", "{{define `x`}}something{{end}}\n\n{{define `y`}}something{{end}}\n\n", true},
|
||||
{"definitions and text", "{{define `x`}}something{{end}}\nx\n{{define `y`}}something{{end}}\ny\n", false},
|
||||
{"definition and action", "{{define `x`}}something{{end}}{{if 3}}foo{{end}}", false},
|
||||
}
|
||||
|
||||
func TestIsEmpty(t *testing.T) {
|
||||
if !IsEmptyTree(nil) {
|
||||
t.Errorf("nil tree is not empty")
|
||||
}
|
||||
for _, test := range isEmptyTests {
|
||||
tree, err := New("root").Parse(test.input, "", "", make(map[string]*Tree), nil)
|
||||
if err != nil {
|
||||
t.Errorf("%q: unexpected error: %v", test.name, err)
|
||||
continue
|
||||
}
|
||||
if empty := IsEmptyTree(tree.Root); empty != test.empty {
|
||||
t.Errorf("%q: expected %t got %t", test.name, test.empty, empty)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestErrorContextWithTreeCopy(t *testing.T) {
|
||||
tree, err := New("root").Parse("{{if true}}{{end}}", "", "", make(map[string]*Tree), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected tree parse failure: %v", err)
|
||||
}
|
||||
treeCopy := tree.Copy()
|
||||
wantLocation, wantContext := tree.ErrorContext(tree.Root.Nodes[0])
|
||||
gotLocation, gotContext := treeCopy.ErrorContext(treeCopy.Root.Nodes[0])
|
||||
if wantLocation != gotLocation {
|
||||
t.Errorf("wrong error location want %q got %q", wantLocation, gotLocation)
|
||||
}
|
||||
if wantContext != gotContext {
|
||||
t.Errorf("wrong error location want %q got %q", wantContext, gotContext)
|
||||
}
|
||||
}
|
||||
|
||||
// All failures, and the result is a string that must appear in the error message.
|
||||
var errorTests = []parseTest{
|
||||
// Check line numbers are accurate.
|
||||
{"unclosed1",
|
||||
"line1\n{{",
|
||||
hasError, `unclosed1:2: unexpected unclosed action in command`},
|
||||
{"unclosed2",
|
||||
"line1\n{{define `x`}}line2\n{{",
|
||||
hasError, `unclosed2:3: unexpected unclosed action in command`},
|
||||
// Specific errors.
|
||||
{"function",
|
||||
"{{foo}}",
|
||||
hasError, `function "foo" not defined`},
|
||||
{"comment",
|
||||
"{{/*}}",
|
||||
hasError, `unclosed comment`},
|
||||
{"lparen",
|
||||
"{{.X (1 2 3}}",
|
||||
hasError, `unclosed left paren`},
|
||||
{"rparen",
|
||||
"{{.X 1 2 3)}}",
|
||||
hasError, `unexpected ")"`},
|
||||
{"space",
|
||||
"{{`x`3}}",
|
||||
hasError, `missing space?`},
|
||||
{"idchar",
|
||||
"{{a#}}",
|
||||
hasError, `'#'`},
|
||||
{"charconst",
|
||||
"{{'a}}",
|
||||
hasError, `unterminated character constant`},
|
||||
{"stringconst",
|
||||
`{{"a}}`,
|
||||
hasError, `unterminated quoted string`},
|
||||
{"rawstringconst",
|
||||
"{{`a}}",
|
||||
hasError, `unterminated raw quoted string`},
|
||||
{"number",
|
||||
"{{0xi}}",
|
||||
hasError, `number syntax`},
|
||||
{"multidefine",
|
||||
"{{define `a`}}a{{end}}{{define `a`}}b{{end}}",
|
||||
hasError, `multiple definition of template`},
|
||||
{"eof",
|
||||
"{{range .X}}",
|
||||
hasError, `unexpected EOF`},
|
||||
{"variable",
|
||||
// Declare $x so it's defined, to avoid that error, and then check we don't parse a declaration.
|
||||
"{{$x := 23}}{{with $x.y := 3}}{{$x 23}}{{end}}",
|
||||
hasError, `unexpected ":="`},
|
||||
{"multidecl",
|
||||
"{{$a,$b,$c := 23}}",
|
||||
hasError, `too many declarations`},
|
||||
{"undefvar",
|
||||
"{{$a}}",
|
||||
hasError, `undefined variable`},
|
||||
}
|
||||
|
||||
func TestErrors(t *testing.T) {
|
||||
for _, test := range errorTests {
|
||||
_, err := New(test.name).Parse(test.input, "", "", make(map[string]*Tree))
|
||||
if err == nil {
|
||||
t.Errorf("%q: expected error", test.name)
|
||||
continue
|
||||
}
|
||||
if !strings.Contains(err.Error(), test.result) {
|
||||
t.Errorf("%q: error %q does not contain %q", test.name, err, test.result)
|
||||
}
|
||||
}
|
||||
}
|
217
Godeps/_workspace/src/github.com/alecthomas/template/template.go
generated
vendored
Normal file
|
@ -0,0 +1,217 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package template
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/alecthomas/template/parse"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// common holds the information shared by related templates.
|
||||
type common struct {
|
||||
tmpl map[string]*Template
|
||||
// We use two maps, one for parsing and one for execution.
|
||||
// This separation makes the API cleaner since it doesn't
|
||||
// expose reflection to the client.
|
||||
parseFuncs FuncMap
|
||||
execFuncs map[string]reflect.Value
|
||||
}
|
||||
|
||||
// Template is the representation of a parsed template. The *parse.Tree
|
||||
// field is exported only for use by html/template and should be treated
|
||||
// as unexported by all other clients.
|
||||
type Template struct {
|
||||
name string
|
||||
*parse.Tree
|
||||
*common
|
||||
leftDelim string
|
||||
rightDelim string
|
||||
}
|
||||
|
||||
// New allocates a new template with the given name.
|
||||
func New(name string) *Template {
|
||||
return &Template{
|
||||
name: name,
|
||||
}
|
||||
}
|
||||
|
||||
// Name returns the name of the template.
|
||||
func (t *Template) Name() string {
|
||||
return t.name
|
||||
}
|
||||
|
||||
// New allocates a new template associated with the given one and with the same
|
||||
// delimiters. The association, which is transitive, allows one template to
|
||||
// invoke another with a {{template}} action.
|
||||
func (t *Template) New(name string) *Template {
|
||||
t.init()
|
||||
return &Template{
|
||||
name: name,
|
||||
common: t.common,
|
||||
leftDelim: t.leftDelim,
|
||||
rightDelim: t.rightDelim,
|
||||
}
|
||||
}
|
||||
|
||||
func (t *Template) init() {
|
||||
if t.common == nil {
|
||||
t.common = new(common)
|
||||
t.tmpl = make(map[string]*Template)
|
||||
t.parseFuncs = make(FuncMap)
|
||||
t.execFuncs = make(map[string]reflect.Value)
|
||||
}
|
||||
}
|
||||
|
||||
// Clone returns a duplicate of the template, including all associated
|
||||
// templates. The actual representation is not copied, but the name space of
|
||||
// associated templates is, so further calls to Parse in the copy will add
|
||||
// templates to the copy but not to the original. Clone can be used to prepare
|
||||
// common templates and use them with variant definitions for other templates
|
||||
// by adding the variants after the clone is made.
|
||||
func (t *Template) Clone() (*Template, error) {
|
||||
nt := t.copy(nil)
|
||||
nt.init()
|
||||
nt.tmpl[t.name] = nt
|
||||
for k, v := range t.tmpl {
|
||||
if k == t.name { // Already installed.
|
||||
continue
|
||||
}
|
||||
// The associated templates share nt's common structure.
|
||||
tmpl := v.copy(nt.common)
|
||||
nt.tmpl[k] = tmpl
|
||||
}
|
||||
for k, v := range t.parseFuncs {
|
||||
nt.parseFuncs[k] = v
|
||||
}
|
||||
for k, v := range t.execFuncs {
|
||||
nt.execFuncs[k] = v
|
||||
}
|
||||
return nt, nil
|
||||
}
|
||||
|
||||
// copy returns a shallow copy of t, with common set to the argument.
|
||||
func (t *Template) copy(c *common) *Template {
|
||||
nt := New(t.name)
|
||||
nt.Tree = t.Tree
|
||||
nt.common = c
|
||||
nt.leftDelim = t.leftDelim
|
||||
nt.rightDelim = t.rightDelim
|
||||
return nt
|
||||
}
|
||||
|
||||
// AddParseTree creates a new template with the name and parse tree
|
||||
// and associates it with t.
|
||||
func (t *Template) AddParseTree(name string, tree *parse.Tree) (*Template, error) {
|
||||
if t.common != nil && t.tmpl[name] != nil {
|
||||
return nil, fmt.Errorf("template: redefinition of template %q", name)
|
||||
}
|
||||
nt := t.New(name)
|
||||
nt.Tree = tree
|
||||
t.tmpl[name] = nt
|
||||
return nt, nil
|
||||
}
|
||||
|
||||
// Templates returns a slice of the templates associated with t, including t
|
||||
// itself.
|
||||
func (t *Template) Templates() []*Template {
|
||||
if t.common == nil {
|
||||
return nil
|
||||
}
|
||||
// Return a slice so we don't expose the map.
|
||||
m := make([]*Template, 0, len(t.tmpl))
|
||||
for _, v := range t.tmpl {
|
||||
m = append(m, v)
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// Delims sets the action delimiters to the specified strings, to be used in
|
||||
// subsequent calls to Parse, ParseFiles, or ParseGlob. Nested template
|
||||
// definitions will inherit the settings. An empty delimiter stands for the
|
||||
// corresponding default: {{ or }}.
|
||||
// The return value is the template, so calls can be chained.
|
||||
func (t *Template) Delims(left, right string) *Template {
|
||||
t.leftDelim = left
|
||||
t.rightDelim = right
|
||||
return t
|
||||
}
|
||||
|
||||
// Funcs adds the elements of the argument map to the template's function map.
|
||||
// It panics if a value in the map is not a function with appropriate return
|
||||
// type. However, it is legal to overwrite elements of the map. The return
|
||||
// value is the template, so calls can be chained.
|
||||
func (t *Template) Funcs(funcMap FuncMap) *Template {
|
||||
t.init()
|
||||
addValueFuncs(t.execFuncs, funcMap)
|
||||
addFuncs(t.parseFuncs, funcMap)
|
||||
return t
|
||||
}
|
||||
|
||||
// Lookup returns the template with the given name that is associated with t,
|
||||
// or nil if there is no such template.
|
||||
func (t *Template) Lookup(name string) *Template {
|
||||
if t.common == nil {
|
||||
return nil
|
||||
}
|
||||
return t.tmpl[name]
|
||||
}
|
||||
|
||||
// Parse parses a string into a template. Nested template definitions will be
|
||||
// associated with the top-level template t. Parse may be called multiple times
|
||||
// to parse definitions of templates to associate with t. It is an error if a
|
||||
// resulting template is non-empty (contains content other than template
|
||||
// definitions) and would replace a non-empty template with the same name.
|
||||
// (In multiple calls to Parse with the same receiver template, only one call
|
||||
// can contain text other than space, comments, and template definitions.)
|
||||
func (t *Template) Parse(text string) (*Template, error) {
|
||||
t.init()
|
||||
trees, err := parse.Parse(t.name, text, t.leftDelim, t.rightDelim, t.parseFuncs, builtins)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Add the newly parsed trees, including the one for t, into our common structure.
|
||||
for name, tree := range trees {
|
||||
// If the name we parsed is the name of this template, overwrite this template.
|
||||
// The associate method checks it's not a redefinition.
|
||||
tmpl := t
|
||||
if name != t.name {
|
||||
tmpl = t.New(name)
|
||||
}
|
||||
// Even if t == tmpl, we need to install it in the common.tmpl map.
|
||||
if replace, err := t.associate(tmpl, tree); err != nil {
|
||||
return nil, err
|
||||
} else if replace {
|
||||
tmpl.Tree = tree
|
||||
}
|
||||
tmpl.leftDelim = t.leftDelim
|
||||
tmpl.rightDelim = t.rightDelim
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// associate installs the new template into the group of templates associated
|
||||
// with t. It is an error to reuse a name except to overwrite an empty
|
||||
// template. The two are already known to share the common structure.
|
||||
// The boolean return value reports whether to store this tree as t.Tree.
|
||||
func (t *Template) associate(new *Template, tree *parse.Tree) (bool, error) {
|
||||
if new.common != t.common {
|
||||
panic("internal error: associate not common")
|
||||
}
|
||||
name := new.name
|
||||
if old := t.tmpl[name]; old != nil {
|
||||
oldIsEmpty := parse.IsEmptyTree(old.Root)
|
||||
newIsEmpty := parse.IsEmptyTree(tree.Root)
|
||||
if newIsEmpty {
|
||||
// Whether old is empty or not, new is empty; no reason to replace old.
|
||||
return false, nil
|
||||
}
|
||||
if !oldIsEmpty {
|
||||
return false, fmt.Errorf("template: redefinition of template %q", name)
|
||||
}
|
||||
}
|
||||
t.tmpl[name] = new
|
||||
return true, nil
|
||||
}
|
2
Godeps/_workspace/src/github.com/alecthomas/template/testdata/file1.tmpl
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
{{define "x"}}TEXT{{end}}
|
||||
{{define "dotV"}}{{.V}}{{end}}
|
2
Godeps/_workspace/src/github.com/alecthomas/template/testdata/file2.tmpl
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
{{define "dot"}}{{.}}{{end}}
|
||||
{{define "nested"}}{{template "dot" .}}{{end}}
|
3
Godeps/_workspace/src/github.com/alecthomas/template/testdata/tmpl1.tmpl
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
template1
|
||||
{{define "x"}}x{{end}}
|
||||
{{template "y"}}
|
3
Godeps/_workspace/src/github.com/alecthomas/template/testdata/tmpl2.tmpl
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
template2
|
||||
{{define "y"}}y{{end}}
|
||||
{{template "x"}}
|
19
Godeps/_workspace/src/github.com/alecthomas/units/COPYING
generated
vendored
Normal file
|
@ -0,0 +1,19 @@
|
|||
Copyright (C) 2014 Alec Thomas
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
11
Godeps/_workspace/src/github.com/alecthomas/units/README.md
generated
vendored
Normal file
|
@ -0,0 +1,11 @@
|
|||
# Units - Helpful unit multipliers and functions for Go
|
||||
|
||||
The goal of this package is to have functionality similar to the [time](http://golang.org/pkg/time/) package.
|
||||
|
||||
It allows for code like this:
|
||||
|
||||
```go
|
||||
n, err := ParseBase2Bytes("1KB")
|
||||
// n == 1024
|
||||
n = units.Mebibyte * 512
|
||||
```
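
A rough companion sketch of the base-2 vs. metric parsers added in `bytes.go` below; the `main` wrapper is only for illustration:

```go
package main

import (
	"fmt"

	"github.com/alecthomas/units"
)

func main() {
	// Base-2 parsing: "KB" falls back to the old power-of-2 map, so it is 1024.
	b2, _ := units.ParseBase2Bytes("1KB")
	fmt.Println(int64(b2)) // 1024

	// Metric parsing: "KB" is 1000.
	mb, _ := units.ParseMetricBytes("1KB")
	fmt.Println(int64(mb)) // 1000

	// Strict parsing: "KiB" is 1024, while "KB" would be 1000.
	sb, _ := units.ParseStrictBytes("1KiB")
	fmt.Println(sb) // 1024
}
```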
|
83
Godeps/_workspace/src/github.com/alecthomas/units/bytes.go
generated
vendored
Normal file
|
@ -0,0 +1,83 @@
|
|||
package units
|
||||
|
||||
// Base2Bytes is the old non-SI power-of-2 byte scale (1024 bytes in a kilobyte,
|
||||
// etc.).
|
||||
type Base2Bytes int64
|
||||
|
||||
// Base-2 byte units.
|
||||
const (
|
||||
Kibibyte Base2Bytes = 1024
|
||||
KiB = Kibibyte
|
||||
Mebibyte = Kibibyte * 1024
|
||||
MiB = Mebibyte
|
||||
Gibibyte = Mebibyte * 1024
|
||||
GiB = Gibibyte
|
||||
Tebibyte = Gibibyte * 1024
|
||||
TiB = Tebibyte
|
||||
Pebibyte = Tebibyte * 1024
|
||||
PiB = Pebibyte
|
||||
Exbibyte = Pebibyte * 1024
|
||||
EiB = Exbibyte
|
||||
)
|
||||
|
||||
var (
|
||||
bytesUnitMap = MakeUnitMap("iB", "B", 1024)
|
||||
oldBytesUnitMap = MakeUnitMap("B", "B", 1024)
|
||||
)
|
||||
|
||||
// ParseBase2Bytes supports both iB and B in base-2 multipliers. That is, KB
|
||||
// and KiB are both 1024.
|
||||
func ParseBase2Bytes(s string) (Base2Bytes, error) {
|
||||
n, err := ParseUnit(s, bytesUnitMap)
|
||||
if err != nil {
|
||||
n, err = ParseUnit(s, oldBytesUnitMap)
|
||||
}
|
||||
return Base2Bytes(n), err
|
||||
}
|
||||
|
||||
func (b Base2Bytes) String() string {
|
||||
return ToString(int64(b), 1024, "iB", "B")
|
||||
}
|
||||
|
||||
var (
|
||||
metricBytesUnitMap = MakeUnitMap("B", "B", 1000)
|
||||
)
|
||||
|
||||
// MetricBytes are SI byte units (1000 bytes in a kilobyte).
|
||||
type MetricBytes SI
|
||||
|
||||
// SI base-10 byte units.
|
||||
const (
|
||||
Kilobyte MetricBytes = 1000
|
||||
KB = Kilobyte
|
||||
Megabyte = Kilobyte * 1000
|
||||
MB = Megabyte
|
||||
Gigabyte = Megabyte * 1000
|
||||
GB = Gigabyte
|
||||
Terabyte = Gigabyte * 1000
|
||||
TB = Terabyte
|
||||
Petabyte = Terabyte * 1000
|
||||
PB = Petabyte
|
||||
Exabyte = Petabyte * 1000
|
||||
EB = Exabyte
|
||||
)
|
||||
|
||||
// ParseMetricBytes parses base-10 metric byte units. That is, KB is 1000 bytes.
|
||||
func ParseMetricBytes(s string) (MetricBytes, error) {
|
||||
n, err := ParseUnit(s, metricBytesUnitMap)
|
||||
return MetricBytes(n), err
|
||||
}
|
||||
|
||||
func (m MetricBytes) String() string {
|
||||
return ToString(int64(m), 1000, "B", "B")
|
||||
}
|
||||
|
||||
// ParseStrictBytes supports both iB and B suffixes for base 2 and metric,
|
||||
// respectively. That is, KiB represents 1024 and KB represents 1000.
|
||||
func ParseStrictBytes(s string) (int64, error) {
|
||||
n, err := ParseUnit(s, bytesUnitMap)
|
||||
if err != nil {
|
||||
n, err = ParseUnit(s, metricBytesUnitMap)
|
||||
}
|
||||
return int64(n), err
|
||||
}
|
49
Godeps/_workspace/src/github.com/alecthomas/units/bytes_test.go
generated
vendored
Normal file
|
@ -0,0 +1,49 @@
|
|||
package units
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestBase2BytesString(t *testing.T) {
|
||||
assert.Equal(t, Base2Bytes(0).String(), "0B")
|
||||
assert.Equal(t, Base2Bytes(1025).String(), "1KiB1B")
|
||||
assert.Equal(t, Base2Bytes(1048577).String(), "1MiB1B")
|
||||
}
|
||||
|
||||
func TestParseBase2Bytes(t *testing.T) {
|
||||
n, err := ParseBase2Bytes("0B")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 0, n)
|
||||
n, err = ParseBase2Bytes("1KB")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 1024, n)
|
||||
n, err = ParseBase2Bytes("1MB1KB25B")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 1049625, n)
|
||||
n, err = ParseBase2Bytes("1.5MB")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 1572864, n)
|
||||
}
|
||||
|
||||
func TestMetricBytesString(t *testing.T) {
|
||||
assert.Equal(t, MetricBytes(0).String(), "0B")
|
||||
assert.Equal(t, MetricBytes(1001).String(), "1KB1B")
|
||||
assert.Equal(t, MetricBytes(1001025).String(), "1MB1KB25B")
|
||||
}
|
||||
|
||||
func TestParseMetricBytes(t *testing.T) {
|
||||
n, err := ParseMetricBytes("0B")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 0, n)
|
||||
n, err = ParseMetricBytes("1KB1B")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 1001, n)
|
||||
n, err = ParseMetricBytes("1MB1KB25B")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 1001025, n)
|
||||
n, err = ParseMetricBytes("1.5MB")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 1500000, n)
|
||||
}
|
13
Godeps/_workspace/src/github.com/alecthomas/units/doc.go
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
|||
// Package units provides helpful unit multipliers and functions for Go.
|
||||
//
|
||||
// The goal of this package is to have functionality similar to the time [1] package.
|
||||
//
|
||||
//
|
||||
// [1] http://golang.org/pkg/time/
|
||||
//
|
||||
// It allows for code like this:
|
||||
//
|
||||
// n, err := ParseBase2Bytes("1KB")
|
||||
// // n == 1024
|
||||
// n = units.Mebibyte * 512
|
||||
package units
|
26
Godeps/_workspace/src/github.com/alecthomas/units/si.go
generated
vendored
Normal file
|
@ -0,0 +1,26 @@
|
|||
package units
|
||||
|
||||
// SI units.
|
||||
type SI int64
|
||||
|
||||
// SI unit multiples.
|
||||
const (
|
||||
Kilo SI = 1000
|
||||
Mega = Kilo * 1000
|
||||
Giga = Mega * 1000
|
||||
Tera = Giga * 1000
|
||||
Peta = Tera * 1000
|
||||
Exa = Peta * 1000
|
||||
)
|
||||
|
||||
func MakeUnitMap(suffix, shortSuffix string, scale int64) map[string]float64 {
|
||||
return map[string]float64{
|
||||
shortSuffix: 1,
|
||||
"K" + suffix: float64(scale),
|
||||
"M" + suffix: float64(scale * scale),
|
||||
"G" + suffix: float64(scale * scale * scale),
|
||||
"T" + suffix: float64(scale * scale * scale * scale),
|
||||
"P" + suffix: float64(scale * scale * scale * scale * scale),
|
||||
"E" + suffix: float64(scale * scale * scale * scale * scale * scale),
|
||||
}
|
||||
}
|
138
Godeps/_workspace/src/github.com/alecthomas/units/util.go
generated
vendored
Normal file
|
@ -0,0 +1,138 @@
|
|||
package units
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
siUnits = []string{"", "K", "M", "G", "T", "P", "E"}
|
||||
)
|
||||
|
||||
func ToString(n int64, scale int64, suffix, baseSuffix string) string {
|
||||
mn := len(siUnits)
|
||||
out := make([]string, mn)
|
||||
for i, m := range siUnits {
|
||||
if n%scale != 0 || i == 0 && n == 0 {
|
||||
s := suffix
|
||||
if i == 0 {
|
||||
s = baseSuffix
|
||||
}
|
||||
out[mn-1-i] = fmt.Sprintf("%d%s%s", n%scale, m, s)
|
||||
}
|
||||
n /= scale
|
||||
if n == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
return strings.Join(out, "")
|
||||
}
|
||||
|
||||
// Below code ripped straight from http://golang.org/src/pkg/time/format.go?s=33392:33438#L1123
|
||||
var errLeadingInt = errors.New("units: bad [0-9]*") // never printed
|
||||
|
||||
// leadingInt consumes the leading [0-9]* from s.
|
||||
func leadingInt(s string) (x int64, rem string, err error) {
|
||||
i := 0
|
||||
for ; i < len(s); i++ {
|
||||
c := s[i]
|
||||
if c < '0' || c > '9' {
|
||||
break
|
||||
}
|
||||
if x >= (1<<63-10)/10 {
|
||||
// overflow
|
||||
return 0, "", errLeadingInt
|
||||
}
|
||||
x = x*10 + int64(c) - '0'
|
||||
}
|
||||
return x, s[i:], nil
|
||||
}
|
||||
|
||||
func ParseUnit(s string, unitMap map[string]float64) (int64, error) {
|
||||
// [-+]?([0-9]*(\.[0-9]*)?[a-z]+)+
|
||||
orig := s
|
||||
f := float64(0)
|
||||
neg := false
|
||||
|
||||
// Consume [-+]?
|
||||
if s != "" {
|
||||
c := s[0]
|
||||
if c == '-' || c == '+' {
|
||||
neg = c == '-'
|
||||
s = s[1:]
|
||||
}
|
||||
}
|
||||
// Special case: if all that is left is "0", this is zero.
|
||||
if s == "0" {
|
||||
return 0, nil
|
||||
}
|
||||
if s == "" {
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
for s != "" {
|
||||
g := float64(0) // this element of the sequence
|
||||
|
||||
var x int64
|
||||
var err error
|
||||
|
||||
// The next character must be [0-9.]
|
||||
if !(s[0] == '.' || ('0' <= s[0] && s[0] <= '9')) {
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
// Consume [0-9]*
|
||||
pl := len(s)
|
||||
x, s, err = leadingInt(s)
|
||||
if err != nil {
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
g = float64(x)
|
||||
pre := pl != len(s) // whether we consumed anything before a period
|
||||
|
||||
// Consume (\.[0-9]*)?
|
||||
post := false
|
||||
if s != "" && s[0] == '.' {
|
||||
s = s[1:]
|
||||
pl := len(s)
|
||||
x, s, err = leadingInt(s)
|
||||
if err != nil {
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
scale := 1.0
|
||||
for n := pl - len(s); n > 0; n-- {
|
||||
scale *= 10
|
||||
}
|
||||
g += float64(x) / scale
|
||||
post = pl != len(s)
|
||||
}
|
||||
if !pre && !post {
|
||||
// no digits (e.g. ".s" or "-.s")
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
|
||||
// Consume unit.
|
||||
i := 0
|
||||
for ; i < len(s); i++ {
|
||||
c := s[i]
|
||||
if c == '.' || ('0' <= c && c <= '9') {
|
||||
break
|
||||
}
|
||||
}
|
||||
u := s[:i]
|
||||
s = s[i:]
|
||||
unit, ok := unitMap[u]
|
||||
if !ok {
|
||||
return 0, errors.New("units: unknown unit " + u + " in " + orig)
|
||||
}
|
||||
|
||||
f += g * unit
|
||||
}
|
||||
|
||||
if neg {
|
||||
f = -f
|
||||
}
|
||||
if f < float64(-1<<63) || f > float64(1<<63-1) {
|
||||
return 0, errors.New("units: overflow parsing unit")
|
||||
}
|
||||
return int64(f), nil
|
||||
}
|
22
Godeps/_workspace/src/github.com/cenkalti/backoff/.gitignore
generated
vendored
Normal file
|
@ -0,0 +1,22 @@
|
|||
# Compiled Object files, Static and Dynamic libs (Shared Objects)
|
||||
*.o
|
||||
*.a
|
||||
*.so
|
||||
|
||||
# Folders
|
||||
_obj
|
||||
_test
|
||||
|
||||
# Architecture specific extensions/prefixes
|
||||
*.[568vq]
|
||||
[568vq].out
|
||||
|
||||
*.cgo1.go
|
||||
*.cgo2.c
|
||||
_cgo_defun.c
|
||||
_cgo_gotypes.go
|
||||
_cgo_export.*
|
||||
|
||||
_testmain.go
|
||||
|
||||
*.exe
|
2
Godeps/_workspace/src/github.com/cenkalti/backoff/.travis.yml
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
language: go
|
||||
go: 1.3.3
|
20
Godeps/_workspace/src/github.com/cenkalti/backoff/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Cenk Altı
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
116
Godeps/_workspace/src/github.com/cenkalti/backoff/README.md
generated
vendored
Normal file
|
@ -0,0 +1,116 @@
|
|||
# Exponential Backoff [![GoDoc][godoc image]][godoc] [![Build Status][travis image]][travis]
|
||||
|
||||
This is a Go port of the exponential backoff algorithm from [Google's HTTP Client Library for Java][google-http-java-client].
|
||||
|
||||
[Exponential backoff][exponential backoff wiki]
|
||||
is an algorithm that uses feedback to multiplicatively decrease the rate of some process,
|
||||
in order to gradually find an acceptable rate.
|
||||
The retries exponentially increase and stop increasing when a certain threshold is met.
|
||||
|
||||
## How To
|
||||
|
||||
We define two functions, `Retry()` and `RetryNotify()`.
|
||||
They receive an `Operation` to execute, a `BackOff` algorithm,
|
||||
and an optional `Notify` error handler.
|
||||
|
||||
The operation will be executed, and will be retried on failure with delay
|
||||
as given by the backoff algorithm. The backoff algorithm can also decide when to stop
|
||||
retrying.
|
||||
In addition, the notify error handler will be called after each failed attempt,
|
||||
except for the last time, whose error should be handled by the caller.
|
||||
|
||||
```go
|
||||
// An Operation is executed by Retry() or RetryNotify().
|
||||
// The operation will be retried using a backoff policy if it returns an error.
|
||||
type Operation func() error
|
||||
|
||||
// Notify is a notify-on-error function. It receives an operation error and
|
||||
// backoff delay if the operation failed (with an error).
|
||||
//
|
||||
// NOTE that if the backoff policy says to stop retrying,
|
||||
// the notify function isn't called.
|
||||
type Notify func(error, time.Duration)
|
||||
|
||||
func Retry(Operation, BackOff) error
|
||||
func RetryNotify(Operation, BackOff, Notify)
|
||||
```
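
For completeness, a minimal `RetryNotify` sketch in the same snippet style as the examples below, using only the names shown above (the log message and settings are illustrative; in some versions `RetryNotify` also returns the final error):

```go
operation := func() error {
	// An operation that might fail.
	return nil // or return errors.New("some error")
}

// Called after each failed attempt, except the last one.
notify := func(err error, wait time.Duration) {
	log.Println(err, "- retrying in", wait)
}

RetryNotify(operation, NewExponentialBackOff(), notify)
```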
|
||||
|
||||
## Examples
|
||||
|
||||
See more advanced examples in the [godoc][advanced example].
|
||||
|
||||
### Retry
|
||||
|
||||
Simple retry helper that uses the default exponential backoff algorithm:
|
||||
|
||||
```go
|
||||
operation := func() error {
|
||||
// An operation that might fail.
|
||||
return nil // or return errors.New("some error")
|
||||
}
|
||||
|
||||
err := Retry(operation, NewExponentialBackOff())
|
||||
if err != nil {
|
||||
// Handle error.
|
||||
return err
|
||||
}
|
||||
|
||||
// Operation is successful.
|
||||
return nil
|
||||
```
|
||||
|
||||
### Ticker
|
||||
|
||||
```go
|
||||
operation := func() error {
|
||||
// An operation that might fail
|
||||
return nil // or return errors.New("some error")
|
||||
}
|
||||
|
||||
b := NewExponentialBackOff()
|
||||
ticker := NewTicker(b)
|
||||
|
||||
var err error
|
||||
|
||||
// Ticks will continue to arrive when the previous operation is still running,
|
||||
// so operations that take a while to fail could run in quick succession.
|
||||
for range ticker.C {
|
||||
if err = operation(); err != nil {
|
||||
log.Println(err, "will retry...")
|
||||
continue
|
||||
}
|
||||
|
||||
ticker.Stop()
|
||||
break
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
// Operation has failed.
|
||||
return err
|
||||
}
|
||||
|
||||
// Operation is successful.
|
||||
return nil
|
||||
```
|
||||
|
||||
## Getting Started
|
||||
|
||||
```bash
|
||||
# install
|
||||
$ go get github.com/cenkalti/backoff
|
||||
|
||||
# test
|
||||
$ cd $GOPATH/src/github.com/cenkalti/backoff
|
||||
$ go get -t ./...
|
||||
$ go test -v -cover
|
||||
```
|
||||
|
||||
[godoc]: https://godoc.org/github.com/cenkalti/backoff
|
||||
[godoc image]: https://godoc.org/github.com/cenkalti/backoff?status.png
|
||||
[travis]: https://travis-ci.org/cenkalti/backoff
|
||||
[travis image]: https://travis-ci.org/cenkalti/backoff.png
|
||||
|
||||
[google-http-java-client]: https://github.com/google/google-http-java-client
|
||||
[exponential backoff wiki]: http://en.wikipedia.org/wiki/Exponential_backoff
|
||||
|
||||
[advanced example]: https://godoc.org/github.com/cenkalti/backoff#example_
|
117
Godeps/_workspace/src/github.com/cenkalti/backoff/adv_example_test.go
generated
vendored
Normal file
|
@ -0,0 +1,117 @@
|
|||
package backoff
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// This is an example that demonstrates how this package could be used
|
||||
// to perform various advanced operations.
|
||||
//
|
||||
// It executes an HTTP GET request with exponential backoff,
|
||||
// while errors are logged and failed responses are closed, as required by net/http package.
|
||||
//
|
||||
// Note we define a condition function which is used inside the operation to
|
||||
// determine whether the operation succeeded or failed.
|
||||
func Example() error {
|
||||
res, err := GetWithRetry(
|
||||
"http://localhost:9999",
|
||||
ErrorIfStatusCodeIsNot(http.StatusOK),
|
||||
NewExponentialBackOff())
|
||||
|
||||
if err != nil {
|
||||
// Close response body of last (failed) attempt.
|
||||
// The last attempt isn't handled by the notify-on-error function,
|
||||
// which closes the body of all the previous attempts.
|
||||
if e := res.Body.Close(); e != nil {
|
||||
log.Printf("error closing last attempt's response body: %s", e)
|
||||
}
|
||||
log.Printf("too many failed request attempts: %s", err)
|
||||
return err
|
||||
}
|
||||
defer res.Body.Close() // The response's Body must be closed.
|
||||
|
||||
// Read body
|
||||
_, _ = ioutil.ReadAll(res.Body)
|
||||
|
||||
// Do more stuff
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetWithRetry is a helper function that performs an HTTP GET request
|
||||
// to the given URL, and retries with the given backoff using the given condition function.
|
||||
//
|
||||
// It also uses a notify-on-error function which logs
|
||||
// and closes the response body of the failed request.
|
||||
func GetWithRetry(url string, condition Condition, bck BackOff) (*http.Response, error) {
|
||||
var res *http.Response
|
||||
err := RetryNotify(
|
||||
func() error {
|
||||
var err error
|
||||
res, err = http.Get(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return condition(res)
|
||||
},
|
||||
bck,
|
||||
LogAndClose())
|
||||
|
||||
return res, err
|
||||
}
|
||||
|
||||
// Condition is a retry condition function.
|
||||
// It receives a response, and returns an error
|
||||
// if the response failed the condition.
|
||||
type Condition func(*http.Response) error
|
||||
|
||||
// ErrorIfStatusCodeIsNot returns a retry condition function.
|
||||
// The condition returns an error
|
||||
// if the given response's status code is not the given HTTP status code.
|
||||
func ErrorIfStatusCodeIsNot(status int) Condition {
|
||||
return func(res *http.Response) error {
|
||||
if res.StatusCode != status {
|
||||
return NewError(res)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Error is returned on ErrorIfX() condition functions throughout this package.
|
||||
type Error struct {
|
||||
Response *http.Response
|
||||
}
|
||||
|
||||
func NewError(res *http.Response) *Error {
|
||||
// Sanity check
|
||||
if res == nil {
|
||||
panic("response object is nil")
|
||||
}
|
||||
return &Error{Response: res}
|
||||
}
|
||||
func (err *Error) Error() string { return "request failed" }
|
||||
|
||||
// LogAndClose is a notify-on-error function.
|
||||
// It logs the error and closes the response body.
|
||||
func LogAndClose() Notify {
|
||||
return func(err error, wait time.Duration) {
|
||||
switch e := err.(type) {
|
||||
case *Error:
|
||||
defer e.Response.Body.Close()
|
||||
|
||||
b, err := ioutil.ReadAll(e.Response.Body)
|
||||
var body string
|
||||
if err != nil {
|
||||
body = "can't read body"
|
||||
} else {
|
||||
body = string(b)
|
||||
}
|
||||
|
||||
log.Printf("%s: %s", e.Response.Status, body)
|
||||
default:
|
||||
log.Println(err)
|
||||
}
|
||||
}
|
||||
}
|
59
Godeps/_workspace/src/github.com/cenkalti/backoff/backoff.go
generated
vendored
Normal file
|
@ -0,0 +1,59 @@
|
|||
// Package backoff implements backoff algorithms for retrying operations.
|
||||
//
|
||||
// Also has a Retry() helper for retrying operations that may fail.
|
||||
package backoff
|
||||
|
||||
import "time"
|
||||
|
||||
// BackOff is a backoff policy for retrying an operation.
|
||||
type BackOff interface {
|
||||
// NextBackOff returns the duration to wait before retrying the operation,
|
||||
// or backoff.Stop to indicate that no more retries should be made.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// duration := backoff.NextBackOff();
|
||||
// if (duration == backoff.Stop) {
|
||||
// // Do not retry operation.
|
||||
// } else {
|
||||
// // Sleep for duration and retry operation.
|
||||
// }
|
||||
//
|
||||
NextBackOff() time.Duration
|
||||
|
||||
// Reset to initial state.
|
||||
Reset()
|
||||
}
|
||||
|
||||
// Indicates that no more retries should be made for use in NextBackOff().
|
||||
const Stop time.Duration = -1
|
||||
|
||||
// ZeroBackOff is a fixed backoff policy whose backoff time is always zero,
|
||||
// meaning that the operation is retried immediately without waiting, indefinitely.
|
||||
type ZeroBackOff struct{}
|
||||
|
||||
func (b *ZeroBackOff) Reset() {}
|
||||
|
||||
func (b *ZeroBackOff) NextBackOff() time.Duration { return 0 }
|
||||
|
||||
// StopBackOff is a fixed backoff policy that always returns backoff.Stop for
|
||||
// NextBackOff(), meaning that the operation should never be retried.
|
||||
type StopBackOff struct{}
|
||||
|
||||
func (b *StopBackOff) Reset() {}
|
||||
|
||||
func (b *StopBackOff) NextBackOff() time.Duration { return Stop }
|
||||
|
||||
// ConstantBackOff is a backoff policy that always returns the same backoff delay.
|
||||
// This is in contrast to an exponential backoff policy,
|
||||
// which returns a delay that grows longer as you call NextBackOff() over and over again.
|
||||
type ConstantBackOff struct {
|
||||
Interval time.Duration
|
||||
}
|
||||
|
||||
func (b *ConstantBackOff) Reset() {}
|
||||
func (b *ConstantBackOff) NextBackOff() time.Duration { return b.Interval }
|
||||
|
||||
func NewConstantBackOff(d time.Duration) *ConstantBackOff {
|
||||
return &ConstantBackOff{Interval: d}
|
||||
}
|
27
Godeps/_workspace/src/github.com/cenkalti/backoff/backoff_test.go
generated
vendored
Normal file
|
@ -0,0 +1,27 @@
|
|||
package backoff
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestNextBackOffMillis(t *testing.T) {
|
||||
subtestNextBackOff(t, 0, new(ZeroBackOff))
|
||||
subtestNextBackOff(t, Stop, new(StopBackOff))
|
||||
}
|
||||
|
||||
func subtestNextBackOff(t *testing.T, expectedValue time.Duration, backOffPolicy BackOff) {
|
||||
for i := 0; i < 10; i++ {
|
||||
next := backOffPolicy.NextBackOff()
|
||||
if next != expectedValue {
|
||||
t.Errorf("got: %d expected: %d", next, expectedValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestConstantBackOff(t *testing.T) {
|
||||
backoff := NewConstantBackOff(time.Second)
|
||||
if backoff.NextBackOff() != time.Second {
|
||||
t.Error("invalid interval")
|
||||
}
|
||||
}
|
51
Godeps/_workspace/src/github.com/cenkalti/backoff/example_test.go
generated
vendored
Normal file
|
@ -0,0 +1,51 @@
|
|||
package backoff
|
||||
|
||||
import "log"
|
||||
|
||||
func ExampleRetry() error {
|
||||
operation := func() error {
|
||||
// An operation that might fail.
|
||||
return nil // or return errors.New("some error")
|
||||
}
|
||||
|
||||
err := Retry(operation, NewExponentialBackOff())
|
||||
if err != nil {
|
||||
// Handle error.
|
||||
return err
|
||||
}
|
||||
|
||||
// Operation is successful.
|
||||
return nil
|
||||
}
|
||||
|
||||
func ExampleTicker() error {
|
||||
operation := func() error {
|
||||
// An operation that might fail
|
||||
return nil // or return errors.New("some error")
|
||||
}
|
||||
|
||||
b := NewExponentialBackOff()
|
||||
ticker := NewTicker(b)
|
||||
|
||||
var err error
|
||||
|
||||
// Ticks will continue to arrive when the previous operation is still running,
|
||||
// so operations that take a while to fail could run in quick succession.
|
||||
for _ = range ticker.C {
|
||||
if err = operation(); err != nil {
|
||||
log.Println(err, "will retry...")
|
||||
continue
|
||||
}
|
||||
|
||||
ticker.Stop()
|
||||
break
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
// Operation has failed.
|
||||
return err
|
||||
}
|
||||
|
||||
// Operation is successful.
|
||||
return nil
|
||||
}
|
151
Godeps/_workspace/src/github.com/cenkalti/backoff/exponential.go
generated
vendored
Normal file
|
@ -0,0 +1,151 @@
|
|||
package backoff

import (
    "math/rand"
    "time"
)

/*
ExponentialBackOff is a backoff implementation that increases the backoff
period for each retry attempt using a randomization function that grows exponentially.

NextBackOff() is calculated using the following formula:

    randomized interval =
        RetryInterval * (random value in range [1 - RandomizationFactor, 1 + RandomizationFactor])

In other words NextBackOff() will range between the randomization factor
percentage below and above the retry interval.

For example, given the following parameters:

    RetryInterval = 2
    RandomizationFactor = 0.5
    Multiplier = 2

the actual backoff period used in the next retry attempt will range between 1 and 3 seconds,
multiplied by the exponential, that is, between 2 and 6 seconds.

Note: MaxInterval caps the RetryInterval and not the randomized interval.

If the time elapsed since an ExponentialBackOff instance is created goes past the
MaxElapsedTime, then the method NextBackOff() starts returning backoff.Stop.

The elapsed time can be reset by calling Reset().

Example: Given the default arguments below, and assuming MaxElapsedTime is exceeded
on the 10th try, the sequence for 10 tries will be:

    Request #  RetryInterval (seconds)  Randomized Interval (seconds)

     1          0.5                     [0.25,   0.75]
     2          0.75                    [0.375,  1.125]
     3          1.125                   [0.562,  1.687]
     4          1.687                   [0.8435, 2.53]
     5          2.53                    [1.265,  3.795]
     6          3.795                   [1.897,  5.692]
     7          5.692                   [2.846,  8.538]
     8          8.538                   [4.269, 12.807]
     9          12.807                  [6.403, 19.210]
    10          19.210                  backoff.Stop

Note: Implementation is not thread-safe.
*/
type ExponentialBackOff struct {
    InitialInterval     time.Duration
    RandomizationFactor float64
    Multiplier          float64
    MaxInterval         time.Duration
    // After MaxElapsedTime the ExponentialBackOff stops.
    // It never stops if MaxElapsedTime == 0.
    MaxElapsedTime time.Duration
    Clock          Clock

    currentInterval time.Duration
    startTime       time.Time
}

// Clock is an interface that returns current time for BackOff.
type Clock interface {
    Now() time.Time
}

// Default values for ExponentialBackOff.
const (
    DefaultInitialInterval     = 500 * time.Millisecond
    DefaultRandomizationFactor = 0.5
    DefaultMultiplier          = 1.5
    DefaultMaxInterval         = 60 * time.Second
    DefaultMaxElapsedTime      = 15 * time.Minute
)

// NewExponentialBackOff creates an instance of ExponentialBackOff using default values.
func NewExponentialBackOff() *ExponentialBackOff {
    b := &ExponentialBackOff{
        InitialInterval:     DefaultInitialInterval,
        RandomizationFactor: DefaultRandomizationFactor,
        Multiplier:          DefaultMultiplier,
        MaxInterval:         DefaultMaxInterval,
        MaxElapsedTime:      DefaultMaxElapsedTime,
        Clock:               SystemClock,
    }
    b.Reset()
    return b
}

type systemClock struct{}

func (t systemClock) Now() time.Time {
    return time.Now()
}

// SystemClock implements Clock interface that uses time.Now().
var SystemClock = systemClock{}

// Reset the interval back to the initial retry interval and restarts the timer.
func (b *ExponentialBackOff) Reset() {
    b.currentInterval = b.InitialInterval
    b.startTime = b.Clock.Now()
}

// NextBackOff calculates the next backoff interval using the formula:
//  Randomized interval = RetryInterval +/- (RandomizationFactor * RetryInterval)
func (b *ExponentialBackOff) NextBackOff() time.Duration {
    // Make sure we have not gone over the maximum elapsed time.
    if b.MaxElapsedTime != 0 && b.GetElapsedTime() > b.MaxElapsedTime {
        return Stop
    }
    defer b.incrementCurrentInterval()
    return getRandomValueFromInterval(b.RandomizationFactor, rand.Float64(), b.currentInterval)
}

// GetElapsedTime returns the elapsed time since an ExponentialBackOff instance
// is created and is reset when Reset() is called.
//
// The elapsed time is computed using the configured Clock's Now().
func (b *ExponentialBackOff) GetElapsedTime() time.Duration {
    return b.Clock.Now().Sub(b.startTime)
}

// Increments the current interval by multiplying it with the multiplier.
func (b *ExponentialBackOff) incrementCurrentInterval() {
    // Check for overflow; if overflow is detected, set the current interval to the max interval.
    if float64(b.currentInterval) >= float64(b.MaxInterval)/b.Multiplier {
        b.currentInterval = b.MaxInterval
    } else {
        b.currentInterval = time.Duration(float64(b.currentInterval) * b.Multiplier)
    }
}
// Returns a random value from the following interval:
// [currentInterval - randomizationFactor * currentInterval, currentInterval + randomizationFactor * currentInterval].
func getRandomValueFromInterval(randomizationFactor, random float64, currentInterval time.Duration) time.Duration {
    var delta = randomizationFactor * float64(currentInterval)
    var minInterval = float64(currentInterval) - delta
    var maxInterval = float64(currentInterval) + delta

    // Get a random value from the range [minInterval, maxInterval].
    // The formula used below has a +1 because if the minInterval is 1 and the maxInterval is 3 then
    // we want a 33% chance for selecting either 1, 2 or 3.
    return time.Duration(minInterval + (random * (maxInterval - minInterval + 1)))
}
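To make the randomized-interval formula in the doc comment concrete, the sketch below prints a handful of NextBackOff() values with the default settings; each printed duration falls within ±50% of the current interval, which itself grows by the 1.5 multiplier. This is a minimal sketch assuming the upstream import path github.com/cenkalti/backoff; the loop bound is arbitrary:

package main

import (
    "fmt"

    "github.com/cenkalti/backoff"
)

func main() {
    b := backoff.NewExponentialBackOff() // 500ms initial interval, multiplier 1.5, randomization factor 0.5

    // Expected ranges per the doc comment: [0.25s, 0.75s], [0.375s, 1.125s], [0.562s, 1.687s], ...
    for i := 0; i < 5; i++ {
        fmt.Println(b.NextBackOff())
    }
}

Run long enough, the printed values plateau once MaxInterval (60s by default) caps the underlying interval.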
108 Godeps/_workspace/src/github.com/cenkalti/backoff/exponential_test.go generated vendored Normal file
@@ -0,0 +1,108 @@
package backoff

import (
    "math"
    "testing"
    "time"
)

func TestBackOff(t *testing.T) {
    var (
        testInitialInterval     = 500 * time.Millisecond
        testRandomizationFactor = 0.1
        testMultiplier          = 2.0
        testMaxInterval         = 5 * time.Second
        testMaxElapsedTime      = 15 * time.Minute
    )

    exp := NewExponentialBackOff()
    exp.InitialInterval = testInitialInterval
    exp.RandomizationFactor = testRandomizationFactor
    exp.Multiplier = testMultiplier
    exp.MaxInterval = testMaxInterval
    exp.MaxElapsedTime = testMaxElapsedTime
    exp.Reset()

    var expectedResults = []time.Duration{500, 1000, 2000, 4000, 5000, 5000, 5000, 5000, 5000, 5000}
    for i, d := range expectedResults {
        expectedResults[i] = d * time.Millisecond
    }

    for _, expected := range expectedResults {
        assertEquals(t, expected, exp.currentInterval)
        // Assert that the next backoff falls in the expected range.
        var minInterval = expected - time.Duration(testRandomizationFactor*float64(expected))
        var maxInterval = expected + time.Duration(testRandomizationFactor*float64(expected))
        var actualInterval = exp.NextBackOff()
        if !(minInterval <= actualInterval && actualInterval <= maxInterval) {
            t.Errorf("got: %s, expected a value in [%s, %s]", actualInterval, minInterval, maxInterval)
        }
    }
}
func TestGetRandomizedInterval(t *testing.T) {
    // 33% chance of being 1.
    assertEquals(t, 1, getRandomValueFromInterval(0.5, 0, 2))
    assertEquals(t, 1, getRandomValueFromInterval(0.5, 0.33, 2))
    // 33% chance of being 2.
    assertEquals(t, 2, getRandomValueFromInterval(0.5, 0.34, 2))
    assertEquals(t, 2, getRandomValueFromInterval(0.5, 0.66, 2))
    // 33% chance of being 3.
    assertEquals(t, 3, getRandomValueFromInterval(0.5, 0.67, 2))
    assertEquals(t, 3, getRandomValueFromInterval(0.5, 0.99, 2))
}

type TestClock struct {
    i     time.Duration
    start time.Time
}

func (c *TestClock) Now() time.Time {
    t := c.start.Add(c.i)
    c.i += time.Second
    return t
}
func TestGetElapsedTime(t *testing.T) {
    var exp = NewExponentialBackOff()
    exp.Clock = &TestClock{}
    exp.Reset()

    var elapsedTime = exp.GetElapsedTime()
    if elapsedTime != time.Second {
        t.Errorf("elapsedTime=%d", elapsedTime)
    }
}

func TestMaxElapsedTime(t *testing.T) {
    var exp = NewExponentialBackOff()
    exp.Clock = &TestClock{start: time.Time{}.Add(10000 * time.Second)}
    // Set startTime back to the zero time so the elapsed time is guaranteed to be
    // greater than the max elapsed time.
    exp.startTime = time.Time{}
    assertEquals(t, Stop, exp.NextBackOff())
}

func TestBackOffOverflow(t *testing.T) {
    var (
        testInitialInterval time.Duration = math.MaxInt64 / 2
        testMaxInterval     time.Duration = math.MaxInt64
        testMultiplier                    = 2.1
    )

    exp := NewExponentialBackOff()
    exp.InitialInterval = testInitialInterval
    exp.Multiplier = testMultiplier
    exp.MaxInterval = testMaxInterval
    exp.Reset()

    exp.NextBackOff()
    // Assert that when an overflow is possible, the current interval is set to the max interval.
    assertEquals(t, testMaxInterval, exp.currentInterval)
}

func assertEquals(t *testing.T, expected, value time.Duration) {
    if expected != value {
        t.Errorf("got: %d, expected: %d", value, expected)
    }
}
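The TestClock above is the pattern to copy when MaxElapsedTime behaviour needs to be deterministic. A minimal sketch along the same lines, assuming the upstream import path github.com/cenkalti/backoff; stepClock is a hypothetical stand-in for TestClock that advances one minute per call:

package main

import (
    "fmt"
    "time"

    "github.com/cenkalti/backoff"
)

// stepClock is a hypothetical fake clock: every Now() call moves time forward by one minute.
type stepClock struct {
    t time.Time
}

func (c *stepClock) Now() time.Time {
    c.t = c.t.Add(time.Minute)
    return c.t
}

func main() {
    b := backoff.NewExponentialBackOff()
    b.Clock = &stepClock{}
    b.Reset()

    // With one simulated minute per call and the default 15-minute MaxElapsedTime,
    // NextBackOff() starts returning backoff.Stop after about 15 calls.
    calls := 0
    for b.NextBackOff() != backoff.Stop {
        calls++
    }
    fmt.Println("stopped after", calls, "calls")
}

The same approach keeps GetElapsedTime deterministic without sleeping in tests.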
Some files were not shown because too many files have changed in this diff.