Update vendor

Darren Shepherd 2020-05-06 22:09:19 -07:00
parent c580a8b528
commit f4ff2bf3a8
479 changed files with 66512 additions and 14512 deletions

10
go.mod

@@ -66,7 +66,6 @@ require (
github.com/bhendo/go-powershell v0.0.0-20190719160123-219e7fb4e41e // indirect
github.com/bronze1man/goStrongswanVici v0.0.0-20190828090544-27d02f80ba40 // indirect
github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23 // indirect
github.com/canonical/go-dqlite v1.5.1
github.com/containerd/cgroups v0.0.0-00010101000000-000000000000 // indirect
github.com/containerd/containerd v1.3.0-beta.2.0.20190828155532-0293cbd26c69
github.com/containerd/continuity v0.0.0-20190827140505-75bee3e2ccb6 // indirect
@@ -81,18 +80,17 @@
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f
github.com/docker/docker v1.4.2-0.20191205034852-d163fbba3c82
github.com/docker/go-metrics v0.0.1 // indirect
github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4 // indirect
github.com/go-bindata/go-bindata v3.1.2+incompatible
github.com/go-sql-driver/mysql v1.4.1
github.com/gogo/googleapis v1.3.0 // indirect
github.com/google/tcpproxy v0.0.0-20180808230851-dfa16c61dad2
github.com/google/uuid v1.1.1
github.com/gorilla/mux v1.7.3
github.com/gorilla/websocket v1.4.1
github.com/juju/errors v0.0.0-20190806202954-0232dcc7464d // indirect
github.com/juju/testing v0.0.0-20190723135506-ce30eb24acd2 // indirect
github.com/kubernetes-sigs/cri-tools v0.0.0-00010101000000-000000000000
github.com/lib/pq v1.1.1
github.com/lxc/lxd v0.0.0-20191108214106-60ea15630455
github.com/mattn/go-sqlite3 v1.13.0
github.com/natefinch/lumberjack v2.0.0+incompatible
github.com/opencontainers/runc v1.0.0-rc10
@@ -110,14 +108,13 @@ require (
github.com/spf13/pflag v1.0.5
github.com/tchap/go-patricia v2.3.0+incompatible // indirect
github.com/urfave/cli v1.22.2
// e694b7bb0875 is v3.4.7
go.etcd.io/etcd v0.5.0-alpha.5.0.20200401174654-e694b7bb0875
// 54ba958 is v3.4.9
go.etcd.io/etcd v0.5.0-alpha.5.0.20200520232829-54ba9589114f
golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975
golang.org/x/net v0.0.0-20191204025024-5ee1b9f4859a
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e
google.golang.org/grpc v1.26.0
gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22 // indirect
gopkg.in/robfig/cron.v2 v2.0.0-20150107220207-be2e0b0deed5 // indirect
gopkg.in/yaml.v2 v2.2.8
k8s.io/api v0.18.0
k8s.io/apimachinery v0.18.0
@@ -128,4 +125,5 @@ require (
k8s.io/cri-api v0.0.0
k8s.io/klog v1.0.0
k8s.io/kubernetes v1.18.0
sigs.k8s.io/yaml v1.2.0
)

14
go.sum

@@ -207,8 +207,6 @@ github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8
github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc=
github.com/fatih/color v1.6.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4 h1:GY1+t5Dr9OKADM64SYnQjw/w99HMYvQ0A8/JoUkxVmc=
github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4/go.mod h1:T9YF2M40nIgbVgp3rreNmTged+9HrbNTIQf1PsaIiTA=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
@@ -223,8 +221,6 @@ github.com/go-acme/lego v2.5.0+incompatible/go.mod h1:yzMNe9CasVUhkquNvti5nAtPmG
github.com/go-bindata/go-bindata v3.1.1+incompatible/go.mod h1:xK8Dsgwmeed+BBsSy2XTopBn/8uK2HWuGSnA11C3Joo=
github.com/go-bindata/go-bindata v3.1.2+incompatible h1:5vjJMVhowQdPzjE1LdxyFF7YFTXg5IgGVW4gBr5IbvE=
github.com/go-bindata/go-bindata v3.1.2+incompatible/go.mod h1:xK8Dsgwmeed+BBsSy2XTopBn/8uK2HWuGSnA11C3Joo=
github.com/go-check/check v0.0.0-20180628173108-788fd7840127 h1:0gkP6mzaMqkmpcJYCFOLkIBwI7xFExG03bbkOkCvUPI=
github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98=
github.com/go-critic/go-critic v0.3.5-0.20190526074819-1df300866540/go.mod h1:+sE8vrLDS2M0pZkBk0wy6+nLdKexVDrl/jBqQOTDThA=
github.com/go-lintpack/lintpack v0.5.2/go.mod h1:NwZuYi2nUHho8XEIZ6SIxihrnPoqBTDqfpXvXAN0sXM=
github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
@@ -446,13 +442,10 @@ github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/u
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/juju/errors v0.0.0-20180806074554-22422dad46e1/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q=
github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q=
github.com/juju/errors v0.0.0-20190806202954-0232dcc7464d h1:hJXjZMxj0SWlMoQkzeZDLi2cmeiWKa7y1B8Rg+qaoEc=
github.com/juju/errors v0.0.0-20190806202954-0232dcc7464d/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q=
github.com/juju/loggo v0.0.0-20180524022052-584905176618/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U=
github.com/juju/loggo v0.0.0-20190526231331-6e530bcce5d8 h1:UUHMLvzt/31azWTN/ifGWef4WUqvXk0iRqdhdy/2uzI=
github.com/juju/loggo v0.0.0-20190526231331-6e530bcce5d8/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U=
github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA=
github.com/juju/testing v0.0.0-20190613124551-e81189438503/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA=
github.com/juju/testing v0.0.0-20190723135506-ce30eb24acd2 h1:Pp8RxiF4rSoXP9SED26WCfNB28/dwTDpPXS8XMJR8rc=
github.com/juju/testing v0.0.0-20190723135506-ce30eb24acd2/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA=
@@ -494,8 +487,6 @@ github.com/lucas-clemente/aes12 v0.0.0-20171027163421-cd47fb39b79f/go.mod h1:JpH
github.com/lucas-clemente/quic-clients v0.1.0/go.mod h1:y5xVIEoObKqULIKivu+gD/LU90pL73bTdtQjPBvtCBk=
github.com/lucas-clemente/quic-go v0.10.2/go.mod h1:hvaRS9IHjFLMq76puFJeWNfmn+H70QZ/CXoxqw9bzao=
github.com/lucas-clemente/quic-go-certificates v0.0.0-20160823095156-d2f86524cced/go.mod h1:NCcRLrOTZbzhZvixZLlERbJtDtYsmMw8Jc4vS8Z0g58=
github.com/lxc/lxd v0.0.0-20191108214106-60ea15630455 h1:gQQV7It0kjZxMLJkS/+5Mc6w0zM6pKGzl3OS0h2RHrY=
github.com/lxc/lxd v0.0.0-20191108214106-60ea15630455/go.mod h1:2BaZflfwsv8a3uy3/Vw+de4Avn4DSrAiqaHJjCIXMV4=
github.com/magiconair/properties v1.7.6/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
@@ -814,6 +805,8 @@ go.etcd.io/bbolt v1.3.3 h1:MUGmc65QhB3pIlaQ5bB4LwqSj6GIonVJXpZiaKNyaKk=
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738 h1:VcrIfasaLFkyjk6KNlXQSzO+B0fZcnECiDrKJsfxka0=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg=
go.etcd.io/etcd v0.5.0-alpha.5.0.20200520232829-54ba9589114f h1:pBCD+Z7cy5WPTq+R6MmJJvDRpn88cp7bmTypBsn91g4=
go.etcd.io/etcd v0.5.0-alpha.5.0.20200520232829-54ba9589114f/go.mod h1:skWido08r9w6Lq/w70DO5XYIKMu4QFu1+4VsqLQuJy8=
go.mongodb.org/mongo-driver v1.0.3/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
go.mongodb.org/mongo-driver v1.1.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
go.mongodb.org/mongo-driver v1.1.2 h1:jxcFYjlkl8xaERsgLo+RNquI0epW6zuy/ZRQs6jnrFA=
@@ -948,7 +941,6 @@ golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGm
golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190121143147-24cd39ecf745/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -1027,8 +1019,6 @@ gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3M
gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/robfig/cron.v2 v2.0.0-20150107220207-be2e0b0deed5 h1:E846t8CnR+lv5nE+VuiKTDG/v1U2stad0QzddfJC7kY=
gopkg.in/robfig/cron.v2 v2.0.0-20150107220207-be2e0b0deed5/go.mod h1:hiOFpYm0ZJbusNj2ywpbrXowU3G8U6GIQzqn2mw1UIE=
gopkg.in/square/go-jose.v2 v2.2.2 h1:orlkJ3myw8CN1nVQHBFfloD+L3egixIa4FvUP6RosSA=
gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=

5
vendor/github.com/coreos/go-semver/NOTICE generated vendored Normal file

@@ -0,0 +1,5 @@
CoreOS Project
Copyright 2018 CoreOS, Inc
This product includes software developed at CoreOS, Inc.
(http://www.coreos.com/).

296
vendor/github.com/coreos/go-semver/semver/semver.go generated vendored Normal file

@@ -0,0 +1,296 @@
// Copyright 2013-2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Semantic Versions http://semver.org
package semver
import (
"bytes"
"errors"
"fmt"
"regexp"
"strconv"
"strings"
)
type Version struct {
Major int64
Minor int64
Patch int64
PreRelease PreRelease
Metadata string
}
type PreRelease string
func splitOff(input *string, delim string) (val string) {
parts := strings.SplitN(*input, delim, 2)
if len(parts) == 2 {
*input = parts[0]
val = parts[1]
}
return val
}
func New(version string) *Version {
return Must(NewVersion(version))
}
func NewVersion(version string) (*Version, error) {
v := Version{}
if err := v.Set(version); err != nil {
return nil, err
}
return &v, nil
}
// Must is a helper for wrapping NewVersion and will panic if err is not nil.
func Must(v *Version, err error) *Version {
if err != nil {
panic(err)
}
return v
}
// Set parses and updates v from the given version string. Implements flag.Value
func (v *Version) Set(version string) error {
metadata := splitOff(&version, "+")
preRelease := PreRelease(splitOff(&version, "-"))
dotParts := strings.SplitN(version, ".", 3)
if len(dotParts) != 3 {
return fmt.Errorf("%s is not in dotted-tri format", version)
}
if err := validateIdentifier(string(preRelease)); err != nil {
return fmt.Errorf("failed to validate pre-release: %v", err)
}
if err := validateIdentifier(metadata); err != nil {
return fmt.Errorf("failed to validate metadata: %v", err)
}
parsed := make([]int64, 3, 3)
for i, v := range dotParts[:3] {
val, err := strconv.ParseInt(v, 10, 64)
parsed[i] = val
if err != nil {
return err
}
}
v.Metadata = metadata
v.PreRelease = preRelease
v.Major = parsed[0]
v.Minor = parsed[1]
v.Patch = parsed[2]
return nil
}
func (v Version) String() string {
var buffer bytes.Buffer
fmt.Fprintf(&buffer, "%d.%d.%d", v.Major, v.Minor, v.Patch)
if v.PreRelease != "" {
fmt.Fprintf(&buffer, "-%s", v.PreRelease)
}
if v.Metadata != "" {
fmt.Fprintf(&buffer, "+%s", v.Metadata)
}
return buffer.String()
}
func (v *Version) UnmarshalYAML(unmarshal func(interface{}) error) error {
var data string
if err := unmarshal(&data); err != nil {
return err
}
return v.Set(data)
}
func (v Version) MarshalJSON() ([]byte, error) {
return []byte(`"` + v.String() + `"`), nil
}
func (v *Version) UnmarshalJSON(data []byte) error {
l := len(data)
if l == 0 || string(data) == `""` {
return nil
}
if l < 2 || data[0] != '"' || data[l-1] != '"' {
return errors.New("invalid semver string")
}
return v.Set(string(data[1 : l-1]))
}
// Compare tests if v is less than, equal to, or greater than versionB,
// returning -1, 0, or +1 respectively.
func (v Version) Compare(versionB Version) int {
if cmp := recursiveCompare(v.Slice(), versionB.Slice()); cmp != 0 {
return cmp
}
return preReleaseCompare(v, versionB)
}
// Equal tests if v is equal to versionB.
func (v Version) Equal(versionB Version) bool {
return v.Compare(versionB) == 0
}
// LessThan tests if v is less than versionB.
func (v Version) LessThan(versionB Version) bool {
return v.Compare(versionB) < 0
}
// Slice converts the comparable parts of the semver into a slice of integers.
func (v Version) Slice() []int64 {
return []int64{v.Major, v.Minor, v.Patch}
}
func (p PreRelease) Slice() []string {
preRelease := string(p)
return strings.Split(preRelease, ".")
}
func preReleaseCompare(versionA Version, versionB Version) int {
a := versionA.PreRelease
b := versionB.PreRelease
/* Handle the case where if two versions are otherwise equal it is the
* one without a PreRelease that is greater */
if len(a) == 0 && (len(b) > 0) {
return 1
} else if len(b) == 0 && (len(a) > 0) {
return -1
}
// If there is a prerelease, check and compare each part.
return recursivePreReleaseCompare(a.Slice(), b.Slice())
}
func recursiveCompare(versionA []int64, versionB []int64) int {
if len(versionA) == 0 {
return 0
}
a := versionA[0]
b := versionB[0]
if a > b {
return 1
} else if a < b {
return -1
}
return recursiveCompare(versionA[1:], versionB[1:])
}
func recursivePreReleaseCompare(versionA []string, versionB []string) int {
// A larger set of pre-release fields has a higher precedence than a smaller set,
// if all of the preceding identifiers are equal.
if len(versionA) == 0 {
if len(versionB) > 0 {
return -1
}
return 0
} else if len(versionB) == 0 {
// We're longer than versionB so return 1.
return 1
}
a := versionA[0]
b := versionB[0]
aInt := false
bInt := false
aI, err := strconv.Atoi(versionA[0])
if err == nil {
aInt = true
}
bI, err := strconv.Atoi(versionB[0])
if err == nil {
bInt = true
}
// Numeric identifiers always have lower precedence than non-numeric identifiers.
if aInt && !bInt {
return -1
} else if !aInt && bInt {
return 1
}
// Handle Integer Comparison
if aInt && bInt {
if aI > bI {
return 1
} else if aI < bI {
return -1
}
}
// Handle String Comparison
if a > b {
return 1
} else if a < b {
return -1
}
return recursivePreReleaseCompare(versionA[1:], versionB[1:])
}
// BumpMajor increments the Major field by 1 and resets all other fields to their default values
func (v *Version) BumpMajor() {
v.Major += 1
v.Minor = 0
v.Patch = 0
v.PreRelease = PreRelease("")
v.Metadata = ""
}
// BumpMinor increments the Minor field by 1 and resets all other fields to their default values
func (v *Version) BumpMinor() {
v.Minor += 1
v.Patch = 0
v.PreRelease = PreRelease("")
v.Metadata = ""
}
// BumpPatch increments the Patch field by 1 and resets all other fields to their default values
func (v *Version) BumpPatch() {
v.Patch += 1
v.PreRelease = PreRelease("")
v.Metadata = ""
}
// validateIdentifier makes sure the provided identifier satisfies semver spec
func validateIdentifier(id string) error {
if id != "" && !reIdentifier.MatchString(id) {
return fmt.Errorf("%s is not a valid semver identifier", id)
}
return nil
}
// reIdentifier is a regular expression used to check that pre-release and metadata
// identifiers satisfy the spec requirements
var reIdentifier = regexp.MustCompile(`^[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*$`)

38
vendor/github.com/coreos/go-semver/semver/sort.go generated vendored Normal file

@@ -0,0 +1,38 @@
// Copyright 2013-2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package semver
import (
"sort"
)
type Versions []*Version
func (s Versions) Len() int {
return len(s)
}
func (s Versions) Swap(i, j int) {
s[i], s[j] = s[j], s[i]
}
func (s Versions) Less(i, j int) bool {
return s[i].LessThan(*s[j])
}
// Sort sorts the given slice of Version
func Sort(versions []*Version) {
sort.Sort(Versions(versions))
}
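
For orientation, here is a minimal usage sketch of the newly vendored go-semver package. The wrapper program and the version strings are illustrative only and are not part of this commit:

```go
package main

import (
	"fmt"

	"github.com/coreos/go-semver/semver"
)

func main() {
	// Set() expects a dotted-tri "major.minor.patch" core with optional
	// "-prerelease" and "+metadata" suffixes.
	a := semver.New("1.18.2")                      // panics on invalid input (Must + NewVersion)
	b, err := semver.NewVersion("1.18.2-rc1+k3s1") // returns an error instead of panicking
	if err != nil {
		panic(err)
	}

	// A pre-release compares lower than the corresponding release;
	// metadata is ignored by Compare.
	fmt.Println(b.LessThan(*a)) // true
	fmt.Println(a.Compare(*b))  // 1

	// Sort uses the same ordering.
	vs := []*semver.Version{a, b, semver.New("1.17.5")}
	semver.Sort(vs)
	fmt.Println(vs[0], vs[1], vs[2]) // 1.17.5 1.18.2-rc1+k3s1 1.18.2
}
```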

21
vendor/github.com/dustin/go-humanize/.travis.yml generated vendored Normal file

@@ -0,0 +1,21 @@
sudo: false
language: go
go:
  - 1.3.x
  - 1.5.x
  - 1.6.x
  - 1.7.x
  - 1.8.x
  - 1.9.x
  - master
matrix:
  allow_failures:
    - go: master
  fast_finish: true
install:
  - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step).
script:
  - go get -t -v ./...
  - diff -u <(echo -n) <(gofmt -d -s .)
  - go tool vet .
  - go test -v -race ./...

21
vendor/github.com/dustin/go-humanize/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
Copyright (c) 2005-2008 Dustin Sallings <dustin@spy.net>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
<http://www.opensource.org/licenses/mit-license.php>

124
vendor/github.com/dustin/go-humanize/README.markdown generated vendored Normal file

@@ -0,0 +1,124 @@
# Humane Units [![Build Status](https://travis-ci.org/dustin/go-humanize.svg?branch=master)](https://travis-ci.org/dustin/go-humanize) [![GoDoc](https://godoc.org/github.com/dustin/go-humanize?status.svg)](https://godoc.org/github.com/dustin/go-humanize)
Just a few functions for helping humanize times and sizes.
`go get` it as `github.com/dustin/go-humanize`, import it as
`"github.com/dustin/go-humanize"`, use it as `humanize`.
See [godoc](https://godoc.org/github.com/dustin/go-humanize) for
complete documentation.
## Sizes
This lets you take numbers like `82854982` and convert them to useful
strings like, `83 MB` or `79 MiB` (whichever you prefer).
Example:
```go
fmt.Printf("That file is %s.", humanize.Bytes(82854982)) // That file is 83 MB.
```
## Times
This lets you take a `time.Time` and spit it out in relative terms.
For example, `12 seconds ago` or `3 days from now`.
Example:
```go
fmt.Printf("This was touched %s.", humanize.Time(someTimeInstance)) // This was touched 7 hours ago.
```
Thanks to Kyle Lemons for the time implementation from an IRC
conversation one day. It's pretty neat.
## Ordinals
From a [mailing list discussion][odisc] where a user wanted to be able
to label ordinals.
    0 -> 0th
    1 -> 1st
    2 -> 2nd
    3 -> 3rd
    4 -> 4th
    [...]
Example:
```go
fmt.Printf("You're my %s best friend.", humanize.Ordinal(193)) // You are my 193rd best friend.
```
## Commas
Want to shove commas into numbers? Be my guest.
    0 -> 0
    100 -> 100
    1000 -> 1,000
    1000000000 -> 1,000,000,000
    -100000 -> -100,000
Example:
```go
fmt.Printf("You owe $%s.\n", humanize.Comma(6582491)) // You owe $6,582,491.
```
## Ftoa
Nicer float64 formatter that removes trailing zeros.
```go
fmt.Printf("%f", 2.24) // 2.240000
fmt.Printf("%s", humanize.Ftoa(2.24)) // 2.24
fmt.Printf("%f", 2.0) // 2.000000
fmt.Printf("%s", humanize.Ftoa(2.0)) // 2
```
## SI notation
Format numbers with [SI notation][sinotation].
Example:
```go
humanize.SI(0.00000000223, "M") // 2.23 nM
```
## English-specific functions
The following functions are in the `humanize/english` subpackage.
### Plurals
Simple English pluralization
```go
english.PluralWord(1, "object", "") // object
english.PluralWord(42, "object", "") // objects
english.PluralWord(2, "bus", "") // buses
english.PluralWord(99, "locus", "loci") // loci
english.Plural(1, "object", "") // 1 object
english.Plural(42, "object", "") // 42 objects
english.Plural(2, "bus", "") // 2 buses
english.Plural(99, "locus", "loci") // 99 loci
```
### Word series
Format comma-separated word lists with conjunctions:
```go
english.WordSeries([]string{"foo"}, "and") // foo
english.WordSeries([]string{"foo", "bar"}, "and") // foo and bar
english.WordSeries([]string{"foo", "bar", "baz"}, "and") // foo, bar and baz
english.OxfordWordSeries([]string{"foo", "bar", "baz"}, "and") // foo, bar, and baz
```
[odisc]: https://groups.google.com/d/topic/golang-nuts/l8NhI74jl-4/discussion
[sinotation]: http://en.wikipedia.org/wiki/Metric_prefix

31
vendor/github.com/dustin/go-humanize/big.go generated vendored Normal file

@@ -0,0 +1,31 @@
package humanize
import (
"math/big"
)
// order of magnitude (to a max order)
func oomm(n, b *big.Int, maxmag int) (float64, int) {
mag := 0
m := &big.Int{}
for n.Cmp(b) >= 0 {
n.DivMod(n, b, m)
mag++
if mag == maxmag && maxmag >= 0 {
break
}
}
return float64(n.Int64()) + (float64(m.Int64()) / float64(b.Int64())), mag
}
// total order of magnitude
// (same as above, but with no upper limit)
func oom(n, b *big.Int) (float64, int) {
mag := 0
m := &big.Int{}
for n.Cmp(b) >= 0 {
n.DivMod(n, b, m)
mag++
}
return float64(n.Int64()) + (float64(m.Int64()) / float64(b.Int64())), mag
}

173
vendor/github.com/dustin/go-humanize/bigbytes.go generated vendored Normal file

@@ -0,0 +1,173 @@
package humanize
import (
"fmt"
"math/big"
"strings"
"unicode"
)
var (
bigIECExp = big.NewInt(1024)
// BigByte is one byte in big.Ints
BigByte = big.NewInt(1)
// BigKiByte is 1,024 bytes in big.Ints
BigKiByte = (&big.Int{}).Mul(BigByte, bigIECExp)
// BigMiByte is 1,024 k bytes in big.Ints
BigMiByte = (&big.Int{}).Mul(BigKiByte, bigIECExp)
// BigGiByte is 1,024 m bytes in big.Ints
BigGiByte = (&big.Int{}).Mul(BigMiByte, bigIECExp)
// BigTiByte is 1,024 g bytes in big.Ints
BigTiByte = (&big.Int{}).Mul(BigGiByte, bigIECExp)
// BigPiByte is 1,024 t bytes in big.Ints
BigPiByte = (&big.Int{}).Mul(BigTiByte, bigIECExp)
// BigEiByte is 1,024 p bytes in big.Ints
BigEiByte = (&big.Int{}).Mul(BigPiByte, bigIECExp)
// BigZiByte is 1,024 e bytes in big.Ints
BigZiByte = (&big.Int{}).Mul(BigEiByte, bigIECExp)
// BigYiByte is 1,024 z bytes in big.Ints
BigYiByte = (&big.Int{}).Mul(BigZiByte, bigIECExp)
)
var (
bigSIExp = big.NewInt(1000)
// BigSIByte is one SI byte in big.Ints
BigSIByte = big.NewInt(1)
// BigKByte is 1,000 SI bytes in big.Ints
BigKByte = (&big.Int{}).Mul(BigSIByte, bigSIExp)
// BigMByte is 1,000 SI k bytes in big.Ints
BigMByte = (&big.Int{}).Mul(BigKByte, bigSIExp)
// BigGByte is 1,000 SI m bytes in big.Ints
BigGByte = (&big.Int{}).Mul(BigMByte, bigSIExp)
// BigTByte is 1,000 SI g bytes in big.Ints
BigTByte = (&big.Int{}).Mul(BigGByte, bigSIExp)
// BigPByte is 1,000 SI t bytes in big.Ints
BigPByte = (&big.Int{}).Mul(BigTByte, bigSIExp)
// BigEByte is 1,000 SI p bytes in big.Ints
BigEByte = (&big.Int{}).Mul(BigPByte, bigSIExp)
// BigZByte is 1,000 SI e bytes in big.Ints
BigZByte = (&big.Int{}).Mul(BigEByte, bigSIExp)
// BigYByte is 1,000 SI z bytes in big.Ints
BigYByte = (&big.Int{}).Mul(BigZByte, bigSIExp)
)
var bigBytesSizeTable = map[string]*big.Int{
"b": BigByte,
"kib": BigKiByte,
"kb": BigKByte,
"mib": BigMiByte,
"mb": BigMByte,
"gib": BigGiByte,
"gb": BigGByte,
"tib": BigTiByte,
"tb": BigTByte,
"pib": BigPiByte,
"pb": BigPByte,
"eib": BigEiByte,
"eb": BigEByte,
"zib": BigZiByte,
"zb": BigZByte,
"yib": BigYiByte,
"yb": BigYByte,
// Without suffix
"": BigByte,
"ki": BigKiByte,
"k": BigKByte,
"mi": BigMiByte,
"m": BigMByte,
"gi": BigGiByte,
"g": BigGByte,
"ti": BigTiByte,
"t": BigTByte,
"pi": BigPiByte,
"p": BigPByte,
"ei": BigEiByte,
"e": BigEByte,
"z": BigZByte,
"zi": BigZiByte,
"y": BigYByte,
"yi": BigYiByte,
}
var ten = big.NewInt(10)
func humanateBigBytes(s, base *big.Int, sizes []string) string {
if s.Cmp(ten) < 0 {
return fmt.Sprintf("%d B", s)
}
c := (&big.Int{}).Set(s)
val, mag := oomm(c, base, len(sizes)-1)
suffix := sizes[mag]
f := "%.0f %s"
if val < 10 {
f = "%.1f %s"
}
return fmt.Sprintf(f, val, suffix)
}
// BigBytes produces a human readable representation of an SI size.
//
// See also: ParseBigBytes.
//
// BigBytes(82854982) -> 83 MB
func BigBytes(s *big.Int) string {
sizes := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"}
return humanateBigBytes(s, bigSIExp, sizes)
}
// BigIBytes produces a human readable representation of an IEC size.
//
// See also: ParseBigBytes.
//
// BigIBytes(82854982) -> 79 MiB
func BigIBytes(s *big.Int) string {
sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"}
return humanateBigBytes(s, bigIECExp, sizes)
}
// ParseBigBytes parses a string representation of bytes into the number
// of bytes it represents.
//
// See also: BigBytes, BigIBytes.
//
// ParseBigBytes("42 MB") -> 42000000, nil
// ParseBigBytes("42 mib") -> 44040192, nil
func ParseBigBytes(s string) (*big.Int, error) {
lastDigit := 0
hasComma := false
for _, r := range s {
if !(unicode.IsDigit(r) || r == '.' || r == ',') {
break
}
if r == ',' {
hasComma = true
}
lastDigit++
}
num := s[:lastDigit]
if hasComma {
num = strings.Replace(num, ",", "", -1)
}
val := &big.Rat{}
_, err := fmt.Sscanf(num, "%f", val)
if err != nil {
return nil, err
}
extra := strings.ToLower(strings.TrimSpace(s[lastDigit:]))
if m, ok := bigBytesSizeTable[extra]; ok {
mv := (&big.Rat{}).SetInt(m)
val.Mul(val, mv)
rv := &big.Int{}
rv.Div(val.Num(), val.Denom())
return rv, nil
}
return nil, fmt.Errorf("unhandled size name: %v", extra)
}

143
vendor/github.com/dustin/go-humanize/bytes.go generated vendored Normal file

@@ -0,0 +1,143 @@
package humanize
import (
"fmt"
"math"
"strconv"
"strings"
"unicode"
)
// IEC Sizes.
// kibis of bits
const (
Byte = 1 << (iota * 10)
KiByte
MiByte
GiByte
TiByte
PiByte
EiByte
)
// SI Sizes.
const (
IByte = 1
KByte = IByte * 1000
MByte = KByte * 1000
GByte = MByte * 1000
TByte = GByte * 1000
PByte = TByte * 1000
EByte = PByte * 1000
)
var bytesSizeTable = map[string]uint64{
"b": Byte,
"kib": KiByte,
"kb": KByte,
"mib": MiByte,
"mb": MByte,
"gib": GiByte,
"gb": GByte,
"tib": TiByte,
"tb": TByte,
"pib": PiByte,
"pb": PByte,
"eib": EiByte,
"eb": EByte,
// Without suffix
"": Byte,
"ki": KiByte,
"k": KByte,
"mi": MiByte,
"m": MByte,
"gi": GiByte,
"g": GByte,
"ti": TiByte,
"t": TByte,
"pi": PiByte,
"p": PByte,
"ei": EiByte,
"e": EByte,
}
func logn(n, b float64) float64 {
return math.Log(n) / math.Log(b)
}
func humanateBytes(s uint64, base float64, sizes []string) string {
if s < 10 {
return fmt.Sprintf("%d B", s)
}
e := math.Floor(logn(float64(s), base))
suffix := sizes[int(e)]
val := math.Floor(float64(s)/math.Pow(base, e)*10+0.5) / 10
f := "%.0f %s"
if val < 10 {
f = "%.1f %s"
}
return fmt.Sprintf(f, val, suffix)
}
// Bytes produces a human readable representation of an SI size.
//
// See also: ParseBytes.
//
// Bytes(82854982) -> 83 MB
func Bytes(s uint64) string {
sizes := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB"}
return humanateBytes(s, 1000, sizes)
}
// IBytes produces a human readable representation of an IEC size.
//
// See also: ParseBytes.
//
// IBytes(82854982) -> 79 MiB
func IBytes(s uint64) string {
sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"}
return humanateBytes(s, 1024, sizes)
}
// ParseBytes parses a string representation of bytes into the number
// of bytes it represents.
//
// See Also: Bytes, IBytes.
//
// ParseBytes("42 MB") -> 42000000, nil
// ParseBytes("42 mib") -> 44040192, nil
func ParseBytes(s string) (uint64, error) {
lastDigit := 0
hasComma := false
for _, r := range s {
if !(unicode.IsDigit(r) || r == '.' || r == ',') {
break
}
if r == ',' {
hasComma = true
}
lastDigit++
}
num := s[:lastDigit]
if hasComma {
num = strings.Replace(num, ",", "", -1)
}
f, err := strconv.ParseFloat(num, 64)
if err != nil {
return 0, err
}
extra := strings.ToLower(strings.TrimSpace(s[lastDigit:]))
if m, ok := bytesSizeTable[extra]; ok {
f *= float64(m)
if f >= math.MaxUint64 {
return 0, fmt.Errorf("too large: %v", s)
}
return uint64(f), nil
}
return 0, fmt.Errorf("unhandled size name: %v", extra)
}
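
A short, hedged sketch of how Bytes, IBytes, and ParseBytes relate; the wrapper program and values are illustrative, not part of this commit:

```go
package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

func main() {
	fmt.Println(humanize.Bytes(82854982))  // "83 MB"  (SI, base 1000)
	fmt.Println(humanize.IBytes(82854982)) // "79 MiB" (IEC, base 1024)

	// ParseBytes accepts both SI and IEC suffixes, case-insensitively.
	n, err := humanize.ParseBytes("79 MiB")
	if err != nil {
		panic(err)
	}
	fmt.Println(n) // 82837504 -- not the original value, since "79 MiB" was already rounded for display
}
```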

116
vendor/github.com/dustin/go-humanize/comma.go generated vendored Normal file

@@ -0,0 +1,116 @@
package humanize
import (
"bytes"
"math"
"math/big"
"strconv"
"strings"
)
// Comma produces a string form of the given number in base 10 with
// commas after every three orders of magnitude.
//
// e.g. Comma(834142) -> 834,142
func Comma(v int64) string {
sign := ""
// Min int64 can't be negated to a usable value, so it has to be special cased.
if v == math.MinInt64 {
return "-9,223,372,036,854,775,808"
}
if v < 0 {
sign = "-"
v = 0 - v
}
parts := []string{"", "", "", "", "", "", ""}
j := len(parts) - 1
for v > 999 {
parts[j] = strconv.FormatInt(v%1000, 10)
switch len(parts[j]) {
case 2:
parts[j] = "0" + parts[j]
case 1:
parts[j] = "00" + parts[j]
}
v = v / 1000
j--
}
parts[j] = strconv.Itoa(int(v))
return sign + strings.Join(parts[j:], ",")
}
// Commaf produces a string form of the given number in base 10 with
// commas after every three orders of magnitude.
//
// e.g. Commaf(834142.32) -> 834,142.32
func Commaf(v float64) string {
buf := &bytes.Buffer{}
if v < 0 {
buf.Write([]byte{'-'})
v = 0 - v
}
comma := []byte{','}
parts := strings.Split(strconv.FormatFloat(v, 'f', -1, 64), ".")
pos := 0
if len(parts[0])%3 != 0 {
pos += len(parts[0]) % 3
buf.WriteString(parts[0][:pos])
buf.Write(comma)
}
for ; pos < len(parts[0]); pos += 3 {
buf.WriteString(parts[0][pos : pos+3])
buf.Write(comma)
}
buf.Truncate(buf.Len() - 1)
if len(parts) > 1 {
buf.Write([]byte{'.'})
buf.WriteString(parts[1])
}
return buf.String()
}
// CommafWithDigits works like the Commaf but limits the resulting
// string to the given number of decimal places.
//
// e.g. CommafWithDigits(834142.32, 1) -> 834,142.3
func CommafWithDigits(f float64, decimals int) string {
return stripTrailingDigits(Commaf(f), decimals)
}
// BigComma produces a string form of the given big.Int in base 10
// with commas after every three orders of magnitude.
func BigComma(b *big.Int) string {
sign := ""
if b.Sign() < 0 {
sign = "-"
b.Abs(b)
}
athousand := big.NewInt(1000)
c := (&big.Int{}).Set(b)
_, m := oom(c, athousand)
parts := make([]string, m+1)
j := len(parts) - 1
mod := &big.Int{}
for b.Cmp(athousand) >= 0 {
b.DivMod(b, athousand, mod)
parts[j] = strconv.FormatInt(mod.Int64(), 10)
switch len(parts[j]) {
case 2:
parts[j] = "0" + parts[j]
case 1:
parts[j] = "00" + parts[j]
}
j--
}
parts[j] = strconv.Itoa(int(b.Int64()))
return sign + strings.Join(parts[j:], ",")
}
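
A brief illustrative sketch of the comma helpers defined above (the wrapper and values are made up):

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/dustin/go-humanize"
)

func main() {
	fmt.Println(humanize.Comma(-1234567))                  // -1,234,567
	fmt.Println(humanize.Commaf(834142.32))                // 834,142.32
	fmt.Println(humanize.CommafWithDigits(834142.32, 1))   // 834,142.3
	fmt.Println(humanize.BigComma(big.NewInt(1000000000))) // 1,000,000,000
}
```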

40
vendor/github.com/dustin/go-humanize/commaf.go generated vendored Normal file

@@ -0,0 +1,40 @@
// +build go1.6

package humanize
import (
"bytes"
"math/big"
"strings"
)
// BigCommaf produces a string form of the given big.Float in base 10
// with commas after every three orders of magnitude.
func BigCommaf(v *big.Float) string {
buf := &bytes.Buffer{}
if v.Sign() < 0 {
buf.Write([]byte{'-'})
v.Abs(v)
}
comma := []byte{','}
parts := strings.Split(v.Text('f', -1), ".")
pos := 0
if len(parts[0])%3 != 0 {
pos += len(parts[0]) % 3
buf.WriteString(parts[0][:pos])
buf.Write(comma)
}
for ; pos < len(parts[0]); pos += 3 {
buf.WriteString(parts[0][pos : pos+3])
buf.Write(comma)
}
buf.Truncate(buf.Len() - 1)
if len(parts) > 1 {
buf.Write([]byte{'.'})
buf.WriteString(parts[1])
}
return buf.String()
}

46
vendor/github.com/dustin/go-humanize/ftoa.go generated vendored Normal file

@@ -0,0 +1,46 @@
package humanize
import (
"strconv"
"strings"
)
func stripTrailingZeros(s string) string {
offset := len(s) - 1
for offset > 0 {
if s[offset] == '.' {
offset--
break
}
if s[offset] != '0' {
break
}
offset--
}
return s[:offset+1]
}
func stripTrailingDigits(s string, digits int) string {
if i := strings.Index(s, "."); i >= 0 {
if digits <= 0 {
return s[:i]
}
i++
if i+digits >= len(s) {
return s
}
return s[:i+digits]
}
return s
}
// Ftoa converts a float to a string with no trailing zeros.
func Ftoa(num float64) string {
return stripTrailingZeros(strconv.FormatFloat(num, 'f', 6, 64))
}
// FtoaWithDigits converts a float to a string but limits the resulting string
// to the given number of decimal places, and no trailing zeros.
func FtoaWithDigits(num float64, digits int) string {
return stripTrailingZeros(stripTrailingDigits(strconv.FormatFloat(num, 'f', 6, 64), digits))
}

8
vendor/github.com/dustin/go-humanize/humanize.go generated vendored Normal file

@@ -0,0 +1,8 @@
/*
Package humanize converts boring ugly numbers to human-friendly strings and back.
Durations can be turned into strings such as "3 days ago", numbers
representing sizes like 82854982 into useful strings like, "83 MB" or
"79 MiB" (whichever you prefer).
*/
package humanize

192
vendor/github.com/dustin/go-humanize/number.go generated vendored Normal file

@@ -0,0 +1,192 @@
package humanize
/*
Slightly adapted from the source to fit go-humanize.
Author: https://github.com/gorhill
Source: https://gist.github.com/gorhill/5285193
*/
import (
"math"
"strconv"
)
var (
renderFloatPrecisionMultipliers = [...]float64{
1,
10,
100,
1000,
10000,
100000,
1000000,
10000000,
100000000,
1000000000,
}
renderFloatPrecisionRounders = [...]float64{
0.5,
0.05,
0.005,
0.0005,
0.00005,
0.000005,
0.0000005,
0.00000005,
0.000000005,
0.0000000005,
}
)
// FormatFloat produces a formatted number as string based on the following user-specified criteria:
// * thousands separator
// * decimal separator
// * decimal precision
//
// Usage: s := RenderFloat(format, n)
// The format parameter tells how to render the number n.
//
// See examples: http://play.golang.org/p/LXc1Ddm1lJ
//
// Examples of format strings, given n = 12345.6789:
// "#,###.##" => "12,345.67"
// "#,###." => "12,345"
// "#,###" => "12345,678"
// "#\u202F###,##" => "12345,68"
// "#.###,###### => 12.345,678900
// "" (aka default format) => 12,345.67
//
// The highest precision allowed is 9 digits after the decimal symbol.
// There is also a version for integer number, FormatInteger(),
// which is convenient for calls within template.
func FormatFloat(format string, n float64) string {
// Special cases:
// NaN = "NaN"
// +Inf = "+Infinity"
// -Inf = "-Infinity"
if math.IsNaN(n) {
return "NaN"
}
if n > math.MaxFloat64 {
return "Infinity"
}
if n < -math.MaxFloat64 {
return "-Infinity"
}
// default format
precision := 2
decimalStr := "."
thousandStr := ","
positiveStr := ""
negativeStr := "-"
if len(format) > 0 {
format := []rune(format)
// If there is an explicit format directive,
// then default values are these:
precision = 9
thousandStr = ""
// collect indices of meaningful formatting directives
formatIndx := []int{}
for i, char := range format {
if char != '#' && char != '0' {
formatIndx = append(formatIndx, i)
}
}
if len(formatIndx) > 0 {
// Directive at index 0:
// Must be a '+'
// Raise an error if not the case
// index: 0123456789
// +0.000,000
// +000,000.0
// +0000.00
// +0000
if formatIndx[0] == 0 {
if format[formatIndx[0]] != '+' {
panic("RenderFloat(): invalid positive sign directive")
}
positiveStr = "+"
formatIndx = formatIndx[1:]
}
// Two directives:
// First is thousands separator
// Raise an error if not followed by 3-digit
// 0123456789
// 0.000,000
// 000,000.00
if len(formatIndx) == 2 {
if (formatIndx[1] - formatIndx[0]) != 4 {
panic("RenderFloat(): thousands separator directive must be followed by 3 digit-specifiers")
}
thousandStr = string(format[formatIndx[0]])
formatIndx = formatIndx[1:]
}
// One directive:
// Directive is decimal separator
// The number of digit-specifier following the separator indicates wanted precision
// 0123456789
// 0.00
// 000,0000
if len(formatIndx) == 1 {
decimalStr = string(format[formatIndx[0]])
precision = len(format) - formatIndx[0] - 1
}
}
}
// generate sign part
var signStr string
if n >= 0.000000001 {
signStr = positiveStr
} else if n <= -0.000000001 {
signStr = negativeStr
n = -n
} else {
signStr = ""
n = 0.0
}
// split number into integer and fractional parts
intf, fracf := math.Modf(n + renderFloatPrecisionRounders[precision])
// generate integer part string
intStr := strconv.FormatInt(int64(intf), 10)
// add thousand separator if required
if len(thousandStr) > 0 {
for i := len(intStr); i > 3; {
i -= 3
intStr = intStr[:i] + thousandStr + intStr[i:]
}
}
// no fractional part, we can leave now
if precision == 0 {
return signStr + intStr
}
// generate fractional part
fracStr := strconv.Itoa(int(fracf * renderFloatPrecisionMultipliers[precision]))
// may need padding
if len(fracStr) < precision {
fracStr = "000000000000000"[:precision-len(fracStr)] + fracStr
}
return signStr + intStr + decimalStr + fracStr
}
// FormatInteger produces a formatted number as string.
// See FormatFloat.
func FormatInteger(format string, n int) string {
return FormatFloat(format, float64(n))
}
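
To make the format mini-language above concrete, a hedged example; the format strings and values are illustrative only:

```go
package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

func main() {
	// "," as thousands separator, "." as decimal separator, two decimals.
	fmt.Println(humanize.FormatFloat("#,###.##", 1234.5)) // 1,234.50

	// An empty format falls back to the default: two decimals, "," thousands, "." decimal.
	fmt.Println(humanize.FormatFloat("", 1234.5)) // 1,234.50

	// A trailing separator with no digit specifiers after it means zero decimals.
	fmt.Println(humanize.FormatInteger("#,###.", 1234567)) // 1,234,567
}
```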

25
vendor/github.com/dustin/go-humanize/ordinals.go generated vendored Normal file

@@ -0,0 +1,25 @@
package humanize
import "strconv"
// Ordinal gives you the input number in a rank/ordinal format.
//
// Ordinal(3) -> 3rd
func Ordinal(x int) string {
suffix := "th"
switch x % 10 {
case 1:
if x%100 != 11 {
suffix = "st"
}
case 2:
if x%100 != 12 {
suffix = "nd"
}
case 3:
if x%100 != 13 {
suffix = "rd"
}
}
return strconv.Itoa(x) + suffix
}

123
vendor/github.com/dustin/go-humanize/si.go generated vendored Normal file

@@ -0,0 +1,123 @@
package humanize
import (
"errors"
"math"
"regexp"
"strconv"
)
var siPrefixTable = map[float64]string{
-24: "y", // yocto
-21: "z", // zepto
-18: "a", // atto
-15: "f", // femto
-12: "p", // pico
-9: "n", // nano
-6: "µ", // micro
-3: "m", // milli
0: "",
3: "k", // kilo
6: "M", // mega
9: "G", // giga
12: "T", // tera
15: "P", // peta
18: "E", // exa
21: "Z", // zetta
24: "Y", // yotta
}
var revSIPrefixTable = revfmap(siPrefixTable)
// revfmap reverses the map and precomputes the power multiplier
func revfmap(in map[float64]string) map[string]float64 {
rv := map[string]float64{}
for k, v := range in {
rv[v] = math.Pow(10, k)
}
return rv
}
var riParseRegex *regexp.Regexp
func init() {
ri := `^([\-0-9.]+)\s?([`
for _, v := range siPrefixTable {
ri += v
}
ri += `]?)(.*)`
riParseRegex = regexp.MustCompile(ri)
}
// ComputeSI finds the most appropriate SI prefix for the given number
// and returns the prefix along with the value adjusted to be within
// that prefix.
//
// See also: SI, ParseSI.
//
// e.g. ComputeSI(2.2345e-12) -> (2.2345, "p")
func ComputeSI(input float64) (float64, string) {
if input == 0 {
return 0, ""
}
mag := math.Abs(input)
exponent := math.Floor(logn(mag, 10))
exponent = math.Floor(exponent/3) * 3
value := mag / math.Pow(10, exponent)
// Handle special case where value is exactly 1000.0
// Should return 1 M instead of 1000 k
if value == 1000.0 {
exponent += 3
value = mag / math.Pow(10, exponent)
}
value = math.Copysign(value, input)
prefix := siPrefixTable[exponent]
return value, prefix
}
// SI returns a string with default formatting.
//
// SI uses Ftoa to format float value, removing trailing zeros.
//
// See also: ComputeSI, ParseSI.
//
// e.g. SI(1000000, "B") -> 1 MB
// e.g. SI(2.2345e-12, "F") -> 2.2345 pF
func SI(input float64, unit string) string {
value, prefix := ComputeSI(input)
return Ftoa(value) + " " + prefix + unit
}
// SIWithDigits works like SI but limits the resulting string to the
// given number of decimal places.
//
// e.g. SIWithDigits(1000000, 0, "B") -> 1 MB
// e.g. SIWithDigits(2.2345e-12, 2, "F") -> 2.23 pF
func SIWithDigits(input float64, decimals int, unit string) string {
value, prefix := ComputeSI(input)
return FtoaWithDigits(value, decimals) + " " + prefix + unit
}
var errInvalid = errors.New("invalid input")
// ParseSI parses an SI string back into the number and unit.
//
// See also: SI, ComputeSI.
//
// e.g. ParseSI("2.2345 pF") -> (2.2345e-12, "F", nil)
func ParseSI(input string) (float64, string, error) {
found := riParseRegex.FindStringSubmatch(input)
if len(found) != 4 {
return 0, "", errInvalid
}
mag := revSIPrefixTable[found[2]]
unit := found[3]
base, err := strconv.ParseFloat(found[1], 64)
return base * mag, unit, err
}
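
A small round-trip sketch of the SI helpers; the unit "F" and the values are illustrative:

```go
package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

func main() {
	fmt.Println(humanize.SI(2.2345e-12, "F"))              // 2.2345 pF
	fmt.Println(humanize.SIWithDigits(2.2345e-12, 2, "F")) // 2.23 pF

	v, unit, err := humanize.ParseSI("2.2345 pF")
	if err != nil {
		panic(err)
	}
	fmt.Println(v, unit) // ~2.2345e-12 F
}
```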

117
vendor/github.com/dustin/go-humanize/times.go generated vendored Normal file

@@ -0,0 +1,117 @@
package humanize
import (
"fmt"
"math"
"sort"
"time"
)
// Seconds-based time units
const (
Day = 24 * time.Hour
Week = 7 * Day
Month = 30 * Day
Year = 12 * Month
LongTime = 37 * Year
)
// Time formats a time into a relative string.
//
// Time(someT) -> "3 weeks ago"
func Time(then time.Time) string {
return RelTime(then, time.Now(), "ago", "from now")
}
// A RelTimeMagnitude struct contains a relative time point at which
// the relative format of time will switch to a new format string. A
// slice of these in ascending order by their "D" field is passed to
// CustomRelTime to format durations.
//
// The Format field is a string that may contain a "%s" which will be
// replaced with the appropriate signed label (e.g. "ago" or "from
// now") and a "%d" that will be replaced by the quantity.
//
// The DivBy field is the amount of time the time difference must be
// divided by in order to display correctly.
//
// e.g. if D is 2*time.Minute and you want to display "%d minutes %s"
// DivBy should be time.Minute so whatever the duration is will be
// expressed in minutes.
type RelTimeMagnitude struct {
D time.Duration
Format string
DivBy time.Duration
}
var defaultMagnitudes = []RelTimeMagnitude{
{time.Second, "now", time.Second},
{2 * time.Second, "1 second %s", 1},
{time.Minute, "%d seconds %s", time.Second},
{2 * time.Minute, "1 minute %s", 1},
{time.Hour, "%d minutes %s", time.Minute},
{2 * time.Hour, "1 hour %s", 1},
{Day, "%d hours %s", time.Hour},
{2 * Day, "1 day %s", 1},
{Week, "%d days %s", Day},
{2 * Week, "1 week %s", 1},
{Month, "%d weeks %s", Week},
{2 * Month, "1 month %s", 1},
{Year, "%d months %s", Month},
{18 * Month, "1 year %s", 1},
{2 * Year, "2 years %s", 1},
{LongTime, "%d years %s", Year},
{math.MaxInt64, "a long while %s", 1},
}
// RelTime formats a time into a relative string.
//
// It takes two times and two labels. In addition to the generic time
// delta string (e.g. 5 minutes), the labels are applied so that
// the label corresponding to the smaller time is applied.
//
// RelTime(timeInPast, timeInFuture, "earlier", "later") -> "3 weeks earlier"
func RelTime(a, b time.Time, albl, blbl string) string {
return CustomRelTime(a, b, albl, blbl, defaultMagnitudes)
}
// CustomRelTime formats a time into a relative string.
//
// It takes two times, two labels, and a table of relative time formats.
// In addition to the generic time delta string (e.g. 5 minutes), the
// labels are applied so that the label corresponding to the
// smaller time is applied.
func CustomRelTime(a, b time.Time, albl, blbl string, magnitudes []RelTimeMagnitude) string {
lbl := albl
diff := b.Sub(a)
if a.After(b) {
lbl = blbl
diff = a.Sub(b)
}
n := sort.Search(len(magnitudes), func(i int) bool {
return magnitudes[i].D > diff
})
if n >= len(magnitudes) {
n = len(magnitudes) - 1
}
mag := magnitudes[n]
args := []interface{}{}
escaped := false
for _, ch := range mag.Format {
if escaped {
switch ch {
case 's':
args = append(args, lbl)
case 'd':
args = append(args, diff/mag.DivBy)
}
escaped = false
} else {
escaped = ch == '%'
}
}
return fmt.Sprintf(mag.Format, args...)
}
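
Since the magnitude table is what drives the wording, here is a hedged sketch of a custom table; the coarse table, labels, and wrapper program are made up for illustration:

```go
package main

import (
	"fmt"
	"math"
	"time"

	"github.com/dustin/go-humanize"
)

func main() {
	// Default table.
	fmt.Println(humanize.Time(time.Now().Add(-3 * humanize.Week))) // 3 weeks ago

	// A coarser table: anything under a day is "today", otherwise whole days.
	coarse := []humanize.RelTimeMagnitude{
		{D: humanize.Day, Format: "today", DivBy: time.Second},
		{D: humanize.LongTime, Format: "%d days %s", DivBy: humanize.Day},
		{D: math.MaxInt64, Format: "a long while %s", DivBy: 1},
	}
	then := time.Now().Add(-36 * time.Hour)
	// Prints "1 days ago": the table controls the wording, so singular forms
	// need their own entries (as the default table provides).
	fmt.Println(humanize.CustomRelTime(then, time.Now(), "ago", "from now", coarse))
}
```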


@@ -1 +0,0 @@
* text eol=lf


@@ -1,8 +0,0 @@
language: go
os:
  - linux
  - osx
go:
  - 1.12
script:
  - go test -v


@@ -1,11 +0,0 @@
Main author and maintainer of pongo2:
* Florian Schlachter <flori@n-schlachter.de>
Contributors (in no specific order):
* @romanoaugusto88
* @vitalbh
* @blaubaer
Feel free to add yourself to the list or to modify your entry if you did a contribution.


@@ -1,20 +0,0 @@
The MIT License (MIT)
Copyright (c) 2013-2014 Florian Schlachter
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@@ -1,273 +0,0 @@
# [pongo](https://en.wikipedia.org/wiki/Pongo_%28genus%29)2
[![Join the chat at https://gitter.im/flosch/pongo2](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/flosch/pongo2)
[![GoDoc](https://godoc.org/github.com/flosch/pongo2?status.svg)](https://godoc.org/github.com/flosch/pongo2)
[![Build Status](https://travis-ci.org/flosch/pongo2.svg?branch=master)](https://travis-ci.org/flosch/pongo2)
[![Backers on Open Collective](https://opencollective.com/pongo2/backers/badge.svg)](#backers)
[![Sponsors on Open Collective](https://opencollective.com/pongo2/sponsors/badge.svg)](#sponsors)
pongo2 is the successor of [pongo](https://github.com/flosch/pongo), a Django-syntax like templating-language.
Install/update using `go get` (no dependencies required by pongo2):
```
go get -u github.com/flosch/pongo2
```
Please use the [issue tracker](https://github.com/flosch/pongo2/issues) if you're encountering any problems with pongo2 or if you need help with implementing tags or filters ([create a ticket!](https://github.com/flosch/pongo2/issues/new)).
## First impression of a template
```HTML+Django
<html><head><title>Our admins and users</title></head>
{# This is a short example to give you a quick overview of pongo2's syntax. #}
{% macro user_details(user, is_admin=false) %}
<div class="user_item">
<!-- Let's indicate a user's good karma -->
<h2 {% if (user.karma >= 40) || (user.karma > calc_avg_karma(userlist)+5) %}
class="karma-good"{% endif %}>
<!-- This will call user.String() automatically if available: -->
{{ user }}
</h2>
<!-- Will print a human-readable time duration like "3 weeks ago" -->
<p>This user registered {{ user.register_date|naturaltime }}.</p>
<!-- Let's allow the users to write down their biography using markdown;
we will only show the first 15 words as a preview -->
<p>The user's biography:</p>
<p>{{ user.biography|markdown|truncatewords_html:15 }}
<a href="/user/{{ user.id }}/">read more</a></p>
{% if is_admin %}<p>This user is an admin!</p>{% endif %}
</div>
{% endmacro %}
<body>
<!-- Make use of the macro defined above to avoid repetitive HTML code
since we want to use the same code for admins AND members -->
<h1>Our admins</h1>
{% for admin in adminlist %}
{{ user_details(admin, true) }}
{% endfor %}
<h1>Our members</h1>
{% for user in userlist %}
{{ user_details(user) }}
{% endfor %}
</body>
</html>
```
## Development status
**Latest stable release**: v3.0 (`go get -u gopkg.in/flosch/pongo2.v3` / [`v3`](https://github.com/flosch/pongo2/tree/v3)-branch)
**Current development**: v4 (`master`-branch)
*Note*: With the release of pongo v4 the branch v2 will be deprecated.
**Deprecated versions** (not supported anymore): v1
| Topic | Status |
| ------------------------------------ | -------------------------------------------------------------------------------------- |
| Django version compatibility: | [1.7](https://docs.djangoproject.com/en/1.7/ref/templates/builtins/) |
| *Missing* (planned) **filters**: | none ([hints](https://github.com/flosch/pongo2/blob/master/filters_builtin.go#L3)) |
| *Missing* (planned) **tags**: | none ([hints](https://github.com/flosch/pongo2/blob/master/tags.go#L3)) |
Please also have a look on the [caveats](https://github.com/flosch/pongo2#caveats) and on the [official add-ons](https://github.com/flosch/pongo2#official).
## Features (and new in pongo2)
* Entirely rewritten from the ground-up.
* [Advanced C-like expressions](https://github.com/flosch/pongo2/blob/master/template_tests/expressions.tpl).
* [Complex function calls within expressions](https://github.com/flosch/pongo2/blob/master/template_tests/function_calls_wrapper.tpl).
* [Easy API to create new filters and tags](http://godoc.org/github.com/flosch/pongo2#RegisterFilter) ([including parsing arguments](http://godoc.org/github.com/flosch/pongo2#Parser))
* Additional features:
* Macros including importing macros from other files (see [template_tests/macro.tpl](https://github.com/flosch/pongo2/blob/master/template_tests/macro.tpl))
* [Template sandboxing](https://godoc.org/github.com/flosch/pongo2#TemplateSet) ([directory patterns](http://golang.org/pkg/path/filepath/#Match), banned tags/filters)
## Recent API changes within pongo2
If you're using the `master`-branch of pongo2, you might be interested in this section. Since pongo2 is still in development (even though there is a first stable release!), there could be (backwards-incompatible) API changes over time. To keep track of these and therefore make it painless for you to adapt your codebase, I'll list them here.
* Function signature for tag execution changed: not taking a `bytes.Buffer` anymore; instead `Execute()`-functions are now taking a `TemplateWriter` interface.
* Function signature for tag and filter parsing/execution changed (`error` return type changed to `*Error`).
* `INodeEvaluator` has been removed and got replaced by `IEvaluator`. You can change your existing tags/filters by simply replacing the interface.
* Two new helper functions: [`RenderTemplateFile()`](https://godoc.org/github.com/flosch/pongo2#RenderTemplateFile) and [`RenderTemplateString()`](https://godoc.org/github.com/flosch/pongo2#RenderTemplateString).
* `Template.ExecuteRW()` is now [`Template.ExecuteWriter()`](https://godoc.org/github.com/flosch/pongo2#Template.ExecuteWriter)
* `Template.Execute*()` functions do now take a `pongo2.Context` directly (no pointer anymore).
## How you can help
* Write [filters](https://github.com/flosch/pongo2/blob/master/filters_builtin.go#L3) / [tags] by forking pongo2 and sending pull requests
* Write/improve code tests (use the following command to see what tests are missing: `go test -v -cover -covermode=count -coverprofile=cover.out && go tool cover -html=cover.out` or have a look on [gocover.io/github.com/flosch/pongo2](http://gocover.io/github.com/flosch/pongo2))
* Write/improve template tests (see the `template_tests/` directory)
* Write middleware, libraries and websites using pongo2. :-)
# Documentation
For a documentation on how the templating language works you can [head over to the Django documentation](https://docs.djangoproject.com/en/dev/topics/templates/). pongo2 aims to be compatible with it.
You can access pongo2's API documentation on [godoc](https://godoc.org/github.com/flosch/pongo2).
## Caveats
### Filters
* **date** / **time**: The `date` and `time` filter are taking the Golang specific time- and date-format (not Django's one) currently. [Take a look on the format here](http://golang.org/pkg/time/#Time.Format).
* **stringformat**: `stringformat` does **not** take Python's string format syntax as a parameter, instead it takes Go's. Essentially `{{ 3.14|stringformat:"pi is %.2f" }}` is `fmt.Sprintf("pi is %.2f", 3.14)`.
* **escape** / **force_escape**: Unlike Django's behaviour, the `escape`-filter is applied immediately. Therefore there is no need for a `force_escape`-filter yet.
### Tags
* **for**: All the `forloop` fields (like `forloop.counter`) are written with a capital letter at the beginning. For example, the `counter` can be accessed by `forloop.Counter` and the parentloop by `forloop.Parentloop`.
* **now**: takes Go's time format (see **date** and **time**-filter).
### Misc
* **not in-operator**: You can check whether a map/struct/string contains a key/field/substring by using the in-operator (or the negation of it):
`{% if key in map %}Key is in map{% else %}Key not in map{% endif %}` or `{% if !(key in map) %}Key is NOT in map{% else %}Key is in map{% endif %}`.
# Add-ons, libraries and helpers
## Official
* [ponginae](https://github.com/flosch/ponginae) - A web-framework for Go (using pongo2).
* [pongo2-tools](https://github.com/flosch/pongo2-tools) - Official tools and helpers for pongo2
* [pongo2-addons](https://github.com/flosch/pongo2-addons) - Official additional filters/tags for pongo2 (for example a **markdown**-filter). They are in their own repository because they're relying on 3rd-party-libraries.
## 3rd-party
* [beego-pongo2](https://github.com/oal/beego-pongo2) - A tiny little helper for using Pongo2 with [Beego](https://github.com/astaxie/beego).
* [beego-pongo2.v2](https://github.com/ipfans/beego-pongo2.v2) - Same as `beego-pongo2`, but for pongo2 v2.
* [macaron-pongo2](https://github.com/macaron-contrib/pongo2) - pongo2 support for [Macaron](https://github.com/Unknwon/macaron), a modular web framework.
* [ginpongo2](https://github.com/ngerakines/ginpongo2) - middleware for [gin](github.com/gin-gonic/gin) to use pongo2 templates
* [Build'n support for Iris' template engine](https://github.com/kataras/iris)
* [pongo2gin](https://gitlab.com/go-box/pongo2gin) - alternative renderer for [gin](github.com/gin-gonic/gin) to use pongo2 templates
* [pongo2-trans](https://github.com/digitalcrab/pongo2trans) - `trans`-tag implementation for internationalization
* [tpongo2](https://github.com/tango-contrib/tpongo2) - pongo2 support for [Tango](https://github.com/lunny/tango), a micro-kernel & pluggable web framework.
* [p2cli](https://github.com/wrouesnel/p2cli) - command line templating utility based on pongo2
Please add your project to this list and send me a pull request when you've developed something nice for pongo2.
# API-usage examples
Please see the documentation for a full list of provided API methods.
## A tiny example (template string)
```Go
// Compile the template first (i. e. creating the AST)
tpl, err := pongo2.FromString("Hello {{ name|capfirst }}!")
if err != nil {
panic(err)
}
// Now you can render the template with the given
// pongo2.Context how often you want to.
out, err := tpl.Execute(pongo2.Context{"name": "florian"})
if err != nil {
panic(err)
}
fmt.Println(out) // Output: Hello Florian!
```
## Example server-usage (template file)
```Go
package main

import (
	"net/http"

	"github.com/flosch/pongo2"
)

// Pre-compile the templates at application startup using the little
// Must() helper function (Must() panics if FromFile() or FromString()
// returns an error - that's it).
// It's faster to pre-compile the templates once at startup and only
// execute them later.
var tplExample = pongo2.Must(pongo2.FromFile("example.html"))

func examplePage(w http.ResponseWriter, r *http.Request) {
	// Execute the template per HTTP request
	err := tplExample.ExecuteWriter(pongo2.Context{"query": r.FormValue("query")}, w)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}

func main() {
	http.HandleFunc("/", examplePage)
	http.ListenAndServe(":8080", nil)
}
```
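The same pre-compiled template can also be rendered to a string with `Execute` (as in the tiny example above), which is handy outside an HTTP handler, e.g. in tests; a minimal sketch reusing `tplExample` from the example:
```Go
// Render the pre-compiled template to a string instead of an io.Writer.
out, err := tplExample.Execute(pongo2.Context{"query": "pongo2"})
if err != nil {
	panic(err)
}
fmt.Println(out)
```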
# Benchmark
The benchmarks were run on my machine (`Intel(R) Core(TM) i7-2600 CPU @ 3.40GHz`) using the command:
go test -bench . -cpu 1,2,4,8
All benchmarks compile and/or execute the `template_tests/complex.tpl` template (depending on the benchmark).
The results are:
BenchmarkExecuteComplexWithSandboxActive 50000 60450 ns/op
BenchmarkExecuteComplexWithSandboxActive-2 50000 56998 ns/op
BenchmarkExecuteComplexWithSandboxActive-4 50000 60343 ns/op
BenchmarkExecuteComplexWithSandboxActive-8 50000 64229 ns/op
BenchmarkCompileAndExecuteComplexWithSandboxActive 10000 164410 ns/op
BenchmarkCompileAndExecuteComplexWithSandboxActive-2 10000 156682 ns/op
BenchmarkCompileAndExecuteComplexWithSandboxActive-4 10000 164821 ns/op
BenchmarkCompileAndExecuteComplexWithSandboxActive-8 10000 171806 ns/op
BenchmarkParallelExecuteComplexWithSandboxActive 50000 60428 ns/op
BenchmarkParallelExecuteComplexWithSandboxActive-2 50000 31887 ns/op
BenchmarkParallelExecuteComplexWithSandboxActive-4 100000 22810 ns/op
BenchmarkParallelExecuteComplexWithSandboxActive-8 100000 18820 ns/op
BenchmarkExecuteComplexWithoutSandbox 50000 56942 ns/op
BenchmarkExecuteComplexWithoutSandbox-2 50000 56168 ns/op
BenchmarkExecuteComplexWithoutSandbox-4 50000 57838 ns/op
BenchmarkExecuteComplexWithoutSandbox-8 50000 60539 ns/op
BenchmarkCompileAndExecuteComplexWithoutSandbox 10000 162086 ns/op
BenchmarkCompileAndExecuteComplexWithoutSandbox-2 10000 159771 ns/op
BenchmarkCompileAndExecuteComplexWithoutSandbox-4 10000 163826 ns/op
BenchmarkCompileAndExecuteComplexWithoutSandbox-8 10000 169062 ns/op
BenchmarkParallelExecuteComplexWithoutSandbox 50000 57152 ns/op
BenchmarkParallelExecuteComplexWithoutSandbox-2 50000 30276 ns/op
BenchmarkParallelExecuteComplexWithoutSandbox-4 100000 22065 ns/op
BenchmarkParallelExecuteComplexWithoutSandbox-8 100000 18034 ns/op
Benchmarked on October 2nd 2014.
## Contributors
This project exists thanks to all the people who contribute.
<a href="https://github.com/flosch/pongo2/graphs/contributors"><img src="https://opencollective.com/pongo2/contributors.svg?width=890&button=false" /></a>
## Backers
Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/pongo2#backer)]
<a href="https://opencollective.com/pongo2#backers" target="_blank"><img src="https://opencollective.com/pongo2/backers.svg?width=890"></a>
## Sponsors
Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/pongo2#sponsor)]
<a href="https://opencollective.com/pongo2/sponsor/0/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/0/avatar.svg"></a>
<a href="https://opencollective.com/pongo2/sponsor/1/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/1/avatar.svg"></a>
<a href="https://opencollective.com/pongo2/sponsor/2/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/2/avatar.svg"></a>
<a href="https://opencollective.com/pongo2/sponsor/3/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/3/avatar.svg"></a>
<a href="https://opencollective.com/pongo2/sponsor/4/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/4/avatar.svg"></a>
<a href="https://opencollective.com/pongo2/sponsor/5/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/5/avatar.svg"></a>
<a href="https://opencollective.com/pongo2/sponsor/6/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/6/avatar.svg"></a>
<a href="https://opencollective.com/pongo2/sponsor/7/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/7/avatar.svg"></a>
<a href="https://opencollective.com/pongo2/sponsor/8/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/8/avatar.svg"></a>
<a href="https://opencollective.com/pongo2/sponsor/9/website" target="_blank"><img src="https://opencollective.com/pongo2/sponsor/9/avatar.svg"></a>

View File

@ -1,136 +0,0 @@
package pongo2
import (
"regexp"
"github.com/juju/errors"
)
var reIdentifiers = regexp.MustCompile("^[a-zA-Z0-9_]+$")
var autoescape = true
func SetAutoescape(newValue bool) {
autoescape = newValue
}
// A Context type provides constants, variables, instances or functions to a template.
//
// pongo2 automatically provides meta-information or functions through the "pongo2"-key.
// Currently, context["pongo2"] contains the following keys:
// 1. version: returns the version string
//
// Template examples for accessing items from your context:
// {{ myconstant }}
// {{ myfunc("test", 42) }}
// {{ user.name }}
// {{ pongo2.version }}
type Context map[string]interface{}
func (c Context) checkForValidIdentifiers() *Error {
for k, v := range c {
if !reIdentifiers.MatchString(k) {
return &Error{
Sender: "checkForValidIdentifiers",
OrigError: errors.Errorf("context-key '%s' (value: '%+v') is not a valid identifier", k, v),
}
}
}
return nil
}
// Update updates this context with the key/value-pairs from another context.
func (c Context) Update(other Context) Context {
for k, v := range other {
c[k] = v
}
return c
}
// ExecutionContext contains all data important for the current rendering state.
//
// If you're writing a custom tag, your tag's Execute()-function will
// have access to the ExecutionContext. This struct stores anything
// about the current rendering process's Context including
// the Context provided by the user (field Public).
// You can safely use the Private context to provide data to the user's
// template (like a 'forloop'-information). The Shared-context is used
// to share data between tags. All ExecutionContexts share this context.
//
// Please be careful when accessing the Public data.
// PLEASE DO NOT MODIFY THE PUBLIC CONTEXT (read-only).
//
// To create your own execution context within tags, use the
// NewChildExecutionContext(parent) function.
type ExecutionContext struct {
template *Template
Autoescape bool
Public Context
Private Context
Shared Context
}
var pongo2MetaContext = Context{
"version": Version,
}
func newExecutionContext(tpl *Template, ctx Context) *ExecutionContext {
privateCtx := make(Context)
// Make the pongo2-related funcs/vars available to the context
privateCtx["pongo2"] = pongo2MetaContext
return &ExecutionContext{
template: tpl,
Public: ctx,
Private: privateCtx,
Autoescape: autoescape,
}
}
func NewChildExecutionContext(parent *ExecutionContext) *ExecutionContext {
newctx := &ExecutionContext{
template: parent.template,
Public: parent.Public,
Private: make(Context),
Autoescape: parent.Autoescape,
}
newctx.Shared = parent.Shared
// Copy all existing private items
newctx.Private.Update(parent.Private)
return newctx
}
func (ctx *ExecutionContext) Error(msg string, token *Token) *Error {
return ctx.OrigError(errors.New(msg), token)
}
func (ctx *ExecutionContext) OrigError(err error, token *Token) *Error {
filename := ctx.template.name
var line, col int
if token != nil {
// A token is available; use its position information
// TODO: if no token is available, add location information (from where?)
filename = token.Filename
line = token.Line
col = token.Col
}
return &Error{
Template: ctx.template,
Filename: filename,
Line: line,
Column: col,
Token: token,
Sender: "execution",
OrigError: err,
}
}
func (ctx *ExecutionContext) Logf(format string, args ...interface{}) {
ctx.template.set.logf(format, args...)
}

View File

@ -1,31 +0,0 @@
// A Django-syntax like template-engine
//
// Blog posts about pongo2 (including introduction and migration):
// https://www.florian-schlachter.de/?tag=pongo2
//
// Complete documentation on the template language:
// https://docs.djangoproject.com/en/dev/topics/templates/
//
// Try out pongo2 live in the pongo2 playground:
// https://www.florian-schlachter.de/pongo2/
//
// Make sure to read README.md in the repository as well.
//
// A tiny example with template strings:
//
// (Snippet on playground: https://www.florian-schlachter.de/pongo2/?id=1206546277)
//
// // Compile the template first (i. e. creating the AST)
// tpl, err := pongo2.FromString("Hello {{ name|capfirst }}!")
// if err != nil {
// panic(err)
// }
// // Now you can render the template with the given
// // pongo2.Context how often you want to.
// out, err := tpl.Execute(pongo2.Context{"name": "fred"})
// if err != nil {
// panic(err)
// }
// fmt.Println(out) // Output: Hello Fred!
//
package pongo2

View File

@ -1,91 +0,0 @@
package pongo2
import (
"bufio"
"fmt"
"os"
)
// The Error type is being used to address an error during lexing, parsing or
// execution. If you want to return an error object (for example in your own
// tag or filter) fill this object with as much information as you have.
// Make sure "Sender" is always given (if you're returning an error within
// a filter, make Sender equals 'filter:yourfilter'; same goes for tags: 'tag:mytag').
// It's okay if you only fill in OrigError if you don't have any other details at hand.
type Error struct {
Template *Template
Filename string
Line int
Column int
Token *Token
Sender string
OrigError error
}
func (e *Error) updateFromTokenIfNeeded(template *Template, t *Token) *Error {
if e.Template == nil {
e.Template = template
}
if e.Token == nil {
e.Token = t
if e.Line <= 0 {
e.Line = t.Line
e.Column = t.Col
}
}
return e
}
// Returns a nice formatted error string.
func (e *Error) Error() string {
s := "[Error"
if e.Sender != "" {
s += " (where: " + e.Sender + ")"
}
if e.Filename != "" {
s += " in " + e.Filename
}
if e.Line > 0 {
s += fmt.Sprintf(" | Line %d Col %d", e.Line, e.Column)
if e.Token != nil {
s += fmt.Sprintf(" near '%s'", e.Token.Val)
}
}
s += "] "
s += e.OrigError.Error()
return s
}
// RawLine returns the affected line from the original template, if available.
func (e *Error) RawLine() (line string, available bool, outErr error) {
if e.Line <= 0 || e.Filename == "<string>" {
return "", false, nil
}
filename := e.Filename
if e.Template != nil {
filename = e.Template.set.resolveFilename(e.Template, e.Filename)
}
file, err := os.Open(filename)
if err != nil {
return "", false, err
}
defer func() {
err := file.Close()
if err != nil && outErr == nil {
outErr = err
}
}()
scanner := bufio.NewScanner(file)
l := 0
for scanner.Scan() {
l++
if l == e.Line {
return scanner.Text(), true, nil
}
}
return "", false, nil
}

View File

@ -1,143 +0,0 @@
package pongo2
import (
"fmt"
"github.com/juju/errors"
)
// FilterFunction is the type filter functions must fulfil
type FilterFunction func(in *Value, param *Value) (out *Value, err *Error)
var filters map[string]FilterFunction
func init() {
filters = make(map[string]FilterFunction)
}
// FilterExists returns true if the given filter is already registered
func FilterExists(name string) bool {
_, existing := filters[name]
return existing
}
// RegisterFilter registers a new filter. If there's already a filter with the same
// name, RegisterFilter will panic. You usually want to call this
// function in the filter's init() function:
// http://golang.org/doc/effective_go.html#init
//
// See http://www.florian-schlachter.de/post/pongo2/ for more about
// writing filters and tags.
func RegisterFilter(name string, fn FilterFunction) error {
if FilterExists(name) {
return errors.Errorf("filter with name '%s' is already registered", name)
}
filters[name] = fn
return nil
}
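// Example (illustrative sketch): a custom filter registered from an init()
// function as recommended above; the filter name "shout" and its behaviour
// are made up for demonstration purposes.
//
//	func init() {
//		RegisterFilter("shout", func(in *Value, param *Value) (*Value, *Error) {
//			return AsValue(strings.ToUpper(in.String()) + "!"), nil
//		})
//	}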
// ReplaceFilter replaces an already registered filter with a new implementation. Use this
// function with caution since it allows you to change existing filter behaviour.
func ReplaceFilter(name string, fn FilterFunction) error {
if !FilterExists(name) {
return errors.Errorf("filter with name '%s' does not exist (therefore cannot be overridden)", name)
}
filters[name] = fn
return nil
}
// MustApplyFilter behaves like ApplyFilter, but panics on an error.
func MustApplyFilter(name string, value *Value, param *Value) *Value {
val, err := ApplyFilter(name, value, param)
if err != nil {
panic(err)
}
return val
}
// ApplyFilter applies a filter to a given value using the given parameters.
// Returns a *pongo2.Value or an error.
func ApplyFilter(name string, value *Value, param *Value) (*Value, *Error) {
fn, existing := filters[name]
if !existing {
return nil, &Error{
Sender: "applyfilter",
OrigError: errors.Errorf("Filter with name '%s' not found.", name),
}
}
// Make sure param is a *Value
if param == nil {
param = AsValue(nil)
}
return fn(value, param)
}
type filterCall struct {
token *Token
name string
parameter IEvaluator
filterFunc FilterFunction
}
func (fc *filterCall) Execute(v *Value, ctx *ExecutionContext) (*Value, *Error) {
var param *Value
var err *Error
if fc.parameter != nil {
param, err = fc.parameter.Evaluate(ctx)
if err != nil {
return nil, err
}
} else {
param = AsValue(nil)
}
filteredValue, err := fc.filterFunc(v, param)
if err != nil {
return nil, err.updateFromTokenIfNeeded(ctx.template, fc.token)
}
return filteredValue, nil
}
// Filter = IDENT | IDENT ":" FilterArg | IDENT "|" Filter
func (p *Parser) parseFilter() (*filterCall, *Error) {
identToken := p.MatchType(TokenIdentifier)
// Check filter ident
if identToken == nil {
return nil, p.Error("Filter name must be an identifier.", nil)
}
filter := &filterCall{
token: identToken,
name: identToken.Val,
}
// Get the appropriate filter function and bind it
filterFn, exists := filters[identToken.Val]
if !exists {
return nil, p.Error(fmt.Sprintf("Filter '%s' does not exist.", identToken.Val), identToken)
}
filter.filterFunc = filterFn
// Check for filter-argument (2 tokens needed: ':' ARG)
if p.Match(TokenSymbol, ":") != nil {
if p.Peek(TokenSymbol, "}}") != nil {
return nil, p.Error("Filter parameter required after ':'.", nil)
}
// Get filter argument expression
v, err := p.parseVariableOrLiteral()
if err != nil {
return nil, err
}
filter.parameter = v
}
return filter, nil
}

View File

@ -1,927 +0,0 @@
package pongo2
/* Filters that are provided through github.com/flosch/pongo2-addons:
------------------------------------------------------------------
filesizeformat
slugify
timesince
timeuntil
Filters that won't be added:
----------------------------
get_static_prefix (reason: web-framework specific)
pprint (reason: python-specific)
static (reason: web-framework specific)
Reconsideration (not implemented yet):
--------------------------------------
force_escape (reason: not yet needed since this is the behaviour of pongo2's escape filter)
safeseq (reason: same reason as `force_escape`)
unordered_list (python-specific; not sure whether needed or not)
dictsort (python-specific; maybe one could add a filter to sort a list of structs by a specific field name)
dictsortreversed (see dictsort)
*/
import (
"bytes"
"fmt"
"math/rand"
"net/url"
"regexp"
"strconv"
"strings"
"time"
"unicode/utf8"
"github.com/juju/errors"
)
func init() {
rand.Seed(time.Now().Unix())
RegisterFilter("escape", filterEscape)
RegisterFilter("safe", filterSafe)
RegisterFilter("escapejs", filterEscapejs)
RegisterFilter("add", filterAdd)
RegisterFilter("addslashes", filterAddslashes)
RegisterFilter("capfirst", filterCapfirst)
RegisterFilter("center", filterCenter)
RegisterFilter("cut", filterCut)
RegisterFilter("date", filterDate)
RegisterFilter("default", filterDefault)
RegisterFilter("default_if_none", filterDefaultIfNone)
RegisterFilter("divisibleby", filterDivisibleby)
RegisterFilter("first", filterFirst)
RegisterFilter("floatformat", filterFloatformat)
RegisterFilter("get_digit", filterGetdigit)
RegisterFilter("iriencode", filterIriencode)
RegisterFilter("join", filterJoin)
RegisterFilter("last", filterLast)
RegisterFilter("length", filterLength)
RegisterFilter("length_is", filterLengthis)
RegisterFilter("linebreaks", filterLinebreaks)
RegisterFilter("linebreaksbr", filterLinebreaksbr)
RegisterFilter("linenumbers", filterLinenumbers)
RegisterFilter("ljust", filterLjust)
RegisterFilter("lower", filterLower)
RegisterFilter("make_list", filterMakelist)
RegisterFilter("phone2numeric", filterPhone2numeric)
RegisterFilter("pluralize", filterPluralize)
RegisterFilter("random", filterRandom)
RegisterFilter("removetags", filterRemovetags)
RegisterFilter("rjust", filterRjust)
RegisterFilter("slice", filterSlice)
RegisterFilter("split", filterSplit)
RegisterFilter("stringformat", filterStringformat)
RegisterFilter("striptags", filterStriptags)
RegisterFilter("time", filterDate) // time uses filterDate (same golang-format)
RegisterFilter("title", filterTitle)
RegisterFilter("truncatechars", filterTruncatechars)
RegisterFilter("truncatechars_html", filterTruncatecharsHTML)
RegisterFilter("truncatewords", filterTruncatewords)
RegisterFilter("truncatewords_html", filterTruncatewordsHTML)
RegisterFilter("upper", filterUpper)
RegisterFilter("urlencode", filterUrlencode)
RegisterFilter("urlize", filterUrlize)
RegisterFilter("urlizetrunc", filterUrlizetrunc)
RegisterFilter("wordcount", filterWordcount)
RegisterFilter("wordwrap", filterWordwrap)
RegisterFilter("yesno", filterYesno)
RegisterFilter("float", filterFloat) // pongo-specific
RegisterFilter("integer", filterInteger) // pongo-specific
}
func filterTruncatecharsHelper(s string, newLen int) string {
runes := []rune(s)
if newLen < len(runes) {
if newLen >= 3 {
return fmt.Sprintf("%s...", string(runes[:newLen-3]))
}
// Not enough space for the ellipsis
return string(runes[:newLen])
}
return string(runes)
}
func filterTruncateHTMLHelper(value string, newOutput *bytes.Buffer, cond func() bool, fn func(c rune, s int, idx int) int, finalize func()) {
vLen := len(value)
var tagStack []string
idx := 0
for idx < vLen && !cond() {
c, s := utf8.DecodeRuneInString(value[idx:])
if c == utf8.RuneError {
idx += s
continue
}
if c == '<' {
newOutput.WriteRune(c)
idx += s // consume "<"
if idx+1 < vLen {
if value[idx] == '/' {
// Close tag
newOutput.WriteString("/")
tag := ""
idx++ // consume "/"
for idx < vLen {
c2, size2 := utf8.DecodeRuneInString(value[idx:])
if c2 == utf8.RuneError {
idx += size2
continue
}
// End of tag found
if c2 == '>' {
idx++ // consume ">"
break
}
tag += string(c2)
idx += size2
}
if len(tagStack) > 0 {
// Ideally, the close tag is at the TOP of the tag stack.
// In malformed HTML it might not be, so iterate through the stack and remove the tag
for i := len(tagStack) - 1; i >= 0; i-- {
if tagStack[i] == tag {
// Found the tag
tagStack[i] = tagStack[len(tagStack)-1]
tagStack = tagStack[:len(tagStack)-1]
break
}
}
}
newOutput.WriteString(tag)
newOutput.WriteString(">")
} else {
// Open tag
tag := ""
params := false
for idx < vLen {
c2, size2 := utf8.DecodeRuneInString(value[idx:])
if c2 == utf8.RuneError {
idx += size2
continue
}
newOutput.WriteRune(c2)
// End of tag found
if c2 == '>' {
idx++ // consume ">"
break
}
if !params {
if c2 == ' ' {
params = true
} else {
tag += string(c2)
}
}
idx += size2
}
// Add tag to stack
tagStack = append(tagStack, tag)
}
}
} else {
idx = fn(c, s, idx)
}
}
finalize()
for i := len(tagStack) - 1; i >= 0; i-- {
tag := tagStack[i]
// Close everything from the regular tag stack
newOutput.WriteString(fmt.Sprintf("</%s>", tag))
}
}
func filterTruncatechars(in *Value, param *Value) (*Value, *Error) {
s := in.String()
newLen := param.Integer()
return AsValue(filterTruncatecharsHelper(s, newLen)), nil
}
func filterTruncatecharsHTML(in *Value, param *Value) (*Value, *Error) {
value := in.String()
newLen := max(param.Integer()-3, 0)
newOutput := bytes.NewBuffer(nil)
textcounter := 0
filterTruncateHTMLHelper(value, newOutput, func() bool {
return textcounter >= newLen
}, func(c rune, s int, idx int) int {
textcounter++
newOutput.WriteRune(c)
return idx + s
}, func() {
if textcounter >= newLen && textcounter < len(value) {
newOutput.WriteString("...")
}
})
return AsSafeValue(newOutput.String()), nil
}
func filterTruncatewords(in *Value, param *Value) (*Value, *Error) {
words := strings.Fields(in.String())
n := param.Integer()
if n <= 0 {
return AsValue(""), nil
}
nlen := min(len(words), n)
out := make([]string, 0, nlen)
for i := 0; i < nlen; i++ {
out = append(out, words[i])
}
if n < len(words) {
out = append(out, "...")
}
return AsValue(strings.Join(out, " ")), nil
}
func filterTruncatewordsHTML(in *Value, param *Value) (*Value, *Error) {
value := in.String()
newLen := max(param.Integer(), 0)
newOutput := bytes.NewBuffer(nil)
wordcounter := 0
filterTruncateHTMLHelper(value, newOutput, func() bool {
return wordcounter >= newLen
}, func(_ rune, _ int, idx int) int {
// Get next word
wordFound := false
for idx < len(value) {
c2, size2 := utf8.DecodeRuneInString(value[idx:])
if c2 == utf8.RuneError {
idx += size2
continue
}
if c2 == '<' {
// HTML tag start, don't consume it
return idx
}
newOutput.WriteRune(c2)
idx += size2
if c2 == ' ' || c2 == '.' || c2 == ',' || c2 == ';' {
// Word ends here, stop capturing it now
break
} else {
wordFound = true
}
}
if wordFound {
wordcounter++
}
return idx
}, func() {
if wordcounter >= newLen {
newOutput.WriteString("...")
}
})
return AsSafeValue(newOutput.String()), nil
}
func filterEscape(in *Value, param *Value) (*Value, *Error) {
output := strings.Replace(in.String(), "&", "&amp;", -1)
output = strings.Replace(output, ">", "&gt;", -1)
output = strings.Replace(output, "<", "&lt;", -1)
output = strings.Replace(output, "\"", "&quot;", -1)
output = strings.Replace(output, "'", "&#39;", -1)
return AsValue(output), nil
}
func filterSafe(in *Value, param *Value) (*Value, *Error) {
return in, nil // nothing to do here, just to keep track of the safe application
}
func filterEscapejs(in *Value, param *Value) (*Value, *Error) {
sin := in.String()
var b bytes.Buffer
idx := 0
for idx < len(sin) {
c, size := utf8.DecodeRuneInString(sin[idx:])
if c == utf8.RuneError {
idx += size
continue
}
if c == '\\' {
// Escape seq?
if idx+1 < len(sin) {
switch sin[idx+1] {
case 'r':
b.WriteString(fmt.Sprintf(`\u%04X`, '\r'))
idx += 2
continue
case 'n':
b.WriteString(fmt.Sprintf(`\u%04X`, '\n'))
idx += 2
continue
/*case '\'':
b.WriteString(fmt.Sprintf(`\u%04X`, '\''))
idx += 2
continue
case '"':
b.WriteString(fmt.Sprintf(`\u%04X`, '"'))
idx += 2
continue*/
}
}
}
if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == ' ' || c == '/' {
b.WriteRune(c)
} else {
b.WriteString(fmt.Sprintf(`\u%04X`, c))
}
idx += size
}
return AsValue(b.String()), nil
}
func filterAdd(in *Value, param *Value) (*Value, *Error) {
if in.IsNumber() && param.IsNumber() {
if in.IsFloat() || param.IsFloat() {
return AsValue(in.Float() + param.Float()), nil
}
return AsValue(in.Integer() + param.Integer()), nil
}
// If in/param is not a number, we're relying on the
// Value's String() conversion and just add them both together
return AsValue(in.String() + param.String()), nil
}
func filterAddslashes(in *Value, param *Value) (*Value, *Error) {
output := strings.Replace(in.String(), "\\", "\\\\", -1)
output = strings.Replace(output, "\"", "\\\"", -1)
output = strings.Replace(output, "'", "\\'", -1)
return AsValue(output), nil
}
func filterCut(in *Value, param *Value) (*Value, *Error) {
return AsValue(strings.Replace(in.String(), param.String(), "", -1)), nil
}
func filterLength(in *Value, param *Value) (*Value, *Error) {
return AsValue(in.Len()), nil
}
func filterLengthis(in *Value, param *Value) (*Value, *Error) {
return AsValue(in.Len() == param.Integer()), nil
}
func filterDefault(in *Value, param *Value) (*Value, *Error) {
if !in.IsTrue() {
return param, nil
}
return in, nil
}
func filterDefaultIfNone(in *Value, param *Value) (*Value, *Error) {
if in.IsNil() {
return param, nil
}
return in, nil
}
func filterDivisibleby(in *Value, param *Value) (*Value, *Error) {
if param.Integer() == 0 {
return AsValue(false), nil
}
return AsValue(in.Integer()%param.Integer() == 0), nil
}
func filterFirst(in *Value, param *Value) (*Value, *Error) {
if in.CanSlice() && in.Len() > 0 {
return in.Index(0), nil
}
return AsValue(""), nil
}
func filterFloatformat(in *Value, param *Value) (*Value, *Error) {
val := in.Float()
decimals := -1
if !param.IsNil() {
// Any argument provided?
decimals = param.Integer()
}
// if the argument is not a number (e.g. empty), the default
// behaviour is to trim the result
trim := !param.IsNumber()
if decimals <= 0 {
// argument is negative or zero, so we
// want the output being trimmed
decimals = -decimals
trim = true
}
if trim {
// Remove zeroes
if float64(int(val)) == val {
return AsValue(in.Integer()), nil
}
}
return AsValue(strconv.FormatFloat(val, 'f', decimals, 64)), nil
}
func filterGetdigit(in *Value, param *Value) (*Value, *Error) {
i := param.Integer()
l := len(in.String()) // do NOT use in.Len() here!
if i <= 0 || i > l {
return in, nil
}
return AsValue(in.String()[l-i] - 48), nil
}
const filterIRIChars = "/#%[]=:;$&()+,!?*@'~"
func filterIriencode(in *Value, param *Value) (*Value, *Error) {
var b bytes.Buffer
sin := in.String()
for _, r := range sin {
if strings.IndexRune(filterIRIChars, r) >= 0 {
b.WriteRune(r)
} else {
b.WriteString(url.QueryEscape(string(r)))
}
}
return AsValue(b.String()), nil
}
func filterJoin(in *Value, param *Value) (*Value, *Error) {
if !in.CanSlice() {
return in, nil
}
sep := param.String()
sl := make([]string, 0, in.Len())
for i := 0; i < in.Len(); i++ {
sl = append(sl, in.Index(i).String())
}
return AsValue(strings.Join(sl, sep)), nil
}
func filterLast(in *Value, param *Value) (*Value, *Error) {
if in.CanSlice() && in.Len() > 0 {
return in.Index(in.Len() - 1), nil
}
return AsValue(""), nil
}
func filterUpper(in *Value, param *Value) (*Value, *Error) {
return AsValue(strings.ToUpper(in.String())), nil
}
func filterLower(in *Value, param *Value) (*Value, *Error) {
return AsValue(strings.ToLower(in.String())), nil
}
func filterMakelist(in *Value, param *Value) (*Value, *Error) {
s := in.String()
result := make([]string, 0, len(s))
for _, c := range s {
result = append(result, string(c))
}
return AsValue(result), nil
}
func filterCapfirst(in *Value, param *Value) (*Value, *Error) {
if in.Len() <= 0 {
return AsValue(""), nil
}
t := in.String()
r, size := utf8.DecodeRuneInString(t)
return AsValue(strings.ToUpper(string(r)) + t[size:]), nil
}
func filterCenter(in *Value, param *Value) (*Value, *Error) {
width := param.Integer()
slen := in.Len()
if width <= slen {
return in, nil
}
spaces := width - slen
left := spaces/2 + spaces%2
right := spaces / 2
return AsValue(fmt.Sprintf("%s%s%s", strings.Repeat(" ", left),
in.String(), strings.Repeat(" ", right))), nil
}
func filterDate(in *Value, param *Value) (*Value, *Error) {
t, isTime := in.Interface().(time.Time)
if !isTime {
return nil, &Error{
Sender: "filter:date",
OrigError: errors.New("filter input argument must be of type 'time.Time'"),
}
}
return AsValue(t.Format(param.String())), nil
}
func filterFloat(in *Value, param *Value) (*Value, *Error) {
return AsValue(in.Float()), nil
}
func filterInteger(in *Value, param *Value) (*Value, *Error) {
return AsValue(in.Integer()), nil
}
func filterLinebreaks(in *Value, param *Value) (*Value, *Error) {
if in.Len() == 0 {
return in, nil
}
var b bytes.Buffer
// Newline = <br />
// Double newline = <p>...</p>
lines := strings.Split(in.String(), "\n")
lenlines := len(lines)
opened := false
for idx, line := range lines {
if !opened {
b.WriteString("<p>")
opened = true
}
b.WriteString(line)
if idx < lenlines-1 && strings.TrimSpace(lines[idx]) != "" {
// We've not reached the end
if strings.TrimSpace(lines[idx+1]) == "" {
// Next line is empty
if opened {
b.WriteString("</p>")
opened = false
}
} else {
b.WriteString("<br />")
}
}
}
if opened {
b.WriteString("</p>")
}
return AsValue(b.String()), nil
}
func filterSplit(in *Value, param *Value) (*Value, *Error) {
chunks := strings.Split(in.String(), param.String())
return AsValue(chunks), nil
}
func filterLinebreaksbr(in *Value, param *Value) (*Value, *Error) {
return AsValue(strings.Replace(in.String(), "\n", "<br />", -1)), nil
}
func filterLinenumbers(in *Value, param *Value) (*Value, *Error) {
lines := strings.Split(in.String(), "\n")
output := make([]string, 0, len(lines))
for idx, line := range lines {
output = append(output, fmt.Sprintf("%d. %s", idx+1, line))
}
return AsValue(strings.Join(output, "\n")), nil
}
func filterLjust(in *Value, param *Value) (*Value, *Error) {
times := param.Integer() - in.Len()
if times < 0 {
times = 0
}
return AsValue(fmt.Sprintf("%s%s", in.String(), strings.Repeat(" ", times))), nil
}
func filterUrlencode(in *Value, param *Value) (*Value, *Error) {
return AsValue(url.QueryEscape(in.String())), nil
}
// TODO: This regexp could do some work
var filterUrlizeURLRegexp = regexp.MustCompile(`((((http|https)://)|www\.|((^|[ ])[0-9A-Za-z_\-]+(\.com|\.net|\.org|\.info|\.biz|\.de))))(?U:.*)([ ]+|$)`)
var filterUrlizeEmailRegexp = regexp.MustCompile(`(\w+@\w+\.\w{2,4})`)
func filterUrlizeHelper(input string, autoescape bool, trunc int) (string, error) {
var soutErr error
sout := filterUrlizeURLRegexp.ReplaceAllStringFunc(input, func(raw_url string) string {
var prefix string
var suffix string
if strings.HasPrefix(raw_url, " ") {
prefix = " "
}
if strings.HasSuffix(raw_url, " ") {
suffix = " "
}
raw_url = strings.TrimSpace(raw_url)
t, err := ApplyFilter("iriencode", AsValue(raw_url), nil)
if err != nil {
soutErr = err
return ""
}
url := t.String()
if !strings.HasPrefix(url, "http") {
url = fmt.Sprintf("http://%s", url)
}
title := raw_url
if trunc > 3 && len(title) > trunc {
title = fmt.Sprintf("%s...", title[:trunc-3])
}
if autoescape {
t, err := ApplyFilter("escape", AsValue(title), nil)
if err != nil {
soutErr = err
return ""
}
title = t.String()
}
return fmt.Sprintf(`%s<a href="%s" rel="nofollow">%s</a>%s`, prefix, url, title, suffix)
})
if soutErr != nil {
return "", soutErr
}
sout = filterUrlizeEmailRegexp.ReplaceAllStringFunc(sout, func(mail string) string {
title := mail
if trunc > 3 && len(title) > trunc {
title = fmt.Sprintf("%s...", title[:trunc-3])
}
return fmt.Sprintf(`<a href="mailto:%s">%s</a>`, mail, title)
})
return sout, nil
}
func filterUrlize(in *Value, param *Value) (*Value, *Error) {
autoescape := true
if param.IsBool() {
autoescape = param.Bool()
}
s, err := filterUrlizeHelper(in.String(), autoescape, -1)
if err != nil {
return nil, &Error{
Sender: "filter:urlize",
OrigError: err,
}
}
return AsValue(s), nil
}
func filterUrlizetrunc(in *Value, param *Value) (*Value, *Error) {
s, err := filterUrlizeHelper(in.String(), true, param.Integer())
if err != nil {
return nil, &Error{
Sender: "filter:urlizetrunc",
OrigError: err,
}
}
return AsValue(s), nil
}
func filterStringformat(in *Value, param *Value) (*Value, *Error) {
return AsValue(fmt.Sprintf(param.String(), in.Interface())), nil
}
var reStriptags = regexp.MustCompile("<[^>]*?>")
func filterStriptags(in *Value, param *Value) (*Value, *Error) {
s := in.String()
// Strip all tags
s = reStriptags.ReplaceAllString(s, "")
return AsValue(strings.TrimSpace(s)), nil
}
// https://en.wikipedia.org/wiki/Phoneword
var filterPhone2numericMap = map[string]string{
"a": "2", "b": "2", "c": "2", "d": "3", "e": "3", "f": "3", "g": "4", "h": "4", "i": "4", "j": "5", "k": "5",
"l": "5", "m": "6", "n": "6", "o": "6", "p": "7", "q": "7", "r": "7", "s": "7", "t": "8", "u": "8", "v": "8",
"w": "9", "x": "9", "y": "9", "z": "9",
}
func filterPhone2numeric(in *Value, param *Value) (*Value, *Error) {
sin := in.String()
for k, v := range filterPhone2numericMap {
sin = strings.Replace(sin, k, v, -1)
sin = strings.Replace(sin, strings.ToUpper(k), v, -1)
}
return AsValue(sin), nil
}
func filterPluralize(in *Value, param *Value) (*Value, *Error) {
if in.IsNumber() {
// Works only on numbers
if param.Len() > 0 {
endings := strings.Split(param.String(), ",")
if len(endings) > 2 {
return nil, &Error{
Sender: "filter:pluralize",
OrigError: errors.New("you cannot pass more than 2 arguments to filter 'pluralize'"),
}
}
if len(endings) == 1 {
// 1 argument
if in.Integer() != 1 {
return AsValue(endings[0]), nil
}
} else {
if in.Integer() != 1 {
// 2 arguments
return AsValue(endings[1]), nil
}
return AsValue(endings[0]), nil
}
} else {
if in.Integer() != 1 {
// return default 's'
return AsValue("s"), nil
}
}
return AsValue(""), nil
}
return nil, &Error{
Sender: "filter:pluralize",
OrigError: errors.New("filter 'pluralize' does only work on numbers"),
}
}
func filterRandom(in *Value, param *Value) (*Value, *Error) {
if !in.CanSlice() || in.Len() <= 0 {
return in, nil
}
i := rand.Intn(in.Len())
return in.Index(i), nil
}
func filterRemovetags(in *Value, param *Value) (*Value, *Error) {
s := in.String()
tags := strings.Split(param.String(), ",")
// Strip only specific tags
for _, tag := range tags {
re := regexp.MustCompile(fmt.Sprintf("</?%s/?>", tag))
s = re.ReplaceAllString(s, "")
}
return AsValue(strings.TrimSpace(s)), nil
}
func filterRjust(in *Value, param *Value) (*Value, *Error) {
return AsValue(fmt.Sprintf(fmt.Sprintf("%%%ds", param.Integer()), in.String())), nil
}
func filterSlice(in *Value, param *Value) (*Value, *Error) {
comp := strings.Split(param.String(), ":")
if len(comp) != 2 {
return nil, &Error{
Sender: "filter:slice",
OrigError: errors.New("Slice string must have the format 'from:to' [from/to can be omitted, but the ':' is required]"),
}
}
if !in.CanSlice() {
return in, nil
}
from := AsValue(comp[0]).Integer()
to := in.Len()
if from > to {
from = to
}
vto := AsValue(comp[1]).Integer()
if vto >= from && vto <= in.Len() {
to = vto
}
return in.Slice(from, to), nil
}
func filterTitle(in *Value, param *Value) (*Value, *Error) {
if !in.IsString() {
return AsValue(""), nil
}
return AsValue(strings.Title(strings.ToLower(in.String()))), nil
}
func filterWordcount(in *Value, param *Value) (*Value, *Error) {
return AsValue(len(strings.Fields(in.String()))), nil
}
func filterWordwrap(in *Value, param *Value) (*Value, *Error) {
words := strings.Fields(in.String())
wordsLen := len(words)
wrapAt := param.Integer()
if wrapAt <= 0 {
return in, nil
}
linecount := wordsLen/wrapAt + wordsLen%wrapAt
lines := make([]string, 0, linecount)
for i := 0; i < linecount; i++ {
lines = append(lines, strings.Join(words[wrapAt*i:min(wrapAt*(i+1), wordsLen)], " "))
}
return AsValue(strings.Join(lines, "\n")), nil
}
func filterYesno(in *Value, param *Value) (*Value, *Error) {
choices := map[int]string{
0: "yes",
1: "no",
2: "maybe",
}
paramString := param.String()
customChoices := strings.Split(paramString, ",")
if len(paramString) > 0 {
if len(customChoices) > 3 {
return nil, &Error{
Sender: "filter:yesno",
OrigError: errors.Errorf("You cannot pass more than 3 options to the 'yesno'-filter (got: '%s').", paramString),
}
}
if len(customChoices) < 2 {
return nil, &Error{
Sender: "filter:yesno",
OrigError: errors.Errorf("You must pass either no or at least 2 arguments to the 'yesno'-filter (got: '%s').", paramString),
}
}
// Map to the options now
choices[0] = customChoices[0]
choices[1] = customChoices[1]
if len(customChoices) == 3 {
choices[2] = customChoices[2]
}
}
// maybe
if in.IsNil() {
return AsValue(choices[2]), nil
}
// yes
if in.IsTrue() {
return AsValue(choices[0]), nil
}
// no
return AsValue(choices[1]), nil
}

View File

@ -1,13 +0,0 @@
module github.com/flosch/pongo2
require (
github.com/go-check/check v0.0.0-20180628173108-788fd7840127
github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5
github.com/juju/loggo v0.0.0-20180524022052-584905176618 // indirect
github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073 // indirect
github.com/kr/pretty v0.1.0 // indirect
github.com/mattn/goveralls v0.0.2 // indirect
golang.org/x/tools v0.0.0-20181221001348-537d06c36207 // indirect
gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce // indirect
gopkg.in/yaml.v2 v2.2.2 // indirect
)

View File

@ -1,15 +0,0 @@
package pongo2
func max(a, b int) int {
if a > b {
return a
}
return b
}
func min(a, b int) int {
if a < b {
return a
}
return b
}

View File

@ -1,432 +0,0 @@
package pongo2
import (
"fmt"
"strings"
"unicode/utf8"
"github.com/juju/errors"
)
const (
TokenError = iota
EOF
TokenHTML
TokenKeyword
TokenIdentifier
TokenString
TokenNumber
TokenSymbol
)
var (
tokenSpaceChars = " \n\r\t"
tokenIdentifierChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"
tokenIdentifierCharsWithDigits = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789"
tokenDigits = "0123456789"
// Available symbols in pongo2 (within filters/tag)
TokenSymbols = []string{
// 3-Char symbols
"{{-", "-}}", "{%-", "-%}",
// 2-Char symbols
"==", ">=", "<=", "&&", "||", "{{", "}}", "{%", "%}", "!=", "<>",
// 1-Char symbol
"(", ")", "+", "-", "*", "<", ">", "/", "^", ",", ".", "!", "|", ":", "=", "%",
}
// Available keywords in pongo2
TokenKeywords = []string{"in", "and", "or", "not", "true", "false", "as", "export"}
)
type TokenType int
type Token struct {
Filename string
Typ TokenType
Val string
Line int
Col int
TrimWhitespaces bool
}
type lexerStateFn func() lexerStateFn
type lexer struct {
name string
input string
start int // start pos of the item
pos int // current pos
width int // width of last rune
tokens []*Token
errored bool
startline int
startcol int
line int
col int
inVerbatim bool
verbatimName string
}
func (t *Token) String() string {
val := t.Val
if len(val) > 1000 {
val = fmt.Sprintf("%s...%s", val[:10], val[len(val)-5:len(val)])
}
typ := ""
switch t.Typ {
case TokenHTML:
typ = "HTML"
case TokenError:
typ = "Error"
case TokenIdentifier:
typ = "Identifier"
case TokenKeyword:
typ = "Keyword"
case TokenNumber:
typ = "Number"
case TokenString:
typ = "String"
case TokenSymbol:
typ = "Symbol"
default:
typ = "Unknown"
}
return fmt.Sprintf("<Token Typ=%s (%d) Val='%s' Line=%d Col=%d, WT=%t>",
typ, t.Typ, val, t.Line, t.Col, t.TrimWhitespaces)
}
func lex(name string, input string) ([]*Token, *Error) {
l := &lexer{
name: name,
input: input,
tokens: make([]*Token, 0, 100),
line: 1,
col: 1,
startline: 1,
startcol: 1,
}
l.run()
if l.errored {
errtoken := l.tokens[len(l.tokens)-1]
return nil, &Error{
Filename: name,
Line: errtoken.Line,
Column: errtoken.Col,
Sender: "lexer",
OrigError: errors.New(errtoken.Val),
}
}
return l.tokens, nil
}
func (l *lexer) value() string {
return l.input[l.start:l.pos]
}
func (l *lexer) length() int {
return l.pos - l.start
}
func (l *lexer) emit(t TokenType) {
tok := &Token{
Filename: l.name,
Typ: t,
Val: l.value(),
Line: l.startline,
Col: l.startcol,
}
if t == TokenString {
// Escape sequence \" in strings
tok.Val = strings.Replace(tok.Val, `\"`, `"`, -1)
tok.Val = strings.Replace(tok.Val, `\\`, `\`, -1)
}
if t == TokenSymbol && len(tok.Val) == 3 && (strings.HasSuffix(tok.Val, "-") || strings.HasPrefix(tok.Val, "-")) {
tok.TrimWhitespaces = true
tok.Val = strings.Replace(tok.Val, "-", "", -1)
}
l.tokens = append(l.tokens, tok)
l.start = l.pos
l.startline = l.line
l.startcol = l.col
}
func (l *lexer) next() rune {
if l.pos >= len(l.input) {
l.width = 0
return EOF
}
r, w := utf8.DecodeRuneInString(l.input[l.pos:])
l.width = w
l.pos += l.width
l.col += l.width
return r
}
func (l *lexer) backup() {
l.pos -= l.width
l.col -= l.width
}
func (l *lexer) peek() rune {
r := l.next()
l.backup()
return r
}
func (l *lexer) ignore() {
l.start = l.pos
l.startline = l.line
l.startcol = l.col
}
func (l *lexer) accept(what string) bool {
if strings.IndexRune(what, l.next()) >= 0 {
return true
}
l.backup()
return false
}
func (l *lexer) acceptRun(what string) {
for strings.IndexRune(what, l.next()) >= 0 {
}
l.backup()
}
func (l *lexer) errorf(format string, args ...interface{}) lexerStateFn {
t := &Token{
Filename: l.name,
Typ: TokenError,
Val: fmt.Sprintf(format, args...),
Line: l.startline,
Col: l.startcol,
}
l.tokens = append(l.tokens, t)
l.errored = true
l.startline = l.line
l.startcol = l.col
return nil
}
func (l *lexer) eof() bool {
return l.start >= len(l.input)-1
}
func (l *lexer) run() {
for {
// TODO: Support verbatim tag names
// https://docs.djangoproject.com/en/dev/ref/templates/builtins/#verbatim
if l.inVerbatim {
name := l.verbatimName
if name != "" {
name += " "
}
endverbatim := fmt.Sprintf("{%% endverbatim %s%%}", name)
if strings.HasPrefix(l.input[l.pos:], endverbatim) { // end verbatim
if l.pos > l.start {
l.emit(TokenHTML)
}
// Use the length of the actual end tag (including an optional verbatim name)
w := len(endverbatim)
l.pos += w
l.col += w
l.ignore()
l.inVerbatim = false
}
} else if strings.HasPrefix(l.input[l.pos:], "{% verbatim %}") { // tag
if l.pos > l.start {
l.emit(TokenHTML)
}
l.inVerbatim = true
w := len("{% verbatim %}")
l.pos += w
l.col += w
l.ignore()
}
if !l.inVerbatim {
// Ignore single-line comments {# ... #}
if strings.HasPrefix(l.input[l.pos:], "{#") {
if l.pos > l.start {
l.emit(TokenHTML)
}
l.pos += 2 // pass '{#'
l.col += 2
for {
switch l.peek() {
case EOF:
l.errorf("Single-line comment not closed.")
return
case '\n':
l.errorf("Newline not permitted in a single-line comment.")
return
}
if strings.HasPrefix(l.input[l.pos:], "#}") {
l.pos += 2 // pass '#}'
l.col += 2
break
}
l.next()
}
l.ignore() // ignore whole comment
// Comment skipped
continue // next token
}
if strings.HasPrefix(l.input[l.pos:], "{{") || // variable
strings.HasPrefix(l.input[l.pos:], "{%") { // tag
if l.pos > l.start {
l.emit(TokenHTML)
}
l.tokenize()
if l.errored {
return
}
continue
}
}
switch l.peek() {
case '\n':
l.line++
l.col = 0
}
if l.next() == EOF {
break
}
}
if l.pos > l.start {
l.emit(TokenHTML)
}
if l.inVerbatim {
l.errorf("verbatim-tag not closed, got EOF.")
}
}
func (l *lexer) tokenize() {
for state := l.stateCode; state != nil; {
state = state()
}
}
func (l *lexer) stateCode() lexerStateFn {
outer_loop:
for {
switch {
case l.accept(tokenSpaceChars):
if l.value() == "\n" {
return l.errorf("Newline not allowed within tag/variable.")
}
l.ignore()
continue
case l.accept(tokenIdentifierChars):
return l.stateIdentifier
case l.accept(tokenDigits):
return l.stateNumber
case l.accept(`"'`):
return l.stateString
}
// Check for symbol
for _, sym := range TokenSymbols {
if strings.HasPrefix(l.input[l.start:], sym) {
l.pos += len(sym)
l.col += l.length()
l.emit(TokenSymbol)
if sym == "%}" || sym == "-%}" || sym == "}}" || sym == "-}}" {
// Tag/variable end, return after emit
return nil
}
continue outer_loop
}
}
break
}
// Normal shut down
return nil
}
func (l *lexer) stateIdentifier() lexerStateFn {
l.acceptRun(tokenIdentifierChars)
l.acceptRun(tokenIdentifierCharsWithDigits)
for _, kw := range TokenKeywords {
if kw == l.value() {
l.emit(TokenKeyword)
return l.stateCode
}
}
l.emit(TokenIdentifier)
return l.stateCode
}
func (l *lexer) stateNumber() lexerStateFn {
l.acceptRun(tokenDigits)
if l.accept(tokenIdentifierCharsWithDigits) {
// This seems to be an identifier starting with a number.
// See https://github.com/flosch/pongo2/issues/151
return l.stateIdentifier()
}
/*
Maybe context-sensitive number lexing?
* comments.0.Text // first comment
* usercomments.1.0 // second user, first comment
* if (score >= 8.5) // 8.5 as a number
if l.peek() == '.' {
l.accept(".")
if !l.accept(tokenDigits) {
return l.errorf("Malformed number.")
}
l.acceptRun(tokenDigits)
}
*/
l.emit(TokenNumber)
return l.stateCode
}
func (l *lexer) stateString() lexerStateFn {
quotationMark := l.value()
l.ignore()
l.startcol-- // we're starting the position at the first "
for !l.accept(quotationMark) {
switch l.next() {
case '\\':
// escape sequence
switch l.peek() {
case '"', '\\':
l.next()
default:
return l.errorf("Unknown escape sequence: \\%c", l.peek())
}
case EOF:
return l.errorf("Unexpected EOF, string not closed.")
case '\n':
return l.errorf("Newline in string is not allowed.")
}
}
l.backup()
l.emit(TokenString)
l.next()
l.ignore()
return l.stateCode
}

View File

@ -1,16 +0,0 @@
package pongo2
// The root document
type nodeDocument struct {
Nodes []INode
}
func (doc *nodeDocument) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
for _, n := range doc.Nodes {
err := n.Execute(ctx, writer)
if err != nil {
return err
}
}
return nil
}

View File

@ -1,23 +0,0 @@
package pongo2
import (
"strings"
)
type nodeHTML struct {
token *Token
trimLeft bool
trimRight bool
}
func (n *nodeHTML) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
res := n.token.Val
if n.trimLeft {
res = strings.TrimLeft(res, tokenSpaceChars)
}
if n.trimRight {
res = strings.TrimRight(res, tokenSpaceChars)
}
writer.WriteString(res)
return nil
}

View File

@ -1,16 +0,0 @@
package pongo2
type NodeWrapper struct {
Endtag string
nodes []INode
}
func (wrapper *NodeWrapper) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
for _, n := range wrapper.nodes {
err := n.Execute(ctx, writer)
if err != nil {
return err
}
}
return nil
}

View File

@ -1,26 +0,0 @@
package pongo2
// Options allow you to change the behaviour of the template engine.
// You can change the options before calling the Execute method.
type Options struct {
// If this is set to true the first newline after a block is removed (block, not variable tag!). Defaults to false.
TrimBlocks bool
// If this is set to true leading spaces and tabs are stripped from the start of a line to a block. Defaults to false
LStripBlocks bool
}
func newOptions() *Options {
return &Options{
TrimBlocks: false,
LStripBlocks: false,
}
}
// Update updates this options from another options.
func (opt *Options) Update(other *Options) *Options {
opt.TrimBlocks = other.TrimBlocks
opt.LStripBlocks = other.LStripBlocks
return opt
}

View File

@ -1,309 +0,0 @@
package pongo2
import (
"fmt"
"strings"
"github.com/juju/errors"
)
type INode interface {
Execute(*ExecutionContext, TemplateWriter) *Error
}
type IEvaluator interface {
INode
GetPositionToken() *Token
Evaluate(*ExecutionContext) (*Value, *Error)
FilterApplied(name string) bool
}
// The parser provides you a comprehensive and easy tool to
// work with the template document and arguments provided by
// the user for your custom tag.
//
// The parser works on a token list which will be provided by pongo2.
// A token is a unit you can work with. Tokens are either of type identifier,
// string, number, keyword, HTML or symbol.
//
// (See Token's documentation for more about tokens)
type Parser struct {
name string
idx int
tokens []*Token
lastToken *Token
// if the parser parses a template document, here will be
// a reference to it (needed to access the template through Tags)
template *Template
}
// Creates a new parser to parse tokens.
// Used inside pongo2 to parse documents and to provide an easy-to-use
// parser for tag authors
func newParser(name string, tokens []*Token, template *Template) *Parser {
p := &Parser{
name: name,
tokens: tokens,
template: template,
}
if len(tokens) > 0 {
p.lastToken = tokens[len(tokens)-1]
}
return p
}
// Consume one token. It will be gone forever.
func (p *Parser) Consume() {
p.ConsumeN(1)
}
// Consume N tokens. They will be gone forever.
func (p *Parser) ConsumeN(count int) {
p.idx += count
}
// Returns the current token.
func (p *Parser) Current() *Token {
return p.Get(p.idx)
}
// Returns the CURRENT token if the given type matches.
// Consumes this token on success.
func (p *Parser) MatchType(typ TokenType) *Token {
if t := p.PeekType(typ); t != nil {
p.Consume()
return t
}
return nil
}
// Returns the CURRENT token if the given type AND value matches.
// Consumes this token on success.
func (p *Parser) Match(typ TokenType, val string) *Token {
if t := p.Peek(typ, val); t != nil {
p.Consume()
return t
}
return nil
}
// Returns the CURRENT token if the given type AND *one* of
// the given values matches.
// Consumes this token on success.
func (p *Parser) MatchOne(typ TokenType, vals ...string) *Token {
for _, val := range vals {
if t := p.Peek(typ, val); t != nil {
p.Consume()
return t
}
}
return nil
}
// Returns the CURRENT token if the given type matches.
// It DOES NOT consume the token.
func (p *Parser) PeekType(typ TokenType) *Token {
return p.PeekTypeN(0, typ)
}
// Returns the CURRENT token if the given type AND value matches.
// It DOES NOT consume the token.
func (p *Parser) Peek(typ TokenType, val string) *Token {
return p.PeekN(0, typ, val)
}
// Returns the CURRENT token if the given type AND *one* of
// the given values matches.
// It DOES NOT consume the token.
func (p *Parser) PeekOne(typ TokenType, vals ...string) *Token {
for _, v := range vals {
t := p.PeekN(0, typ, v)
if t != nil {
return t
}
}
return nil
}
// Returns the tokens[current position + shift] token if the
// given type AND value matches for that token.
// DOES NOT consume the token.
func (p *Parser) PeekN(shift int, typ TokenType, val string) *Token {
t := p.Get(p.idx + shift)
if t != nil {
if t.Typ == typ && t.Val == val {
return t
}
}
return nil
}
// Returns the tokens[current position + shift] token if the given type matches.
// DOES NOT consume the token.
func (p *Parser) PeekTypeN(shift int, typ TokenType) *Token {
t := p.Get(p.idx + shift)
if t != nil {
if t.Typ == typ {
return t
}
}
return nil
}
// Returns the UNCONSUMED token count.
func (p *Parser) Remaining() int {
return len(p.tokens) - p.idx
}
// Returns the total token count.
func (p *Parser) Count() int {
return len(p.tokens)
}
// Returns tokens[i] or NIL (if i >= len(tokens))
func (p *Parser) Get(i int) *Token {
if i < len(p.tokens) && i >= 0 {
return p.tokens[i]
}
return nil
}
// Returns tokens[current-position + shift] or NIL
// (if (current-position + i) >= len(tokens))
func (p *Parser) GetR(shift int) *Token {
i := p.idx + shift
return p.Get(i)
}
// Error produces a nice error message and returns an error-object.
// The 'token'-argument is optional. If provided, it will take
// the token's position information. If not provided, it will
// automatically use the CURRENT token's position information.
func (p *Parser) Error(msg string, token *Token) *Error {
if token == nil {
// Set current token
token = p.Current()
if token == nil {
// Set to last token
if len(p.tokens) > 0 {
token = p.tokens[len(p.tokens)-1]
}
}
}
var line, col int
if token != nil {
line = token.Line
col = token.Col
}
return &Error{
Template: p.template,
Filename: p.name,
Sender: "parser",
Line: line,
Column: col,
Token: token,
OrigError: errors.New(msg),
}
}
// Wraps all nodes between starting tag and "{% endtag %}" and provides
// one simple interface to execute the wrapped nodes.
// It returns a parser to process provided arguments to the tag.
func (p *Parser) WrapUntilTag(names ...string) (*NodeWrapper, *Parser, *Error) {
wrapper := &NodeWrapper{}
var tagArgs []*Token
for p.Remaining() > 0 {
// New tag, check whether we have to stop wrapping here
if p.Peek(TokenSymbol, "{%") != nil {
tagIdent := p.PeekTypeN(1, TokenIdentifier)
if tagIdent != nil {
// We've found a (!) end-tag
found := false
for _, n := range names {
if tagIdent.Val == n {
found = true
break
}
}
// We only process the tag if we've found an end tag
if found {
// Okay, endtag found.
p.ConsumeN(2) // '{%' tagname
for {
if p.Match(TokenSymbol, "%}") != nil {
// Okay, end the wrapping here
wrapper.Endtag = tagIdent.Val
return wrapper, newParser(p.template.name, tagArgs, p.template), nil
}
t := p.Current()
p.Consume()
if t == nil {
return nil, nil, p.Error("Unexpected EOF.", p.lastToken)
}
tagArgs = append(tagArgs, t)
}
}
}
}
// Otherwise process next element to be wrapped
node, err := p.parseDocElement()
if err != nil {
return nil, nil, err
}
wrapper.nodes = append(wrapper.nodes, node)
}
return nil, nil, p.Error(fmt.Sprintf("Unexpected EOF, expected tag %s.", strings.Join(names, " or ")),
p.lastToken)
}
// Skips all nodes between starting tag and "{% endtag %}"
func (p *Parser) SkipUntilTag(names ...string) *Error {
for p.Remaining() > 0 {
// New tag, check whether we have to stop wrapping here
if p.Peek(TokenSymbol, "{%") != nil {
tagIdent := p.PeekTypeN(1, TokenIdentifier)
if tagIdent != nil {
// We've found a (!) end-tag
found := false
for _, n := range names {
if tagIdent.Val == n {
found = true
break
}
}
// We only process the tag if we've found an end tag
if found {
// Okay, endtag found.
p.ConsumeN(2) // '{%' tagname
for {
if p.Match(TokenSymbol, "%}") != nil {
// Done skipping, exit.
return nil
}
// Consume remaining tag arguments so we don't loop forever
t := p.Current()
p.Consume()
if t == nil {
return p.Error("Unexpected EOF.", p.lastToken)
}
}
}
}
}
t := p.Current()
p.Consume()
if t == nil {
return p.Error("Unexpected EOF.", p.lastToken)
}
}
return p.Error(fmt.Sprintf("Unexpected EOF, expected tag %s.", strings.Join(names, " or ")), p.lastToken)
}

View File

@ -1,59 +0,0 @@
package pongo2
// Doc = { ( Filter | Tag | HTML ) }
func (p *Parser) parseDocElement() (INode, *Error) {
t := p.Current()
switch t.Typ {
case TokenHTML:
n := &nodeHTML{token: t}
left := p.PeekTypeN(-1, TokenSymbol)
right := p.PeekTypeN(1, TokenSymbol)
n.trimLeft = left != nil && left.TrimWhitespaces
n.trimRight = right != nil && right.TrimWhitespaces
p.Consume() // consume HTML element
return n, nil
case TokenSymbol:
switch t.Val {
case "{{":
// parse variable
variable, err := p.parseVariableElement()
if err != nil {
return nil, err
}
return variable, nil
case "{%":
// parse tag
tag, err := p.parseTagElement()
if err != nil {
return nil, err
}
return tag, nil
}
}
return nil, p.Error("Unexpected token (only HTML/tags/filters in templates allowed)", t)
}
func (tpl *Template) parse() *Error {
tpl.parser = newParser(tpl.name, tpl.tokens, tpl)
doc, err := tpl.parser.parseDocument()
if err != nil {
return err
}
tpl.root = doc
return nil
}
func (p *Parser) parseDocument() (*nodeDocument, *Error) {
doc := &nodeDocument{}
for p.Remaining() > 0 {
node, err := p.parseDocElement()
if err != nil {
return nil, err
}
doc.Nodes = append(doc.Nodes, node)
}
return doc, nil
}

View File

@ -1,503 +0,0 @@
package pongo2
import (
"fmt"
"math"
)
type Expression struct {
// TODO: Add location token?
expr1 IEvaluator
expr2 IEvaluator
opToken *Token
}
type relationalExpression struct {
// TODO: Add location token?
expr1 IEvaluator
expr2 IEvaluator
opToken *Token
}
type simpleExpression struct {
negate bool
negativeSign bool
term1 IEvaluator
term2 IEvaluator
opToken *Token
}
type term struct {
// TODO: Add location token?
factor1 IEvaluator
factor2 IEvaluator
opToken *Token
}
type power struct {
// TODO: Add location token?
power1 IEvaluator
power2 IEvaluator
}
func (expr *Expression) FilterApplied(name string) bool {
return expr.expr1.FilterApplied(name) && (expr.expr2 == nil ||
(expr.expr2 != nil && expr.expr2.FilterApplied(name)))
}
func (expr *relationalExpression) FilterApplied(name string) bool {
return expr.expr1.FilterApplied(name) && (expr.expr2 == nil ||
(expr.expr2 != nil && expr.expr2.FilterApplied(name)))
}
func (expr *simpleExpression) FilterApplied(name string) bool {
return expr.term1.FilterApplied(name) && (expr.term2 == nil ||
(expr.term2 != nil && expr.term2.FilterApplied(name)))
}
func (expr *term) FilterApplied(name string) bool {
return expr.factor1.FilterApplied(name) && (expr.factor2 == nil ||
(expr.factor2 != nil && expr.factor2.FilterApplied(name)))
}
func (expr *power) FilterApplied(name string) bool {
return expr.power1.FilterApplied(name) && (expr.power2 == nil ||
(expr.power2 != nil && expr.power2.FilterApplied(name)))
}
func (expr *Expression) GetPositionToken() *Token {
return expr.expr1.GetPositionToken()
}
func (expr *relationalExpression) GetPositionToken() *Token {
return expr.expr1.GetPositionToken()
}
func (expr *simpleExpression) GetPositionToken() *Token {
return expr.term1.GetPositionToken()
}
func (expr *term) GetPositionToken() *Token {
return expr.factor1.GetPositionToken()
}
func (expr *power) GetPositionToken() *Token {
return expr.power1.GetPositionToken()
}
func (expr *Expression) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (expr *relationalExpression) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (expr *simpleExpression) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (expr *term) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (expr *power) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (expr *Expression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
v1, err := expr.expr1.Evaluate(ctx)
if err != nil {
return nil, err
}
if expr.expr2 != nil {
switch expr.opToken.Val {
case "and", "&&":
if !v1.IsTrue() {
return AsValue(false), nil
} else {
v2, err := expr.expr2.Evaluate(ctx)
if err != nil {
return nil, err
}
return AsValue(v2.IsTrue()), nil
}
case "or", "||":
if v1.IsTrue() {
return AsValue(true), nil
} else {
v2, err := expr.expr2.Evaluate(ctx)
if err != nil {
return nil, err
}
return AsValue(v2.IsTrue()), nil
}
default:
return nil, ctx.Error(fmt.Sprintf("unimplemented: %s", expr.opToken.Val), expr.opToken)
}
} else {
return v1, nil
}
}
func (expr *relationalExpression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
v1, err := expr.expr1.Evaluate(ctx)
if err != nil {
return nil, err
}
if expr.expr2 != nil {
v2, err := expr.expr2.Evaluate(ctx)
if err != nil {
return nil, err
}
switch expr.opToken.Val {
case "<=":
if v1.IsFloat() || v2.IsFloat() {
return AsValue(v1.Float() <= v2.Float()), nil
}
return AsValue(v1.Integer() <= v2.Integer()), nil
case ">=":
if v1.IsFloat() || v2.IsFloat() {
return AsValue(v1.Float() >= v2.Float()), nil
}
return AsValue(v1.Integer() >= v2.Integer()), nil
case "==":
return AsValue(v1.EqualValueTo(v2)), nil
case ">":
if v1.IsFloat() || v2.IsFloat() {
return AsValue(v1.Float() > v2.Float()), nil
}
return AsValue(v1.Integer() > v2.Integer()), nil
case "<":
if v1.IsFloat() || v2.IsFloat() {
return AsValue(v1.Float() < v2.Float()), nil
}
return AsValue(v1.Integer() < v2.Integer()), nil
case "!=", "<>":
return AsValue(!v1.EqualValueTo(v2)), nil
case "in":
return AsValue(v2.Contains(v1)), nil
default:
return nil, ctx.Error(fmt.Sprintf("unimplemented: %s", expr.opToken.Val), expr.opToken)
}
} else {
return v1, nil
}
}
func (expr *simpleExpression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
t1, err := expr.term1.Evaluate(ctx)
if err != nil {
return nil, err
}
result := t1
if expr.negate {
result = result.Negate()
}
if expr.negativeSign {
if result.IsNumber() {
switch {
case result.IsFloat():
result = AsValue(-1 * result.Float())
case result.IsInteger():
result = AsValue(-1 * result.Integer())
default:
return nil, ctx.Error("Operation between a number and a non-(float/integer) is not possible", nil)
}
} else {
return nil, ctx.Error("Negative sign on a non-number expression", expr.GetPositionToken())
}
}
if expr.term2 != nil {
t2, err := expr.term2.Evaluate(ctx)
if err != nil {
return nil, err
}
switch expr.opToken.Val {
case "+":
if result.IsFloat() || t2.IsFloat() {
// Result will be a float
return AsValue(result.Float() + t2.Float()), nil
}
// Result will be an integer
return AsValue(result.Integer() + t2.Integer()), nil
case "-":
if result.IsFloat() || t2.IsFloat() {
// Result will be a float
return AsValue(result.Float() - t2.Float()), nil
}
// Result will be an integer
return AsValue(result.Integer() - t2.Integer()), nil
default:
return nil, ctx.Error("Unimplemented", expr.GetPositionToken())
}
}
return result, nil
}
func (expr *term) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
f1, err := expr.factor1.Evaluate(ctx)
if err != nil {
return nil, err
}
if expr.factor2 != nil {
f2, err := expr.factor2.Evaluate(ctx)
if err != nil {
return nil, err
}
switch expr.opToken.Val {
case "*":
if f1.IsFloat() || f2.IsFloat() {
// Result will be float
return AsValue(f1.Float() * f2.Float()), nil
}
// Result will be int
return AsValue(f1.Integer() * f2.Integer()), nil
case "/":
if f1.IsFloat() || f2.IsFloat() {
// Result will be float
return AsValue(f1.Float() / f2.Float()), nil
}
// Result will be int
return AsValue(f1.Integer() / f2.Integer()), nil
case "%":
// Result will be int
return AsValue(f1.Integer() % f2.Integer()), nil
default:
return nil, ctx.Error("unimplemented", expr.opToken)
}
} else {
return f1, nil
}
}
func (expr *power) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
p1, err := expr.power1.Evaluate(ctx)
if err != nil {
return nil, err
}
if expr.power2 != nil {
p2, err := expr.power2.Evaluate(ctx)
if err != nil {
return nil, err
}
return AsValue(math.Pow(p1.Float(), p2.Float())), nil
}
return p1, nil
}
func (p *Parser) parseFactor() (IEvaluator, *Error) {
if p.Match(TokenSymbol, "(") != nil {
expr, err := p.ParseExpression()
if err != nil {
return nil, err
}
if p.Match(TokenSymbol, ")") == nil {
return nil, p.Error("Closing bracket expected after expression", nil)
}
return expr, nil
}
return p.parseVariableOrLiteralWithFilter()
}
func (p *Parser) parsePower() (IEvaluator, *Error) {
pw := new(power)
power1, err := p.parseFactor()
if err != nil {
return nil, err
}
pw.power1 = power1
if p.Match(TokenSymbol, "^") != nil {
power2, err := p.parsePower()
if err != nil {
return nil, err
}
pw.power2 = power2
}
if pw.power2 == nil {
// Shortcut for faster evaluation
return pw.power1, nil
}
return pw, nil
}
func (p *Parser) parseTerm() (IEvaluator, *Error) {
returnTerm := new(term)
factor1, err := p.parsePower()
if err != nil {
return nil, err
}
returnTerm.factor1 = factor1
for p.PeekOne(TokenSymbol, "*", "/", "%") != nil {
if returnTerm.opToken != nil {
// Create new sub-term
returnTerm = &term{
factor1: returnTerm,
}
}
op := p.Current()
p.Consume()
factor2, err := p.parsePower()
if err != nil {
return nil, err
}
returnTerm.opToken = op
returnTerm.factor2 = factor2
}
if returnTerm.opToken == nil {
// Shortcut for faster evaluation
return returnTerm.factor1, nil
}
return returnTerm, nil
}
func (p *Parser) parseSimpleExpression() (IEvaluator, *Error) {
expr := new(simpleExpression)
if sign := p.MatchOne(TokenSymbol, "+", "-"); sign != nil {
if sign.Val == "-" {
expr.negativeSign = true
}
}
if p.Match(TokenSymbol, "!") != nil || p.Match(TokenKeyword, "not") != nil {
expr.negate = true
}
term1, err := p.parseTerm()
if err != nil {
return nil, err
}
expr.term1 = term1
for p.PeekOne(TokenSymbol, "+", "-") != nil {
if expr.opToken != nil {
// New sub expr
expr = &simpleExpression{
term1: expr,
}
}
op := p.Current()
p.Consume()
term2, err := p.parseTerm()
if err != nil {
return nil, err
}
expr.term2 = term2
expr.opToken = op
}
if !expr.negate && !expr.negativeSign && expr.term2 == nil {
// Shortcut for faster evaluation
return expr.term1, nil
}
return expr, nil
}
func (p *Parser) parseRelationalExpression() (IEvaluator, *Error) {
expr1, err := p.parseSimpleExpression()
if err != nil {
return nil, err
}
expr := &relationalExpression{
expr1: expr1,
}
if t := p.MatchOne(TokenSymbol, "==", "<=", ">=", "!=", "<>", ">", "<"); t != nil {
expr2, err := p.parseRelationalExpression()
if err != nil {
return nil, err
}
expr.opToken = t
expr.expr2 = expr2
} else if t := p.MatchOne(TokenKeyword, "in"); t != nil {
expr2, err := p.parseSimpleExpression()
if err != nil {
return nil, err
}
expr.opToken = t
expr.expr2 = expr2
}
if expr.expr2 == nil {
// Shortcut for faster evaluation
return expr.expr1, nil
}
return expr, nil
}
func (p *Parser) ParseExpression() (IEvaluator, *Error) {
rexpr1, err := p.parseRelationalExpression()
if err != nil {
return nil, err
}
exp := &Expression{
expr1: rexpr1,
}
if p.PeekOne(TokenSymbol, "&&", "||") != nil || p.PeekOne(TokenKeyword, "and", "or") != nil {
op := p.Current()
p.Consume()
expr2, err := p.ParseExpression()
if err != nil {
return nil, err
}
exp.expr2 = expr2
exp.opToken = op
}
if exp.expr2 == nil {
// Shortcut for faster evaluation
return exp.expr1, nil
}
return exp, nil
}
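// Illustrative driver (a separate program, not part of this package) exercising the grammar
// above through the public API; FromString, Must, Execute and Context are the package's
// documented entry points, while the context values are made up. "^" binds tighter than
// "*", "/" and "%", which bind tighter than "+"/"-", followed by the relational operators
// and finally the short-circuiting "and"/"or".
//
//	package main
//
//	import (
//		"fmt"
//
//		"github.com/flosch/pongo2"
//	)
//
//	func main() {
//		tpl := pongo2.Must(pongo2.FromString("{{ 2 + 3 * 4 }} {{ a > 1 and b in items }}"))
//		out, err := tpl.Execute(pongo2.Context{"a": 5, "b": "x", "items": []string{"x", "y"}})
//		if err != nil {
//			panic(err)
//		}
//		fmt.Println(out) // e.g. "14 True"
//	}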

View File

@ -1,14 +0,0 @@
package pongo2
// Version string
const Version = "dev"
// Must panics if a Template couldn't be parsed successfully. This is how you
// would use it:
// var baseTemplate = pongo2.Must(pongo2.FromFile("templates/base.html"))
func Must(tpl *Template, err error) *Template {
if err != nil {
panic(err)
}
return tpl
}

View File

@ -1,135 +0,0 @@
package pongo2
/* Incomplete:
-----------
verbatim (only the "name" argument is missing for verbatim)
Reconsideration:
----------------
debug (reason: not sure what to output yet)
regroup / Grouping on other properties (reason: maybe too python-specific; not sure how useful this would be in Go)
The following built-in tags won't be added:
--------------------------------------
csrf_token (reason: web-framework specific)
load (reason: python-specific)
url (reason: web-framework specific)
*/
import (
"fmt"
"github.com/juju/errors"
)
type INodeTag interface {
INode
}
// This is the function signature of the tag's parser you will have
// to implement in order to create a new tag.
//
// 'doc' provides access to the whole document while 'arguments'
// provides access to the user's arguments to the tag:
//
// {% your_tag_name some "arguments" 123 %}
//
// start_token will be the *Token with the tag's name in it (here: your_tag_name).
//
// Please see the Parser documentation on how to use the parser.
// See RegisterTag()'s documentation for more information about
// writing a tag as well.
type TagParser func(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error)
type tag struct {
name string
parser TagParser
}
var tags map[string]*tag
func init() {
tags = make(map[string]*tag)
}
// Registers a new tag. You usually want to call this
// function in the tag's init() function:
// http://golang.org/doc/effective_go.html#init
//
// See http://www.florian-schlachter.de/post/pongo2/ for more about
// writing filters and tags.
func RegisterTag(name string, parserFn TagParser) error {
_, existing := tags[name]
if existing {
return errors.Errorf("tag with name '%s' is already registered", name)
}
tags[name] = &tag{
name: name,
parser: parserFn,
}
return nil
}
// Replaces an already registered tag with a new implementation. Use this
// function with caution since it allows you to change existing tag behaviour.
func ReplaceTag(name string, parserFn TagParser) error {
_, existing := tags[name]
if !existing {
return errors.Errorf("tag with name '%s' does not exist (therefore cannot be overridden)", name)
}
tags[name] = &tag{
name: name,
parser: parserFn,
}
return nil
}
// Tag = "{%" IDENT ARGS "%}"
func (p *Parser) parseTagElement() (INodeTag, *Error) {
p.Consume() // consume "{%"
tokenName := p.MatchType(TokenIdentifier)
// Check for identifier
if tokenName == nil {
return nil, p.Error("Tag name must be an identifier.", nil)
}
// Check for the existing tag
tag, exists := tags[tokenName.Val]
if !exists {
// Does not exist
return nil, p.Error(fmt.Sprintf("Tag '%s' not found (or beginning tag not provided)", tokenName.Val), tokenName)
}
// Check sandbox tag restriction
if _, isBanned := p.template.set.bannedTags[tokenName.Val]; isBanned {
return nil, p.Error(fmt.Sprintf("Usage of tag '%s' is not allowed (sandbox restriction active).", tokenName.Val), tokenName)
}
var argsToken []*Token
for p.Peek(TokenSymbol, "%}") == nil && p.Remaining() > 0 {
// Add token to args
argsToken = append(argsToken, p.Current())
p.Consume() // next token
}
// EOF?
if p.Remaining() == 0 {
return nil, p.Error("Unexpectedly reached EOF, no tag end found.", p.lastToken)
}
p.Match(TokenSymbol, "%}")
argParser := newParser(p.name, argsToken, p.template)
if len(argsToken) == 0 {
// This is done to have nice EOF error messages
argParser.lastToken = tokenName
}
p.template.level++
defer func() { p.template.level-- }()
return tag.parser(p, tokenName, argParser)
}
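// Illustrative sketch of the API documented above, not one of the vendored tags: the tag
// name "shout" and its behaviour are invented, but the sketch only uses identifiers defined
// in this package (RegisterTag, TagParser, TemplateWriter, arguments.Error) and mirrors the
// structure of the built-in tags below.
type tagShoutNode struct{}

func (node *tagShoutNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	// {% shout %} simply renders a fixed string.
	writer.WriteString("HELLO")
	return nil
}

func tagShoutParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	if arguments.Remaining() > 0 {
		return nil, arguments.Error("Tag 'shout' does not take any arguments.", nil)
	}
	return &tagShoutNode{}, nil
}

func init() {
	RegisterTag("shout", tagShoutParser)
}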

View File

@ -1,52 +0,0 @@
package pongo2
type tagAutoescapeNode struct {
wrapper *NodeWrapper
autoescape bool
}
func (node *tagAutoescapeNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
old := ctx.Autoescape
ctx.Autoescape = node.autoescape
err := node.wrapper.Execute(ctx, writer)
if err != nil {
return err
}
ctx.Autoescape = old
return nil
}
func tagAutoescapeParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
autoescapeNode := &tagAutoescapeNode{}
wrapper, _, err := doc.WrapUntilTag("endautoescape")
if err != nil {
return nil, err
}
autoescapeNode.wrapper = wrapper
modeToken := arguments.MatchType(TokenIdentifier)
if modeToken == nil {
return nil, arguments.Error("A mode is required for autoescape-tag.", nil)
}
if modeToken.Val == "on" {
autoescapeNode.autoescape = true
} else if modeToken.Val == "off" {
autoescapeNode.autoescape = false
} else {
return nil, arguments.Error("Only 'on' or 'off' is valid as an autoescape-mode.", nil)
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed autoescape-tag arguments.", nil)
}
return autoescapeNode, nil
}
func init() {
RegisterTag("autoescape", tagAutoescapeParser)
}
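// Usage sketch (illustrative): the tag toggles ctx.Autoescape for the wrapped body only and
// restores the previous setting afterwards, exactly as Execute() above does.
//
//   {% autoescape off %}{{ "<b>bold</b>" }}{% endautoescape %}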

View File

@ -1,129 +0,0 @@
package pongo2
import (
"bytes"
"fmt"
)
type tagBlockNode struct {
name string
}
func (node *tagBlockNode) getBlockWrappers(tpl *Template) []*NodeWrapper {
nodeWrappers := make([]*NodeWrapper, 0)
var t *NodeWrapper
for tpl != nil {
t = tpl.blocks[node.name]
if t != nil {
nodeWrappers = append(nodeWrappers, t)
}
tpl = tpl.child
}
return nodeWrappers
}
func (node *tagBlockNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
tpl := ctx.template
if tpl == nil {
panic("internal error: tpl == nil")
}
// Determine the block to execute
blockWrappers := node.getBlockWrappers(tpl)
lenBlockWrappers := len(blockWrappers)
if lenBlockWrappers == 0 {
return ctx.Error("internal error: len(block_wrappers) == 0 in tagBlockNode.Execute()", nil)
}
blockWrapper := blockWrappers[lenBlockWrappers-1]
ctx.Private["block"] = tagBlockInformation{
ctx: ctx,
wrappers: blockWrappers[0 : lenBlockWrappers-1],
}
err := blockWrapper.Execute(ctx, writer)
if err != nil {
return err
}
return nil
}
type tagBlockInformation struct {
ctx *ExecutionContext
wrappers []*NodeWrapper
}
func (t tagBlockInformation) Super() string {
lenWrappers := len(t.wrappers)
if lenWrappers == 0 {
return ""
}
superCtx := NewChildExecutionContext(t.ctx)
superCtx.Private["block"] = tagBlockInformation{
ctx: t.ctx,
wrappers: t.wrappers[0 : lenWrappers-1],
}
blockWrapper := t.wrappers[lenWrappers-1]
buf := bytes.NewBufferString("")
err := blockWrapper.Execute(superCtx, &templateWriter{buf})
if err != nil {
return ""
}
return buf.String()
}
func tagBlockParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
if arguments.Count() == 0 {
return nil, arguments.Error("Tag 'block' requires an identifier.", nil)
}
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
return nil, arguments.Error("First argument for tag 'block' must be an identifier.", nil)
}
if arguments.Remaining() != 0 {
return nil, arguments.Error("Tag 'block' takes exactly 1 argument (an identifier).", nil)
}
wrapper, endtagargs, err := doc.WrapUntilTag("endblock")
if err != nil {
return nil, err
}
if endtagargs.Remaining() > 0 {
endtagnameToken := endtagargs.MatchType(TokenIdentifier)
if endtagnameToken != nil {
if endtagnameToken.Val != nameToken.Val {
return nil, endtagargs.Error(fmt.Sprintf("Name for 'endblock' must match the 'block'-tag's name ('%s' != '%s').",
nameToken.Val, endtagnameToken.Val), nil)
}
}
if endtagnameToken == nil || endtagargs.Remaining() > 0 {
return nil, endtagargs.Error("Either no or only one argument (identifier) allowed for 'endblock'.", nil)
}
}
tpl := doc.template
if tpl == nil {
panic("internal error: tpl == nil")
}
_, hasBlock := tpl.blocks[nameToken.Val]
if !hasBlock {
tpl.blocks[nameToken.Val] = wrapper
} else {
return nil, arguments.Error(fmt.Sprintf("Block named '%s' already defined", nameToken.Val), nil)
}
return &tagBlockNode{name: nameToken.Val}, nil
}
func init() {
RegisterTag("block", tagBlockParser)
}

View File

@ -1,27 +0,0 @@
package pongo2
type tagCommentNode struct{}
func (node *tagCommentNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
return nil
}
func tagCommentParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
commentNode := &tagCommentNode{}
// TODO: Process the endtag's arguments (see django 'comment'-tag documentation)
err := doc.SkipUntilTag("endcomment")
if err != nil {
return nil, err
}
if arguments.Count() != 0 {
return nil, arguments.Error("Tag 'comment' does not take any argument.", nil)
}
return commentNode, nil
}
func init() {
RegisterTag("comment", tagCommentParser)
}

View File

@ -1,106 +0,0 @@
package pongo2
type tagCycleValue struct {
node *tagCycleNode
value *Value
}
type tagCycleNode struct {
position *Token
args []IEvaluator
idx int
asName string
silent bool
}
func (cv *tagCycleValue) String() string {
return cv.value.String()
}
func (node *tagCycleNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
item := node.args[node.idx%len(node.args)]
node.idx++
val, err := item.Evaluate(ctx)
if err != nil {
return err
}
if t, ok := val.Interface().(*tagCycleValue); ok {
// {% cycle "test1" "test2"
// {% cycle cycleitem %}
// Update the cycle value with next value
item := t.node.args[t.node.idx%len(t.node.args)]
t.node.idx++
val, err := item.Evaluate(ctx)
if err != nil {
return err
}
t.value = val
if !t.node.silent {
writer.WriteString(val.String())
}
} else {
// Regular call
cycleValue := &tagCycleValue{
node: node,
value: val,
}
if node.asName != "" {
ctx.Private[node.asName] = cycleValue
}
if !node.silent {
writer.WriteString(val.String())
}
}
return nil
}
// HINT: We're not supporting the old comma-separated list of expressions argument-style
func tagCycleParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
cycleNode := &tagCycleNode{
position: start,
}
for arguments.Remaining() > 0 {
node, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
cycleNode.args = append(cycleNode.args, node)
if arguments.MatchOne(TokenKeyword, "as") != nil {
// as
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
return nil, arguments.Error("Name (identifier) expected after 'as'.", nil)
}
cycleNode.asName = nameToken.Val
if arguments.MatchOne(TokenIdentifier, "silent") != nil {
cycleNode.silent = true
}
// Now we're finished
break
}
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed cycle-tag.", nil)
}
return cycleNode, nil
}
func init() {
RegisterTag("cycle", tagCycleParser)
}

View File

@ -1,52 +0,0 @@
package pongo2
type tagExtendsNode struct {
filename string
}
func (node *tagExtendsNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
return nil
}
func tagExtendsParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
extendsNode := &tagExtendsNode{}
if doc.template.level > 1 {
return nil, arguments.Error("The 'extends' tag can only defined on root level.", start)
}
if doc.template.parent != nil {
// Already one parent
return nil, arguments.Error("This template has already one parent.", start)
}
if filenameToken := arguments.MatchType(TokenString); filenameToken != nil {
// prepared, static template
// Get parent's filename
parentFilename := doc.template.set.resolveFilename(doc.template, filenameToken.Val)
// Parse the parent
parentTemplate, err := doc.template.set.FromFile(parentFilename)
if err != nil {
return nil, err.(*Error)
}
// Keep track of things
parentTemplate.child = doc.template
doc.template.parent = parentTemplate
extendsNode.filename = parentFilename
} else {
return nil, arguments.Error("Tag 'extends' requires a template filename as string.", nil)
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Tag 'extends' does only take 1 argument.", nil)
}
return extendsNode, nil
}
func init() {
RegisterTag("extends", tagExtendsParser)
}
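// Usage sketch (hypothetical file names): 'extends' pairs with the 'block' tag above to
// implement template inheritance; the child overrides named blocks and can pull in the
// parent's content via the Super() method of tagBlockInformation.
//
//   {# base.html #}
//   <title>{% block title %}Default{% endblock %}</title>
//
//   {# child.html #}
//   {% extends "base.html" %}
//   {% block title %}Home | {{ block.Super }}{% endblock %}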

View File

@ -1,95 +0,0 @@
package pongo2
import (
"bytes"
)
type nodeFilterCall struct {
name string
paramExpr IEvaluator
}
type tagFilterNode struct {
position *Token
bodyWrapper *NodeWrapper
filterChain []*nodeFilterCall
}
func (node *tagFilterNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
temp := bytes.NewBuffer(make([]byte, 0, 1024)) // 1 KiB size
err := node.bodyWrapper.Execute(ctx, temp)
if err != nil {
return err
}
value := AsValue(temp.String())
for _, call := range node.filterChain {
var param *Value
if call.paramExpr != nil {
param, err = call.paramExpr.Evaluate(ctx)
if err != nil {
return err
}
} else {
param = AsValue(nil)
}
value, err = ApplyFilter(call.name, value, param)
if err != nil {
return ctx.Error(err.Error(), node.position)
}
}
writer.WriteString(value.String())
return nil
}
func tagFilterParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
filterNode := &tagFilterNode{
position: start,
}
wrapper, _, err := doc.WrapUntilTag("endfilter")
if err != nil {
return nil, err
}
filterNode.bodyWrapper = wrapper
for arguments.Remaining() > 0 {
filterCall := &nodeFilterCall{}
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
return nil, arguments.Error("Expected a filter name (identifier).", nil)
}
filterCall.name = nameToken.Val
if arguments.MatchOne(TokenSymbol, ":") != nil {
// Filter parameter
// NOTICE: we can't use ParseExpression() here, because it would parse the next filter "|..." as well in the argument list
expr, err := arguments.parseVariableOrLiteral()
if err != nil {
return nil, err
}
filterCall.paramExpr = expr
}
filterNode.filterChain = append(filterNode.filterChain, filterCall)
if arguments.MatchOne(TokenSymbol, "|") == nil {
break
}
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed filter-tag arguments.", nil)
}
return filterNode, nil
}
func init() {
RegisterTag("filter", tagFilterParser)
}

View File

@ -1,49 +0,0 @@
package pongo2
type tagFirstofNode struct {
position *Token
args []IEvaluator
}
func (node *tagFirstofNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
for _, arg := range node.args {
val, err := arg.Evaluate(ctx)
if err != nil {
return err
}
if val.IsTrue() {
if ctx.Autoescape && !arg.FilterApplied("safe") {
val, err = ApplyFilter("escape", val, nil)
if err != nil {
return err
}
}
writer.WriteString(val.String())
return nil
}
}
return nil
}
func tagFirstofParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
firstofNode := &tagFirstofNode{
position: start,
}
for arguments.Remaining() > 0 {
node, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
firstofNode.args = append(firstofNode.args, node)
}
return firstofNode, nil
}
func init() {
RegisterTag("firstof", tagFirstofParser)
}

View File

@ -1,159 +0,0 @@
package pongo2
type tagForNode struct {
key string
value string // only for maps: for key, value in map
objectEvaluator IEvaluator
reversed bool
sorted bool
bodyWrapper *NodeWrapper
emptyWrapper *NodeWrapper
}
type tagForLoopInformation struct {
Counter int
Counter0 int
Revcounter int
Revcounter0 int
First bool
Last bool
Parentloop *tagForLoopInformation
}
func (node *tagForNode) Execute(ctx *ExecutionContext, writer TemplateWriter) (forError *Error) {
// Backup forloop (as parentloop in public context), key-name and value-name
forCtx := NewChildExecutionContext(ctx)
parentloop := forCtx.Private["forloop"]
// Create loop struct
loopInfo := &tagForLoopInformation{
First: true,
}
// Is it a loop in a loop?
if parentloop != nil {
loopInfo.Parentloop = parentloop.(*tagForLoopInformation)
}
// Register loopInfo in public context
forCtx.Private["forloop"] = loopInfo
obj, err := node.objectEvaluator.Evaluate(forCtx)
if err != nil {
return err
}
obj.IterateOrder(func(idx, count int, key, value *Value) bool {
// There's something to iterate over (correct type and at least 1 item)
// Update loop infos and public context
forCtx.Private[node.key] = key
if value != nil {
forCtx.Private[node.value] = value
}
loopInfo.Counter = idx + 1
loopInfo.Counter0 = idx
if idx == 1 {
loopInfo.First = false
}
if idx+1 == count {
loopInfo.Last = true
}
loopInfo.Revcounter = count - idx // TODO: Not sure about this, have to look it up
loopInfo.Revcounter0 = count - (idx + 1) // TODO: Not sure about this, have to look it up
// Render elements with updated context
err := node.bodyWrapper.Execute(forCtx, writer)
if err != nil {
forError = err
return false
}
return true
}, func() {
// Nothing to iterate over (maybe wrong type or no items)
if node.emptyWrapper != nil {
err := node.emptyWrapper.Execute(forCtx, writer)
if err != nil {
forError = err
}
}
}, node.reversed, node.sorted)
return forError
}
func tagForParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
forNode := &tagForNode{}
// Arguments parsing
var valueToken *Token
keyToken := arguments.MatchType(TokenIdentifier)
if keyToken == nil {
return nil, arguments.Error("Expected an key identifier as first argument for 'for'-tag", nil)
}
if arguments.Match(TokenSymbol, ",") != nil {
// Value name is provided
valueToken = arguments.MatchType(TokenIdentifier)
if valueToken == nil {
return nil, arguments.Error("Value name must be an identifier.", nil)
}
}
if arguments.Match(TokenKeyword, "in") == nil {
return nil, arguments.Error("Expected keyword 'in'.", nil)
}
objectEvaluator, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
forNode.objectEvaluator = objectEvaluator
forNode.key = keyToken.Val
if valueToken != nil {
forNode.value = valueToken.Val
}
if arguments.MatchOne(TokenIdentifier, "reversed") != nil {
forNode.reversed = true
}
if arguments.MatchOne(TokenIdentifier, "sorted") != nil {
forNode.sorted = true
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed for-loop arguments.", nil)
}
// Body wrapping
wrapper, endargs, err := doc.WrapUntilTag("empty", "endfor")
if err != nil {
return nil, err
}
forNode.bodyWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
if wrapper.Endtag == "empty" {
// if there's an 'empty' block in the for-loop, we need its wrapper as well
wrapper, endargs, err = doc.WrapUntilTag("endfor")
if err != nil {
return nil, err
}
forNode.emptyWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
}
return forNode, nil
}
func init() {
RegisterTag("for", tagForParser)
}
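// Usage sketch (illustrative context names): both forms parsed above, plus the loop
// metadata exposed as 'forloop' (Counter, Counter0, Revcounter, Revcounter0, First, Last,
// Parentloop), matching the tagForLoopInformation fields set in Execute().
//
//   {% for item in items sorted %}{{ forloop.Counter }}. {{ item }}{% endfor %}
//   {% for key, value in mapping %}{{ key }}={{ value }}{% empty %}nothing here{% endfor %}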

View File

@ -1,76 +0,0 @@
package pongo2
type tagIfNode struct {
conditions []IEvaluator
wrappers []*NodeWrapper
}
func (node *tagIfNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
for i, condition := range node.conditions {
result, err := condition.Evaluate(ctx)
if err != nil {
return err
}
if result.IsTrue() {
return node.wrappers[i].Execute(ctx, writer)
}
// Last condition?
if len(node.conditions) == i+1 && len(node.wrappers) > i+1 {
return node.wrappers[i+1].Execute(ctx, writer)
}
}
return nil
}
func tagIfParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ifNode := &tagIfNode{}
// Parse first and main IF condition
condition, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
ifNode.conditions = append(ifNode.conditions, condition)
if arguments.Remaining() > 0 {
return nil, arguments.Error("If-condition is malformed.", nil)
}
// Check the rest
for {
wrapper, tagArgs, err := doc.WrapUntilTag("elif", "else", "endif")
if err != nil {
return nil, err
}
ifNode.wrappers = append(ifNode.wrappers, wrapper)
if wrapper.Endtag == "elif" {
// elif can take a condition
condition, err = tagArgs.ParseExpression()
if err != nil {
return nil, err
}
ifNode.conditions = append(ifNode.conditions, condition)
if tagArgs.Remaining() > 0 {
return nil, tagArgs.Error("Elif-condition is malformed.", nil)
}
} else {
if tagArgs.Count() > 0 {
// else/endif can't take any conditions
return nil, tagArgs.Error("Arguments not allowed here.", nil)
}
}
if wrapper.Endtag == "endif" {
break
}
}
return ifNode, nil
}
func init() {
RegisterTag("if", tagIfParser)
}
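// Usage sketch (illustrative): conditions are evaluated in order and the first true one
// wins; a final wrapper without a condition acts as the else branch.
//
//   {% if user.Admin %}admin{% elif user.Guest %}guest{% else %}member{% endif %}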

View File

@ -1,116 +0,0 @@
package pongo2
import (
"bytes"
)
type tagIfchangedNode struct {
watchedExpr []IEvaluator
lastValues []*Value
lastContent []byte
thenWrapper *NodeWrapper
elseWrapper *NodeWrapper
}
func (node *tagIfchangedNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
if len(node.watchedExpr) == 0 {
// Check against own rendered body
buf := bytes.NewBuffer(make([]byte, 0, 1024)) // 1 KiB
err := node.thenWrapper.Execute(ctx, buf)
if err != nil {
return err
}
bufBytes := buf.Bytes()
if !bytes.Equal(node.lastContent, bufBytes) {
// Rendered content changed, output it
writer.Write(bufBytes)
node.lastContent = bufBytes
}
} else {
nowValues := make([]*Value, 0, len(node.watchedExpr))
for _, expr := range node.watchedExpr {
val, err := expr.Evaluate(ctx)
if err != nil {
return err
}
nowValues = append(nowValues, val)
}
// Compare old to new values now
changed := len(node.lastValues) == 0
for idx, oldVal := range node.lastValues {
if !oldVal.EqualValueTo(nowValues[idx]) {
changed = true
break // we can stop here because ONE value changed
}
}
node.lastValues = nowValues
if changed {
// Render thenWrapper
err := node.thenWrapper.Execute(ctx, writer)
if err != nil {
return err
}
} else {
// Render elseWrapper
err := node.elseWrapper.Execute(ctx, writer)
if err != nil {
return err
}
}
}
return nil
}
func tagIfchangedParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ifchangedNode := &tagIfchangedNode{}
for arguments.Remaining() > 0 {
// Parse condition
expr, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
ifchangedNode.watchedExpr = append(ifchangedNode.watchedExpr, expr)
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Ifchanged-arguments are malformed.", nil)
}
// Wrap then/else-blocks
wrapper, endargs, err := doc.WrapUntilTag("else", "endifchanged")
if err != nil {
return nil, err
}
ifchangedNode.thenWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
if wrapper.Endtag == "else" {
// if there's an else in the if-statement, we need the else-Block as well
wrapper, endargs, err = doc.WrapUntilTag("endifchanged")
if err != nil {
return nil, err
}
ifchangedNode.elseWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
}
return ifchangedNode, nil
}
func init() {
RegisterTag("ifchanged", tagIfchangedParser)
}

View File

@ -1,78 +0,0 @@
package pongo2
type tagIfEqualNode struct {
var1, var2 IEvaluator
thenWrapper *NodeWrapper
elseWrapper *NodeWrapper
}
func (node *tagIfEqualNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
r1, err := node.var1.Evaluate(ctx)
if err != nil {
return err
}
r2, err := node.var2.Evaluate(ctx)
if err != nil {
return err
}
result := r1.EqualValueTo(r2)
if result {
return node.thenWrapper.Execute(ctx, writer)
}
if node.elseWrapper != nil {
return node.elseWrapper.Execute(ctx, writer)
}
return nil
}
func tagIfEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ifequalNode := &tagIfEqualNode{}
// Parse two expressions
var1, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
var2, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
ifequalNode.var1 = var1
ifequalNode.var2 = var2
if arguments.Remaining() > 0 {
return nil, arguments.Error("ifequal only takes 2 arguments.", nil)
}
// Wrap then/else-blocks
wrapper, endargs, err := doc.WrapUntilTag("else", "endifequal")
if err != nil {
return nil, err
}
ifequalNode.thenWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
if wrapper.Endtag == "else" {
// if there's an else in the if-statement, we need the else-Block as well
wrapper, endargs, err = doc.WrapUntilTag("endifequal")
if err != nil {
return nil, err
}
ifequalNode.elseWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
}
return ifequalNode, nil
}
func init() {
RegisterTag("ifequal", tagIfEqualParser)
}

View File

@ -1,78 +0,0 @@
package pongo2
type tagIfNotEqualNode struct {
var1, var2 IEvaluator
thenWrapper *NodeWrapper
elseWrapper *NodeWrapper
}
func (node *tagIfNotEqualNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
r1, err := node.var1.Evaluate(ctx)
if err != nil {
return err
}
r2, err := node.var2.Evaluate(ctx)
if err != nil {
return err
}
result := !r1.EqualValueTo(r2)
if result {
return node.thenWrapper.Execute(ctx, writer)
}
if node.elseWrapper != nil {
return node.elseWrapper.Execute(ctx, writer)
}
return nil
}
func tagIfNotEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ifnotequalNode := &tagIfNotEqualNode{}
// Parse two expressions
var1, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
var2, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
ifnotequalNode.var1 = var1
ifnotequalNode.var2 = var2
if arguments.Remaining() > 0 {
return nil, arguments.Error("ifequal only takes 2 arguments.", nil)
}
// Wrap then/else-blocks
wrapper, endargs, err := doc.WrapUntilTag("else", "endifnotequal")
if err != nil {
return nil, err
}
ifnotequalNode.thenWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
if wrapper.Endtag == "else" {
// if there's an else in the if-statement, we need the else-Block as well
wrapper, endargs, err = doc.WrapUntilTag("endifnotequal")
if err != nil {
return nil, err
}
ifnotequalNode.elseWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
}
return ifnotequalNode, nil
}
func init() {
RegisterTag("ifnotequal", tagIfNotEqualParser)
}

View File

@ -1,84 +0,0 @@
package pongo2
import (
"fmt"
)
type tagImportNode struct {
position *Token
filename string
macros map[string]*tagMacroNode // alias/name -> macro instance
}
func (node *tagImportNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
for name, macro := range node.macros {
func(name string, macro *tagMacroNode) {
ctx.Private[name] = func(args ...*Value) *Value {
return macro.call(ctx, args...)
}
}(name, macro)
}
return nil
}
func tagImportParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
importNode := &tagImportNode{
position: start,
macros: make(map[string]*tagMacroNode),
}
filenameToken := arguments.MatchType(TokenString)
if filenameToken == nil {
return nil, arguments.Error("Import-tag needs a filename as string.", nil)
}
importNode.filename = doc.template.set.resolveFilename(doc.template, filenameToken.Val)
if arguments.Remaining() == 0 {
return nil, arguments.Error("You must at least specify one macro to import.", nil)
}
// Compile the given template
tpl, err := doc.template.set.FromFile(importNode.filename)
if err != nil {
return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, start)
}
for arguments.Remaining() > 0 {
macroNameToken := arguments.MatchType(TokenIdentifier)
if macroNameToken == nil {
return nil, arguments.Error("Expected macro name (identifier).", nil)
}
asName := macroNameToken.Val
if arguments.Match(TokenKeyword, "as") != nil {
aliasToken := arguments.MatchType(TokenIdentifier)
if aliasToken == nil {
return nil, arguments.Error("Expected macro alias name (identifier).", nil)
}
asName = aliasToken.Val
}
macroInstance, has := tpl.exportedMacros[macroNameToken.Val]
if !has {
return nil, arguments.Error(fmt.Sprintf("Macro '%s' not found (or not exported) in '%s'.", macroNameToken.Val,
importNode.filename), macroNameToken)
}
importNode.macros[asName] = macroInstance
if arguments.Remaining() == 0 {
break
}
if arguments.Match(TokenSymbol, ",") == nil {
return nil, arguments.Error("Expected ','.", nil)
}
}
return importNode, nil
}
func init() {
RegisterTag("import", tagImportParser)
}
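// Usage sketch (hypothetical file and macro names): imported macros become callable
// functions in the private context, optionally under an alias. Only macros declared with
// 'export' in the source template can be imported (see the exportedMacros check above).
//
//   {% import "macros.html" input, label as field_label %}
//   {{ input("q") }} {{ field_label("Name") }}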

View File

@ -1,146 +0,0 @@
package pongo2
type tagIncludeNode struct {
tpl *Template
filenameEvaluator IEvaluator
lazy bool
only bool
filename string
withPairs map[string]IEvaluator
ifExists bool
}
func (node *tagIncludeNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
// Building the context for the template
includeCtx := make(Context)
// Fill the context with all data from the parent
if !node.only {
includeCtx.Update(ctx.Public)
includeCtx.Update(ctx.Private)
}
// Put all custom with-pairs into the context
for key, value := range node.withPairs {
val, err := value.Evaluate(ctx)
if err != nil {
return err
}
includeCtx[key] = val
}
// Execute the template
if node.lazy {
// Evaluate the filename
filename, err := node.filenameEvaluator.Evaluate(ctx)
if err != nil {
return err
}
if filename.String() == "" {
return ctx.Error("Filename for 'include'-tag evaluated to an empty string.", nil)
}
// Get include-filename
includedFilename := ctx.template.set.resolveFilename(ctx.template, filename.String())
includedTpl, err2 := ctx.template.set.FromFile(includedFilename)
if err2 != nil {
// if this is ReadFile error, and "if_exists" flag is enabled
if node.ifExists && err2.(*Error).Sender == "fromfile" {
return nil
}
return err2.(*Error)
}
err2 = includedTpl.ExecuteWriter(includeCtx, writer)
if err2 != nil {
return err2.(*Error)
}
return nil
}
// Template is already parsed with static filename
err := node.tpl.ExecuteWriter(includeCtx, writer)
if err != nil {
return err.(*Error)
}
return nil
}
type tagIncludeEmptyNode struct{}
func (node *tagIncludeEmptyNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
return nil
}
func tagIncludeParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
includeNode := &tagIncludeNode{
withPairs: make(map[string]IEvaluator),
}
if filenameToken := arguments.MatchType(TokenString); filenameToken != nil {
// prepared, static template
// "if_exists" flag
ifExists := arguments.Match(TokenIdentifier, "if_exists") != nil
// Get include-filename
includedFilename := doc.template.set.resolveFilename(doc.template, filenameToken.Val)
// Parse the included template
includeNode.filename = includedFilename
includedTpl, err := doc.template.set.FromFile(includedFilename)
if err != nil {
// if this is ReadFile error, and "if_exists" token presents we should create and empty node
if err.(*Error).Sender == "fromfile" && ifExists {
return &tagIncludeEmptyNode{}, nil
}
return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, filenameToken)
}
includeNode.tpl = includedTpl
} else {
// Not a string, so the user wants lazy evaluation (slower, but possible)
filenameEvaluator, err := arguments.ParseExpression()
if err != nil {
return nil, err.updateFromTokenIfNeeded(doc.template, filenameToken)
}
includeNode.filenameEvaluator = filenameEvaluator
includeNode.lazy = true
includeNode.ifExists = arguments.Match(TokenIdentifier, "if_exists") != nil // "if_exists" flag
}
// After having parsed the filename, parse the with/only options
if arguments.Match(TokenIdentifier, "with") != nil {
for arguments.Remaining() > 0 {
// We have at least one key=expr pair (because of starting "with")
keyToken := arguments.MatchType(TokenIdentifier)
if keyToken == nil {
return nil, arguments.Error("Expected an identifier", nil)
}
if arguments.Match(TokenSymbol, "=") == nil {
return nil, arguments.Error("Expected '='.", nil)
}
valueExpr, err := arguments.ParseExpression()
if err != nil {
return nil, err.updateFromTokenIfNeeded(doc.template, keyToken)
}
includeNode.withPairs[keyToken.Val] = valueExpr
// Only?
if arguments.Match(TokenIdentifier, "only") != nil {
includeNode.only = true
break // stop parsing arguments because it's the last option
}
}
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed 'include'-tag arguments.", nil)
}
return includeNode, nil
}
func init() {
RegisterTag("include", tagIncludeParser)
}
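// Usage sketch (hypothetical file names): the static form is parsed at compile time, the
// expression form lazily; 'if_exists' suppresses missing-file errors, 'with' adds
// key=value pairs and 'only' drops the inherited parent context.
//
//   {% include "header.html" %}
//   {% include partial_name if_exists with title="Hello" only %}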

View File

@ -1,133 +0,0 @@
package pongo2
import (
"math/rand"
"strings"
"time"
"github.com/juju/errors"
)
var (
tagLoremParagraphs = strings.Split(tagLoremText, "\n")
tagLoremWords = strings.Fields(tagLoremText)
)
type tagLoremNode struct {
position *Token
count int // number of paragraphs
method string // w = words, p = HTML paragraphs, b = plain-text (default is b)
random bool // does not use the default paragraph "Lorem ipsum dolor sit amet, ..."
}
func (node *tagLoremNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
switch node.method {
case "b":
if node.random {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString("\n")
}
par := tagLoremParagraphs[rand.Intn(len(tagLoremParagraphs))]
writer.WriteString(par)
}
} else {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString("\n")
}
par := tagLoremParagraphs[i%len(tagLoremParagraphs)]
writer.WriteString(par)
}
}
case "w":
if node.random {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString(" ")
}
word := tagLoremWords[rand.Intn(len(tagLoremWords))]
writer.WriteString(word)
}
} else {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString(" ")
}
word := tagLoremWords[i%len(tagLoremWords)]
writer.WriteString(word)
}
}
case "p":
if node.random {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString("\n")
}
writer.WriteString("<p>")
par := tagLoremParagraphs[rand.Intn(len(tagLoremParagraphs))]
writer.WriteString(par)
writer.WriteString("</p>")
}
} else {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString("\n")
}
writer.WriteString("<p>")
par := tagLoremParagraphs[i%len(tagLoremParagraphs)]
writer.WriteString(par)
writer.WriteString("</p>")
}
}
default:
return ctx.OrigError(errors.Errorf("unsupported method: %s", node.method), nil)
}
return nil
}
func tagLoremParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
loremNode := &tagLoremNode{
position: start,
count: 1,
method: "b",
}
if countToken := arguments.MatchType(TokenNumber); countToken != nil {
loremNode.count = AsValue(countToken.Val).Integer()
}
if methodToken := arguments.MatchType(TokenIdentifier); methodToken != nil {
if methodToken.Val != "w" && methodToken.Val != "p" && methodToken.Val != "b" {
return nil, arguments.Error("lorem-method must be either 'w', 'p' or 'b'.", nil)
}
loremNode.method = methodToken.Val
}
if arguments.MatchOne(TokenIdentifier, "random") != nil {
loremNode.random = true
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed lorem-tag arguments.", nil)
}
return loremNode, nil
}
func init() {
rand.Seed(time.Now().Unix())
RegisterTag("lorem", tagLoremParser)
}
const tagLoremText = `Lorem ipsum dolor sit amet, consectetur adipisici elit, sed eiusmod tempor incidunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquid ex ea commodi consequat. Quis aute iure reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint obcaecat cupiditat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.
Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi.
Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat.
Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis.
At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, At accusam aliquyam diam diam dolore dolores duo eirmod eos erat, et nonumy sed tempor et et invidunt justo labore Stet clita ea et gubergren, kasd magna no rebum. sanctus sea sed takimata ut vero voluptua. est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat.
Consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.`

View File

@ -1,149 +0,0 @@
package pongo2
import (
"bytes"
"fmt"
)
type tagMacroNode struct {
position *Token
name string
argsOrder []string
args map[string]IEvaluator
exported bool
wrapper *NodeWrapper
}
func (node *tagMacroNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
ctx.Private[node.name] = func(args ...*Value) *Value {
return node.call(ctx, args...)
}
return nil
}
func (node *tagMacroNode) call(ctx *ExecutionContext, args ...*Value) *Value {
argsCtx := make(Context)
for k, v := range node.args {
if v == nil {
// User did not provide a default value
argsCtx[k] = nil
} else {
// Evaluate the default value
valueExpr, err := v.Evaluate(ctx)
if err != nil {
ctx.Logf(err.Error())
return AsSafeValue(err.Error())
}
argsCtx[k] = valueExpr
}
}
if len(args) > len(node.argsOrder) {
// Too many arguments, we're ignoring them and just logging into debug mode.
err := ctx.Error(fmt.Sprintf("Macro '%s' called with too many arguments (%d instead of %d).",
node.name, len(args), len(node.argsOrder)), nil).updateFromTokenIfNeeded(ctx.template, node.position)
ctx.Logf(err.Error()) // TODO: This is a workaround, because the error is not returned yet to the Execution()-methods
return AsSafeValue(err.Error())
}
// Make a context for the macro execution
macroCtx := NewChildExecutionContext(ctx)
// Register all arguments in the private context
macroCtx.Private.Update(argsCtx)
for idx, argValue := range args {
macroCtx.Private[node.argsOrder[idx]] = argValue.Interface()
}
var b bytes.Buffer
err := node.wrapper.Execute(macroCtx, &b)
if err != nil {
return AsSafeValue(err.updateFromTokenIfNeeded(ctx.template, node.position).Error())
}
return AsSafeValue(b.String())
}
func tagMacroParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
macroNode := &tagMacroNode{
position: start,
args: make(map[string]IEvaluator),
}
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
return nil, arguments.Error("Macro-tag needs at least an identifier as name.", nil)
}
macroNode.name = nameToken.Val
if arguments.MatchOne(TokenSymbol, "(") == nil {
return nil, arguments.Error("Expected '('.", nil)
}
for arguments.Match(TokenSymbol, ")") == nil {
argNameToken := arguments.MatchType(TokenIdentifier)
if argNameToken == nil {
return nil, arguments.Error("Expected argument name as identifier.", nil)
}
macroNode.argsOrder = append(macroNode.argsOrder, argNameToken.Val)
if arguments.Match(TokenSymbol, "=") != nil {
// Default expression follows
argDefaultExpr, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
macroNode.args[argNameToken.Val] = argDefaultExpr
} else {
// No default expression
macroNode.args[argNameToken.Val] = nil
}
if arguments.Match(TokenSymbol, ")") != nil {
break
}
if arguments.Match(TokenSymbol, ",") == nil {
return nil, arguments.Error("Expected ',' or ')'.", nil)
}
}
if arguments.Match(TokenKeyword, "export") != nil {
macroNode.exported = true
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed macro-tag.", nil)
}
// Body wrapping
wrapper, endargs, err := doc.WrapUntilTag("endmacro")
if err != nil {
return nil, err
}
macroNode.wrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
if macroNode.exported {
// Now register the macro if it wants to be exported
_, has := doc.template.exportedMacros[macroNode.name]
if has {
return nil, doc.Error(fmt.Sprintf("another macro with name '%s' already exported", macroNode.name), start)
}
doc.template.exportedMacros[macroNode.name] = macroNode
}
return macroNode, nil
}
func init() {
RegisterTag("macro", tagMacroParser)
}
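// Usage sketch (illustrative names): a macro with a default argument, exported so other
// templates can {% import %} it; calling it with too many arguments only logs and renders
// the error message, as call() above shows.
//
//   {% macro label(text, class="form-label") export %}<span class="{{ class }}">{{ text }}</span>{% endmacro %}
//   {{ label("Name") }}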

View File

@ -1,50 +0,0 @@
package pongo2
import (
"time"
)
type tagNowNode struct {
position *Token
format string
fake bool
}
func (node *tagNowNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
var t time.Time
if node.fake {
t = time.Date(2014, time.February, 05, 18, 31, 45, 00, time.UTC)
} else {
t = time.Now()
}
writer.WriteString(t.Format(node.format))
return nil
}
func tagNowParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
nowNode := &tagNowNode{
position: start,
}
formatToken := arguments.MatchType(TokenString)
if formatToken == nil {
return nil, arguments.Error("Expected a format string.", nil)
}
nowNode.format = formatToken.Val
if arguments.MatchOne(TokenIdentifier, "fake") != nil {
nowNode.fake = true
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed now-tag arguments.", nil)
}
return nowNode, nil
}
func init() {
RegisterTag("now", tagNowParser)
}

View File

@ -1,50 +0,0 @@
package pongo2
type tagSetNode struct {
name string
expression IEvaluator
}
func (node *tagSetNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
// Evaluate expression
value, err := node.expression.Evaluate(ctx)
if err != nil {
return err
}
ctx.Private[node.name] = value
return nil
}
func tagSetParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
node := &tagSetNode{}
// Parse variable name
typeToken := arguments.MatchType(TokenIdentifier)
if typeToken == nil {
return nil, arguments.Error("Expected an identifier.", nil)
}
node.name = typeToken.Val
if arguments.Match(TokenSymbol, "=") == nil {
return nil, arguments.Error("Expected '='.", nil)
}
// Variable expression
keyExpression, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
node.expression = keyExpression
// Remaining arguments
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed 'set'-tag arguments.", nil)
}
return node, nil
}
func init() {
RegisterTag("set", tagSetParser)
}
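// Usage sketch (illustrative): the assigned name is stored in the private context of the
// current execution, so later expressions in the same scope can reference it.
//
//   {% set total = price * quantity %}
//   {{ total }}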

View File

@ -1,54 +0,0 @@
package pongo2
import (
"bytes"
"regexp"
)
type tagSpacelessNode struct {
wrapper *NodeWrapper
}
var tagSpacelessRegexp = regexp.MustCompile(`(?U:(<.*>))([\t\n\v\f\r ]+)(?U:(<.*>))`)
func (node *tagSpacelessNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
b := bytes.NewBuffer(make([]byte, 0, 1024)) // 1 KiB
err := node.wrapper.Execute(ctx, b)
if err != nil {
return err
}
s := b.String()
// Repeat the replacement until the output no longer changes
changed := true
for changed {
s2 := tagSpacelessRegexp.ReplaceAllString(s, "$1$3")
changed = s != s2
s = s2
}
writer.WriteString(s)
return nil
}
func tagSpacelessParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
spacelessNode := &tagSpacelessNode{}
wrapper, _, err := doc.WrapUntilTag("endspaceless")
if err != nil {
return nil, err
}
spacelessNode.wrapper = wrapper
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed spaceless-tag arguments.", nil)
}
return spacelessNode, nil
}
func init() {
RegisterTag("spaceless", tagSpacelessParser)
}

View File

@ -1,68 +0,0 @@
package pongo2
import (
"io/ioutil"
)
type tagSSINode struct {
filename string
content string
template *Template
}
func (node *tagSSINode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
if node.template != nil {
// Execute the template within the current context
includeCtx := make(Context)
includeCtx.Update(ctx.Public)
includeCtx.Update(ctx.Private)
err := node.template.execute(includeCtx, writer)
if err != nil {
return err.(*Error)
}
} else {
// Just print out the content
writer.WriteString(node.content)
}
return nil
}
func tagSSIParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
SSINode := &tagSSINode{}
if fileToken := arguments.MatchType(TokenString); fileToken != nil {
SSINode.filename = fileToken.Val
if arguments.Match(TokenIdentifier, "parsed") != nil {
// parsed
temporaryTpl, err := doc.template.set.FromFile(doc.template.set.resolveFilename(doc.template, fileToken.Val))
if err != nil {
return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, fileToken)
}
SSINode.template = temporaryTpl
} else {
// plaintext
buf, err := ioutil.ReadFile(doc.template.set.resolveFilename(doc.template, fileToken.Val))
if err != nil {
return nil, (&Error{
Sender: "tag:ssi",
OrigError: err,
}).updateFromTokenIfNeeded(doc.template, fileToken)
}
SSINode.content = string(buf)
}
} else {
return nil, arguments.Error("First argument must be a string.", nil)
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed SSI-tag argument.", nil)
}
return SSINode, nil
}
func init() {
RegisterTag("ssi", tagSSIParser)
}

View File

@ -1,45 +0,0 @@
package pongo2
type tagTemplateTagNode struct {
content string
}
var templateTagMapping = map[string]string{
"openblock": "{%",
"closeblock": "%}",
"openvariable": "{{",
"closevariable": "}}",
"openbrace": "{",
"closebrace": "}",
"opencomment": "{#",
"closecomment": "#}",
}
func (node *tagTemplateTagNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
writer.WriteString(node.content)
return nil
}
func tagTemplateTagParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ttNode := &tagTemplateTagNode{}
if argToken := arguments.MatchType(TokenIdentifier); argToken != nil {
output, found := templateTagMapping[argToken.Val]
if !found {
return nil, arguments.Error("Argument not found", argToken)
}
ttNode.content = output
} else {
return nil, arguments.Error("Identifier expected.", nil)
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed templatetag-tag argument.", nil)
}
return ttNode, nil
}
func init() {
RegisterTag("templatetag", tagTemplateTagParser)
}

View File

@ -1,83 +0,0 @@
package pongo2
import (
"fmt"
"math"
)
type tagWidthratioNode struct {
position *Token
current, max IEvaluator
width IEvaluator
ctxName string
}
func (node *tagWidthratioNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
current, err := node.current.Evaluate(ctx)
if err != nil {
return err
}
max, err := node.max.Evaluate(ctx)
if err != nil {
return err
}
width, err := node.width.Evaluate(ctx)
if err != nil {
return err
}
value := int(math.Ceil(current.Float()/max.Float()*width.Float() + 0.5))
if node.ctxName == "" {
writer.WriteString(fmt.Sprintf("%d", value))
} else {
ctx.Private[node.ctxName] = value
}
return nil
}
func tagWidthratioParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
widthratioNode := &tagWidthratioNode{
position: start,
}
current, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
widthratioNode.current = current
max, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
widthratioNode.max = max
width, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
widthratioNode.width = width
if arguments.MatchOne(TokenKeyword, "as") != nil {
// Name follows
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
return nil, arguments.Error("Expected name (identifier).", nil)
}
widthratioNode.ctxName = nameToken.Val
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed widthratio-tag arguments.", nil)
}
return widthratioNode, nil
}
func init() {
RegisterTag("widthratio", tagWidthratioParser)
}
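// Usage sketch (illustrative numbers): the rendered value is ceil(current/max*width + 0.5),
// so current=175, max=200, width=100 yields ceil(87.5 + 0.5) = 88; with 'as' the result is
// stored in the context instead of being written out.
//
//   {% widthratio 175 200 100 %}
//   {% widthratio current max 100 as ratio %}{{ ratio }}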

View File

@ -1,88 +0,0 @@
package pongo2
type tagWithNode struct {
withPairs map[string]IEvaluator
wrapper *NodeWrapper
}
func (node *tagWithNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
// New context for the block
withctx := NewChildExecutionContext(ctx)
// Put all custom with-pairs into the context
for key, value := range node.withPairs {
val, err := value.Evaluate(ctx)
if err != nil {
return err
}
withctx.Private[key] = val
}
return node.wrapper.Execute(withctx, writer)
}
func tagWithParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
withNode := &tagWithNode{
withPairs: make(map[string]IEvaluator),
}
if arguments.Count() == 0 {
return nil, arguments.Error("Tag 'with' requires at least one argument.", nil)
}
wrapper, endargs, err := doc.WrapUntilTag("endwith")
if err != nil {
return nil, err
}
withNode.wrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
// Scan through all arguments to see which style the user uses (old or new style).
// If we find any "as" keyword we will enforce old style; otherwise we will use new style.
oldStyle := false // by default we're using the new style
for i := 0; i < arguments.Count(); i++ {
if arguments.PeekN(i, TokenKeyword, "as") != nil {
oldStyle = true
break
}
}
for arguments.Remaining() > 0 {
if oldStyle {
valueExpr, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
if arguments.Match(TokenKeyword, "as") == nil {
return nil, arguments.Error("Expected 'as' keyword.", nil)
}
keyToken := arguments.MatchType(TokenIdentifier)
if keyToken == nil {
return nil, arguments.Error("Expected an identifier", nil)
}
withNode.withPairs[keyToken.Val] = valueExpr
} else {
keyToken := arguments.MatchType(TokenIdentifier)
if keyToken == nil {
return nil, arguments.Error("Expected an identifier", nil)
}
if arguments.Match(TokenSymbol, "=") == nil {
return nil, arguments.Error("Expected '='.", nil)
}
valueExpr, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
withNode.withPairs[keyToken.Val] = valueExpr
}
}
return withNode, nil
}
func init() {
RegisterTag("with", tagWithParser)
}
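For orientation, a short sketch (an editor's illustration, not part of the vendored file) of the two argument styles tagWithParser accepts, rendered through the package-level FromString/Execute helpers defined later in this diff:
package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

func main() {
	// New style: identifier=expression pairs.
	tpl := pongo2.Must(pongo2.FromString(
		`{% with greeting="Hello" %}{{ greeting }}, {{ name }}!{% endwith %}`))
	out, _ := tpl.Execute(pongo2.Context{"name": "World"})
	fmt.Println(out) // Hello, World!

	// Old style: expression "as" identifier.
	tpl = pongo2.Must(pongo2.FromString(
		`{% with name as who %}Hi {{ who }}!{% endwith %}`))
	out, _ = tpl.Execute(pongo2.Context{"name": "pongo2"})
	fmt.Println(out) // Hi pongo2!
}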

View File

@ -1,277 +0,0 @@
package pongo2
import (
"bytes"
"io"
"strings"
"github.com/juju/errors"
)
type TemplateWriter interface {
io.Writer
WriteString(string) (int, error)
}
type templateWriter struct {
w io.Writer
}
func (tw *templateWriter) WriteString(s string) (int, error) {
return tw.w.Write([]byte(s))
}
func (tw *templateWriter) Write(b []byte) (int, error) {
return tw.w.Write(b)
}
type Template struct {
set *TemplateSet
// Input
isTplString bool
name string
tpl string
size int
// Calculation
tokens []*Token
parser *Parser
// first come, first served (it's important not to override existing entries in here)
level int
parent *Template
child *Template
blocks map[string]*NodeWrapper
exportedMacros map[string]*tagMacroNode
// Output
root *nodeDocument
// Options allow you to change the behavior of the template engine.
// You can change the options before calling the Execute method.
Options *Options
}
func newTemplateString(set *TemplateSet, tpl []byte) (*Template, error) {
return newTemplate(set, "<string>", true, tpl)
}
func newTemplate(set *TemplateSet, name string, isTplString bool, tpl []byte) (*Template, error) {
strTpl := string(tpl)
// Create the template
t := &Template{
set: set,
isTplString: isTplString,
name: name,
tpl: strTpl,
size: len(strTpl),
blocks: make(map[string]*NodeWrapper),
exportedMacros: make(map[string]*tagMacroNode),
Options: newOptions(),
}
// Copy all settings from the template set's Options.
t.Options.Update(set.Options)
// Tokenize it
tokens, err := lex(name, strTpl)
if err != nil {
return nil, err
}
t.tokens = tokens
// For debugging purposes, show all tokens:
/*for i, t := range tokens {
fmt.Printf("%3d. %s\n", i, t)
}*/
// Parse it
err = t.parse()
if err != nil {
return nil, err
}
return t, nil
}
func (tpl *Template) newContextForExecution(context Context) (*Template, *ExecutionContext, error) {
if tpl.Options.TrimBlocks || tpl.Options.LStripBlocks {
// Issue #94 https://github.com/flosch/pongo2/issues/94
// If an application configures pongo2 template to trim_blocks,
// the first newline after a template tag is removed automatically (like in PHP).
prev := &Token{
Typ: TokenHTML,
Val: "\n",
}
for _, t := range tpl.tokens {
if tpl.Options.LStripBlocks {
if prev.Typ == TokenHTML && t.Typ != TokenHTML && t.Val == "{%" {
prev.Val = strings.TrimRight(prev.Val, "\t ")
}
}
if tpl.Options.TrimBlocks {
if prev.Typ != TokenHTML && t.Typ == TokenHTML && prev.Val == "%}" {
if len(t.Val) > 0 && t.Val[0] == '\n' {
t.Val = t.Val[1:]
}
}
}
prev = t
}
}
// Determine the parent to be executed (for template inheritance)
parent := tpl
for parent.parent != nil {
parent = parent.parent
}
// Create context if none is given
newContext := make(Context)
newContext.Update(tpl.set.Globals)
if context != nil {
newContext.Update(context)
if len(newContext) > 0 {
// Check for context name syntax
err := newContext.checkForValidIdentifiers()
if err != nil {
return parent, nil, err
}
// Check for clashes with macro names
for k := range newContext {
_, has := tpl.exportedMacros[k]
if has {
return parent, nil, &Error{
Filename: tpl.name,
Sender: "execution",
OrigError: errors.Errorf("context key name '%s' clashes with macro '%s'", k, k),
}
}
}
}
}
// Create operational context
ctx := newExecutionContext(parent, newContext)
return parent, ctx, nil
}
func (tpl *Template) execute(context Context, writer TemplateWriter) error {
parent, ctx, err := tpl.newContextForExecution(context)
if err != nil {
return err
}
// Run the selected document
if err := parent.root.Execute(ctx, writer); err != nil {
return err
}
return nil
}
func (tpl *Template) newTemplateWriterAndExecute(context Context, writer io.Writer) error {
return tpl.execute(context, &templateWriter{w: writer})
}
func (tpl *Template) newBufferAndExecute(context Context) (*bytes.Buffer, error) {
// Create output buffer
// We assume that the rendered template will be 30% larger
buffer := bytes.NewBuffer(make([]byte, 0, int(float64(tpl.size)*1.3)))
if err := tpl.execute(context, buffer); err != nil {
return nil, err
}
return buffer, nil
}
// ExecuteWriter executes the template with the given context and writes the rendered
// output to writer (an io.Writer) on success. Context can be nil. Nothing is written
// on error; the error is returned instead.
func (tpl *Template) ExecuteWriter(context Context, writer io.Writer) error {
buf, err := tpl.newBufferAndExecute(context)
if err != nil {
return err
}
_, err = buf.WriteTo(writer)
if err != nil {
return err
}
return nil
}
// ExecuteWriterUnbuffered works like ExecuteWriter, except that it might already have
// written parts of the generated template when an execution error occurs, because no
// intermediate buffer is involved (for performance reasons). This is handy if you need
// high-performance template generation or if you want to manage your own pool of buffers.
func (tpl *Template) ExecuteWriterUnbuffered(context Context, writer io.Writer) error {
return tpl.newTemplateWriterAndExecute(context, writer)
}
// Executes the template and returns the rendered template as a []byte
func (tpl *Template) ExecuteBytes(context Context) ([]byte, error) {
// Execute template
buffer, err := tpl.newBufferAndExecute(context)
if err != nil {
return nil, err
}
return buffer.Bytes(), nil
}
// Executes the template and returns the rendered template as a string
func (tpl *Template) Execute(context Context) (string, error) {
// Execute template
buffer, err := tpl.newBufferAndExecute(context)
if err != nil {
return "", err
}
return buffer.String(), nil
}
func (tpl *Template) ExecuteBlocks(context Context, blocks []string) (map[string]string, error) {
var parents []*Template
result := make(map[string]string)
parent := tpl
for parent != nil {
parents = append(parents, parent)
parent = parent.parent
}
for _, t := range parents {
buffer := bytes.NewBuffer(make([]byte, 0, int(float64(t.size)*1.3)))
_, ctx, err := t.newContextForExecution(context)
if err != nil {
return nil, err
}
for _, blockName := range blocks {
if _, ok := result[blockName]; ok {
continue
}
if blockWrapper, ok := t.blocks[blockName]; ok {
bErr := blockWrapper.Execute(ctx, buffer)
if bErr != nil {
return nil, bErr
}
result[blockName] = buffer.String()
buffer.Reset()
}
}
// We have found all blocks
if len(blocks) == len(result) {
break
}
}
return result, nil
}
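A brief sketch (an editor's illustration, not part of the vendored file) of rendering individual blocks via ExecuteBlocks, assuming the standard {% block %} tag is registered:
package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

func main() {
	tpl := pongo2.Must(pongo2.FromString(
		`{% block title %}Hi {{ name }}{% endblock %}{% block body %}...{% endblock %}`))
	// Render only the named blocks instead of the whole document.
	blocks, err := tpl.ExecuteBlocks(pongo2.Context{"name": "World"}, []string{"title"})
	if err != nil {
		panic(err)
	}
	fmt.Println(blocks["title"]) // Hi World
}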

View File

@ -1,157 +0,0 @@
package pongo2
import (
"bytes"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"github.com/juju/errors"
)
// LocalFilesystemLoader represents a local filesystem loader with basic
// BaseDirectory capabilities. The access to the local filesystem is unrestricted.
type LocalFilesystemLoader struct {
baseDir string
}
// MustNewLocalFileSystemLoader creates a new LocalFilesystemLoader instance
// and panics if there's any error during instantiation. The parameters
// are the same as for NewLocalFileSystemLoader.
func MustNewLocalFileSystemLoader(baseDir string) *LocalFilesystemLoader {
fs, err := NewLocalFileSystemLoader(baseDir)
if err != nil {
log.Panic(err)
}
return fs
}
// NewLocalFileSystemLoader creates a new LocalFilesystemLoader and allows
// templates to be loaded from disk (unrestricted). If a base directory
// is given (or set later using SetBaseDir), this base directory is used
// for path calculation in template inclusions/imports. Otherwise the path
// is calculated relative to the including template's path.
func NewLocalFileSystemLoader(baseDir string) (*LocalFilesystemLoader, error) {
fs := &LocalFilesystemLoader{}
if baseDir != "" {
if err := fs.SetBaseDir(baseDir); err != nil {
return nil, err
}
}
return fs, nil
}
// SetBaseDir sets the template's base directory. This directory will
// be used for any relative path in filters, tags and From*-functions to determine
// your template. See the comment for NewLocalFileSystemLoader as well.
func (fs *LocalFilesystemLoader) SetBaseDir(path string) error {
// Make the path absolute
if !filepath.IsAbs(path) {
abs, err := filepath.Abs(path)
if err != nil {
return err
}
path = abs
}
// Check for existence
fi, err := os.Stat(path)
if err != nil {
return err
}
if !fi.IsDir() {
return errors.Errorf("The given path '%s' is not a directory.", path)
}
fs.baseDir = path
return nil
}
// Get reads the path's content from your local filesystem.
func (fs *LocalFilesystemLoader) Get(path string) (io.Reader, error) {
buf, err := ioutil.ReadFile(path)
if err != nil {
return nil, err
}
return bytes.NewReader(buf), nil
}
// Abs resolves a filename relative to the base directory. Absolute paths are allowed.
// When there's no base dir set, the absolute path to the filename
// is calculated based on either the provided base (which
// might be the path of a template that includes another template) or
// the current working directory.
func (fs *LocalFilesystemLoader) Abs(base, name string) string {
if filepath.IsAbs(name) {
return name
}
// Our own base dir always has priority; if there's none
// we use the path provided in base.
var err error
if fs.baseDir == "" {
if base == "" {
base, err = os.Getwd()
if err != nil {
panic(err)
}
return filepath.Join(base, name)
}
return filepath.Join(filepath.Dir(base), name)
}
return filepath.Join(fs.baseDir, name)
}
// SandboxedFilesystemLoader is still WIP.
type SandboxedFilesystemLoader struct {
*LocalFilesystemLoader
}
// NewSandboxedFilesystemLoader creates a new sandboxed local file system instance.
func NewSandboxedFilesystemLoader(baseDir string) (*SandboxedFilesystemLoader, error) {
fs, err := NewLocalFileSystemLoader(baseDir)
if err != nil {
return nil, err
}
return &SandboxedFilesystemLoader{
LocalFilesystemLoader: fs,
}, nil
}
// Move sandbox to a virtual fs
/*
if len(set.SandboxDirectories) > 0 {
defer func() {
// Remove any ".." or other crap
resolvedPath = filepath.Clean(resolvedPath)
// Make the path absolute
absPath, err := filepath.Abs(resolvedPath)
if err != nil {
panic(err)
}
resolvedPath = absPath
// Check against the sandbox directories (once one pattern matches, we're done and can allow it)
for _, pattern := range set.SandboxDirectories {
matched, err := filepath.Match(pattern, resolvedPath)
if err != nil {
panic("Wrong sandbox directory match pattern (see http://golang.org/pkg/path/filepath/#Match).")
}
if matched {
// OK!
return
}
}
// No pattern matched, we have to log+deny the request
set.logf("Access attempt outside of the sandbox directories (blocked): '%s'", resolvedPath)
resolvedPath = ""
}()
}
*/
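A minimal sketch (an editor's illustration, not part of the vendored file) of wiring a LocalFilesystemLoader into a set; the "templates" directory and "index.html" file are assumptions:
package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

func main() {
	loader := pongo2.MustNewLocalFileSystemLoader("templates") // panics if the directory is missing
	set := pongo2.NewSet("web", loader)

	// Abs joins relative names onto the loader's (absolute) base directory.
	fmt.Println(loader.Abs("", "index.html"))

	tpl, err := set.FromFile("index.html")
	if err != nil {
		panic(err)
	}
	out, _ := tpl.Execute(pongo2.Context{"name": "World"})
	fmt.Println(out)
}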

View File

@ -1,305 +0,0 @@
package pongo2
import (
"fmt"
"io"
"io/ioutil"
"log"
"os"
"sync"
"github.com/juju/errors"
)
// TemplateLoader allows you to implement a virtual file system.
type TemplateLoader interface {
// Abs calculates the path to a given template. Whenever a path must be resolved
// due to an import from another template, the base equals the parent template's path.
Abs(base, name string) string
// Get returns an io.Reader where the template's content can be read from.
Get(path string) (io.Reader, error)
}
// TemplateSet allows you to create your own group of templates with their own
// global context (which is shared among all members of the set) and their own
// configuration.
// It's useful for separating different kinds of templates
// (e.g. web templates vs. mail templates).
type TemplateSet struct {
name string
loaders []TemplateLoader
// Globals will be provided to all templates created within this template set
Globals Context
// If debug is true (default false), ExecutionContext.Logf() will work and output
// to STDOUT. Furthermore, FromCache() won't cache the templates.
// Make sure to synchronize access to it if you change this
// variable during program execution (and template compilation/execution).
Debug bool
// Options allow you to change the behavior of the template engine.
// You can change the options before calling the Execute method.
Options *Options
// Sandbox features
// - Disallow access to specific tags and/or filters (using BanTag() and BanFilter())
//
// For efficiency reasons you can ban tags/filters only *before* you have
// added your first template to the set (restrictions are statically checked).
// After you've added one, it's no longer possible (for your own security).
firstTemplateCreated bool
bannedTags map[string]bool
bannedFilters map[string]bool
// Template cache (for FromCache())
templateCache map[string]*Template
templateCacheMutex sync.Mutex
}
// NewSet can be used to create sets with different kinds of templates
// (e.g. to separate web from mail templates), with different globals or
// other configurations.
func NewSet(name string, loaders ...TemplateLoader) *TemplateSet {
if len(loaders) == 0 {
panic(fmt.Errorf("at least one template loader must be specified"))
}
return &TemplateSet{
name: name,
loaders: loaders,
Globals: make(Context),
bannedTags: make(map[string]bool),
bannedFilters: make(map[string]bool),
templateCache: make(map[string]*Template),
Options: newOptions(),
}
}
func (set *TemplateSet) AddLoader(loaders ...TemplateLoader) {
set.loaders = append(set.loaders, loaders...)
}
func (set *TemplateSet) resolveFilename(tpl *Template, path string) string {
return set.resolveFilenameForLoader(set.loaders[0], tpl, path)
}
func (set *TemplateSet) resolveFilenameForLoader(loader TemplateLoader, tpl *Template, path string) string {
name := ""
if tpl != nil && tpl.isTplString {
return path
}
if tpl != nil {
name = tpl.name
}
return loader.Abs(name, path)
}
// BanTag bans a specific tag for this template set. See more in the documentation for TemplateSet.
func (set *TemplateSet) BanTag(name string) error {
_, has := tags[name]
if !has {
return errors.Errorf("tag '%s' not found", name)
}
if set.firstTemplateCreated {
return errors.New("you cannot ban any tags after you've added your first template to your template set")
}
_, has = set.bannedTags[name]
if has {
return errors.Errorf("tag '%s' is already banned", name)
}
set.bannedTags[name] = true
return nil
}
// BanFilter bans a specific filter for this template set. See more in the documentation for TemplateSet.
func (set *TemplateSet) BanFilter(name string) error {
_, has := filters[name]
if !has {
return errors.Errorf("filter '%s' not found", name)
}
if set.firstTemplateCreated {
return errors.New("you cannot ban any filters after you've added your first template to your template set")
}
_, has = set.bannedFilters[name]
if has {
return errors.Errorf("filter '%s' is already banned", name)
}
set.bannedFilters[name] = true
return nil
}
func (set *TemplateSet) resolveTemplate(tpl *Template, path string) (name string, loader TemplateLoader, fd io.Reader, err error) {
// iterate over loaders until we appear to have a valid template
for _, loader = range set.loaders {
name = set.resolveFilenameForLoader(loader, tpl, path)
fd, err = loader.Get(name)
if err == nil {
return
}
}
return path, nil, nil, fmt.Errorf("unable to resolve template")
}
// CleanCache cleans the template cache. If filenames is not empty,
// it removes the cache entries for those filenames;
// otherwise it empties the whole template cache. It is thread-safe.
func (set *TemplateSet) CleanCache(filenames ...string) {
set.templateCacheMutex.Lock()
defer set.templateCacheMutex.Unlock()
if len(filenames) == 0 {
set.templateCache = make(map[string]*Template, len(set.templateCache))
}
for _, filename := range filenames {
delete(set.templateCache, set.resolveFilename(nil, filename))
}
}
// FromCache is a convenient method to cache templates. It is thread-safe
// and will only compile the template associated with a filename once.
// If TemplateSet.Debug is true (for example during development phase),
// FromCache() will not cache the template and instead recompile it on any
// call (to make changes to a template live instantaneously).
func (set *TemplateSet) FromCache(filename string) (*Template, error) {
if set.Debug {
// Recompile on any request
return set.FromFile(filename)
}
// Cache the template
cleanedFilename := set.resolveFilename(nil, filename)
set.templateCacheMutex.Lock()
defer set.templateCacheMutex.Unlock()
tpl, has := set.templateCache[cleanedFilename]
// Cache miss
if !has {
tpl, err := set.FromFile(cleanedFilename)
if err != nil {
return nil, err
}
set.templateCache[cleanedFilename] = tpl
return tpl, nil
}
// Cache hit
return tpl, nil
}
// FromString loads a template from string and returns a Template instance.
func (set *TemplateSet) FromString(tpl string) (*Template, error) {
set.firstTemplateCreated = true
return newTemplateString(set, []byte(tpl))
}
// FromBytes loads a template from bytes and returns a Template instance.
func (set *TemplateSet) FromBytes(tpl []byte) (*Template, error) {
set.firstTemplateCreated = true
return newTemplateString(set, tpl)
}
// FromFile loads a template from a filename and returns a Template instance.
func (set *TemplateSet) FromFile(filename string) (*Template, error) {
set.firstTemplateCreated = true
_, _, fd, err := set.resolveTemplate(nil, filename)
if err != nil {
return nil, &Error{
Filename: filename,
Sender: "fromfile",
OrigError: err,
}
}
buf, err := ioutil.ReadAll(fd)
if err != nil {
return nil, &Error{
Filename: filename,
Sender: "fromfile",
OrigError: err,
}
}
return newTemplate(set, filename, false, buf)
}
// RenderTemplateString is a shortcut and renders a template string directly.
func (set *TemplateSet) RenderTemplateString(s string, ctx Context) (string, error) {
set.firstTemplateCreated = true
tpl := Must(set.FromString(s))
result, err := tpl.Execute(ctx)
if err != nil {
return "", err
}
return result, nil
}
// RenderTemplateBytes is a shortcut and renders template bytes directly.
func (set *TemplateSet) RenderTemplateBytes(b []byte, ctx Context) (string, error) {
set.firstTemplateCreated = true
tpl := Must(set.FromBytes(b))
result, err := tpl.Execute(ctx)
if err != nil {
return "", err
}
return result, nil
}
// RenderTemplateFile is a shortcut and renders a template file directly.
func (set *TemplateSet) RenderTemplateFile(fn string, ctx Context) (string, error) {
set.firstTemplateCreated = true
tpl := Must(set.FromFile(fn))
result, err := tpl.Execute(ctx)
if err != nil {
return "", err
}
return result, nil
}
func (set *TemplateSet) logf(format string, args ...interface{}) {
if set.Debug {
logger.Printf(fmt.Sprintf("[template set: %s] %s", set.name, format), args...)
}
}
// Logging function (internally used)
func logf(format string, items ...interface{}) {
if debug {
logger.Printf(format, items...)
}
}
var (
debug bool // internal debugging
logger = log.New(os.Stdout, "[pongo2] ", log.LstdFlags|log.Lshortfile)
// DefaultLoader allows the default un-sandboxed access to the local file
// system and is used by the DefaultSet.
DefaultLoader = MustNewLocalFileSystemLoader("")
// DefaultSet is a set created for you for convenience reasons.
DefaultSet = NewSet("default", DefaultLoader)
// Methods on the default set
FromString = DefaultSet.FromString
FromBytes = DefaultSet.FromBytes
FromFile = DefaultSet.FromFile
FromCache = DefaultSet.FromCache
RenderTemplateString = DefaultSet.RenderTemplateString
RenderTemplateFile = DefaultSet.RenderTemplateFile
// Globals for the default set
Globals = DefaultSet.Globals
)
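A short sketch (an editor's illustration, not part of the vendored file) of a dedicated set with its own globals, a sandbox restriction, and the compile cache; the directory and template names are assumptions:
package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

func main() {
	set := pongo2.NewSet("mail", pongo2.MustNewLocalFileSystemLoader("mail-templates"))
	set.Globals["footer"] = "Sent by example.com"

	// Bans are only allowed before the first template has been added to the set.
	if err := set.BanTag("include"); err != nil {
		panic(err)
	}

	// FromCache compiles once and serves the cached template afterwards
	// (unless set.Debug is true, in which case it recompiles on every call).
	tpl, err := set.FromCache("welcome.tpl")
	if err != nil {
		panic(err)
	}
	out, _ := tpl.Execute(pongo2.Context{"name": "World"})
	fmt.Println(out)
}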

View File

@ -1,520 +0,0 @@
package pongo2
import (
"fmt"
"reflect"
"sort"
"strconv"
"strings"
)
type Value struct {
val reflect.Value
safe bool // used to indicate whether a Value needs explicit escaping in the template
}
// AsValue converts any given value to a pongo2.Value.
// It is usually used within your own functions passed to a template
// through a Context or within filter functions.
//
// Example:
// AsValue("my string")
func AsValue(i interface{}) *Value {
return &Value{
val: reflect.ValueOf(i),
}
}
// AsSafeValue works like AsValue, but does not apply the 'escape' filter.
func AsSafeValue(i interface{}) *Value {
return &Value{
val: reflect.ValueOf(i),
safe: true,
}
}
func (v *Value) getResolvedValue() reflect.Value {
if v.val.IsValid() && v.val.Kind() == reflect.Ptr {
return v.val.Elem()
}
return v.val
}
// IsString checks whether the underlying value is a string
func (v *Value) IsString() bool {
return v.getResolvedValue().Kind() == reflect.String
}
// IsBool checks whether the underlying value is a bool
func (v *Value) IsBool() bool {
return v.getResolvedValue().Kind() == reflect.Bool
}
// IsFloat checks whether the underlying value is a float
func (v *Value) IsFloat() bool {
return v.getResolvedValue().Kind() == reflect.Float32 ||
v.getResolvedValue().Kind() == reflect.Float64
}
// IsInteger checks whether the underlying value is an integer
func (v *Value) IsInteger() bool {
return v.getResolvedValue().Kind() == reflect.Int ||
v.getResolvedValue().Kind() == reflect.Int8 ||
v.getResolvedValue().Kind() == reflect.Int16 ||
v.getResolvedValue().Kind() == reflect.Int32 ||
v.getResolvedValue().Kind() == reflect.Int64 ||
v.getResolvedValue().Kind() == reflect.Uint ||
v.getResolvedValue().Kind() == reflect.Uint8 ||
v.getResolvedValue().Kind() == reflect.Uint16 ||
v.getResolvedValue().Kind() == reflect.Uint32 ||
v.getResolvedValue().Kind() == reflect.Uint64
}
// IsNumber checks whether the underlying value is either an integer
// or a float.
func (v *Value) IsNumber() bool {
return v.IsInteger() || v.IsFloat()
}
// IsNil checks whether the underlying value is NIL
func (v *Value) IsNil() bool {
//fmt.Printf("%+v\n", v.getResolvedValue().Type().String())
return !v.getResolvedValue().IsValid()
}
// String returns a string for the underlying value. If this value is not
// of type string, pongo2 tries to convert it. Currently the following
// types for underlying values are supported:
//
// 1. string
// 2. int/uint (any size)
// 3. float (any precision)
// 4. bool
// 5. time.Time
// 6. String() will be called on the underlying value if provided
//
// NIL values will lead to an empty string. Unsupported types lead
// to their respective type name.
func (v *Value) String() string {
if v.IsNil() {
return ""
}
switch v.getResolvedValue().Kind() {
case reflect.String:
return v.getResolvedValue().String()
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return strconv.FormatInt(v.getResolvedValue().Int(), 10)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return strconv.FormatUint(v.getResolvedValue().Uint(), 10)
case reflect.Float32, reflect.Float64:
return fmt.Sprintf("%f", v.getResolvedValue().Float())
case reflect.Bool:
if v.Bool() {
return "True"
}
return "False"
case reflect.Struct:
if t, ok := v.Interface().(fmt.Stringer); ok {
return t.String()
}
}
logf("Value.String() not implemented for type: %s\n", v.getResolvedValue().Kind().String())
return v.getResolvedValue().String()
}
// Integer returns the underlying value as an integer (converts the underlying
// value, if necessary). If it's not possible to convert the underlying value,
// it will return 0.
func (v *Value) Integer() int {
switch v.getResolvedValue().Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return int(v.getResolvedValue().Int())
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return int(v.getResolvedValue().Uint())
case reflect.Float32, reflect.Float64:
return int(v.getResolvedValue().Float())
case reflect.String:
// Try to convert the string to a number (parsed as float64, then truncated to int)
f, err := strconv.ParseFloat(v.getResolvedValue().String(), 64)
if err != nil {
return 0
}
return int(f)
default:
logf("Value.Integer() not available for type: %s\n", v.getResolvedValue().Kind().String())
return 0
}
}
// Float returns the underlying value as a float (converts the underlying
// value, if necessary). If it's not possible to convert the underlying value,
// it will return 0.0.
func (v *Value) Float() float64 {
switch v.getResolvedValue().Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return float64(v.getResolvedValue().Int())
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return float64(v.getResolvedValue().Uint())
case reflect.Float32, reflect.Float64:
return v.getResolvedValue().Float()
case reflect.String:
// Try to convert from string to float64 (base 10)
f, err := strconv.ParseFloat(v.getResolvedValue().String(), 64)
if err != nil {
return 0.0
}
return f
default:
logf("Value.Float() not available for type: %s\n", v.getResolvedValue().Kind().String())
return 0.0
}
}
// Bool returns the underlying value as a bool. If the value is not a bool, false
// will always be returned. If you're looking for a true/false evaluation of the
// underlying value, have a look at the IsTrue() function.
func (v *Value) Bool() bool {
switch v.getResolvedValue().Kind() {
case reflect.Bool:
return v.getResolvedValue().Bool()
default:
logf("Value.Bool() not available for type: %s\n", v.getResolvedValue().Kind().String())
return false
}
}
// IsTrue tries to evaluate the underlying value the Pythonic-way:
//
// Returns TRUE in one of the following cases:
//
// * int != 0
// * uint != 0
// * float != 0.0
// * len(array/chan/map/slice/string) > 0
// * bool == true
// * underlying value is a struct
//
// Otherwise it always returns FALSE.
func (v *Value) IsTrue() bool {
switch v.getResolvedValue().Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return v.getResolvedValue().Int() != 0
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return v.getResolvedValue().Uint() != 0
case reflect.Float32, reflect.Float64:
return v.getResolvedValue().Float() != 0
case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
return v.getResolvedValue().Len() > 0
case reflect.Bool:
return v.getResolvedValue().Bool()
case reflect.Struct:
return true // struct instance is always true
default:
logf("Value.IsTrue() not available for type: %s\n", v.getResolvedValue().Kind().String())
return false
}
}
// Negate tries to negate the underlying value. It's mainly used for
// the NOT-operator and in conjunction with a call to
// return_value.IsTrue() afterwards.
//
// Example:
// AsValue(1).Negate().IsTrue() == false
func (v *Value) Negate() *Value {
switch v.getResolvedValue().Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
if v.Integer() != 0 {
return AsValue(0)
}
return AsValue(1)
case reflect.Float32, reflect.Float64:
if v.Float() != 0.0 {
return AsValue(float64(0.0))
}
return AsValue(float64(1.1))
case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
return AsValue(v.getResolvedValue().Len() == 0)
case reflect.Bool:
return AsValue(!v.getResolvedValue().Bool())
case reflect.Struct:
return AsValue(false)
default:
logf("Value.IsTrue() not available for type: %s\n", v.getResolvedValue().Kind().String())
return AsValue(true)
}
}
// Len returns the length for an array, chan, map, slice or string.
// Otherwise it will return 0.
func (v *Value) Len() int {
switch v.getResolvedValue().Kind() {
case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice:
return v.getResolvedValue().Len()
case reflect.String:
runes := []rune(v.getResolvedValue().String())
return len(runes)
default:
logf("Value.Len() not available for type: %s\n", v.getResolvedValue().Kind().String())
return 0
}
}
// Slice slices an array, slice or string. Otherwise it will
// return an empty []int.
func (v *Value) Slice(i, j int) *Value {
switch v.getResolvedValue().Kind() {
case reflect.Array, reflect.Slice:
return AsValue(v.getResolvedValue().Slice(i, j).Interface())
case reflect.String:
runes := []rune(v.getResolvedValue().String())
return AsValue(string(runes[i:j]))
default:
logf("Value.Slice() not available for type: %s\n", v.getResolvedValue().Kind().String())
return AsValue([]int{})
}
}
// Index gets the i-th item of an array, slice or string. Otherwise
// it will return NIL.
func (v *Value) Index(i int) *Value {
switch v.getResolvedValue().Kind() {
case reflect.Array, reflect.Slice:
if i >= v.Len() {
return AsValue(nil)
}
return AsValue(v.getResolvedValue().Index(i).Interface())
case reflect.String:
//return AsValue(v.getResolvedValue().Slice(i, i+1).Interface())
s := v.getResolvedValue().String()
runes := []rune(s)
if i < len(runes) {
return AsValue(string(runes[i]))
}
return AsValue("")
default:
logf("Value.Slice() not available for type: %s\n", v.getResolvedValue().Kind().String())
return AsValue([]int{})
}
}
// Contains checks whether the underlying value (which must be of type struct, map,
// string, array or slice) contains another Value (e.g. used to check
// whether a struct contains a specific field or a map contains a specific key).
//
// Example:
// AsValue("Hello, World!").Contains(AsValue("World")) == true
func (v *Value) Contains(other *Value) bool {
switch v.getResolvedValue().Kind() {
case reflect.Struct:
fieldValue := v.getResolvedValue().FieldByName(other.String())
return fieldValue.IsValid()
case reflect.Map:
var mapValue reflect.Value
switch other.Interface().(type) {
case int:
mapValue = v.getResolvedValue().MapIndex(other.getResolvedValue())
case string:
mapValue = v.getResolvedValue().MapIndex(other.getResolvedValue())
default:
logf("Value.Contains() does not support lookup type '%s'\n", other.getResolvedValue().Kind().String())
return false
}
return mapValue.IsValid()
case reflect.String:
return strings.Contains(v.getResolvedValue().String(), other.String())
case reflect.Slice, reflect.Array:
for i := 0; i < v.getResolvedValue().Len(); i++ {
item := v.getResolvedValue().Index(i)
if other.Interface() == item.Interface() {
return true
}
}
return false
default:
logf("Value.Contains() not available for type: %s\n", v.getResolvedValue().Kind().String())
return false
}
}
// CanSlice checks whether the underlying value is of type array, slice or string.
// You normally would use CanSlice() before using the Slice() operation.
func (v *Value) CanSlice() bool {
switch v.getResolvedValue().Kind() {
case reflect.Array, reflect.Slice, reflect.String:
return true
}
return false
}
// Iterate iterates over a map, array, slice or a string. It calls fn
// (its first argument) once for every item, with the following arguments:
//
// idx current 0-based index
// count total number of items
// key *Value for the key or item
// value *Value (only for maps, the respective value for a specific key)
//
// If the underlying value has no items or is not one of the types above,
// the empty function (the second argument) will be called.
func (v *Value) Iterate(fn func(idx, count int, key, value *Value) bool, empty func()) {
v.IterateOrder(fn, empty, false, false)
}
// IterateOrder behaves like Value.Iterate, but can iterate through an array/slice/string in reverse. Does
// not affect the iteration through a map because maps don't have any particular order.
// However, you can force an order using the `sorted` keyword (and even use `reversed sorted`).
func (v *Value) IterateOrder(fn func(idx, count int, key, value *Value) bool, empty func(), reverse bool, sorted bool) {
switch v.getResolvedValue().Kind() {
case reflect.Map:
keys := sortedKeys(v.getResolvedValue().MapKeys())
if sorted {
if reverse {
sort.Sort(sort.Reverse(keys))
} else {
sort.Sort(keys)
}
}
keyLen := len(keys)
for idx, key := range keys {
value := v.getResolvedValue().MapIndex(key)
if !fn(idx, keyLen, &Value{val: key}, &Value{val: value}) {
return
}
}
if keyLen == 0 {
empty()
}
return // done
case reflect.Array, reflect.Slice:
var items valuesList
itemCount := v.getResolvedValue().Len()
for i := 0; i < itemCount; i++ {
items = append(items, &Value{val: v.getResolvedValue().Index(i)})
}
if sorted {
if reverse {
sort.Sort(sort.Reverse(items))
} else {
sort.Sort(items)
}
} else {
if reverse {
for i := 0; i < itemCount/2; i++ {
items[i], items[itemCount-1-i] = items[itemCount-1-i], items[i]
}
}
}
if len(items) > 0 {
for idx, item := range items {
if !fn(idx, itemCount, item, nil) {
return
}
}
} else {
empty()
}
return // done
case reflect.String:
if sorted {
// TODO(flosch): Handle sorted
panic("TODO: handle sort for type string")
}
// TODO(flosch): Not utf8-compatible (utf8-decoding necessary)
charCount := v.getResolvedValue().Len()
if charCount > 0 {
if reverse {
for i := charCount - 1; i >= 0; i-- {
if !fn(i, charCount, &Value{val: v.getResolvedValue().Slice(i, i+1)}, nil) {
return
}
}
} else {
for i := 0; i < charCount; i++ {
if !fn(i, charCount, &Value{val: v.getResolvedValue().Slice(i, i+1)}, nil) {
return
}
}
}
} else {
empty()
}
return // done
default:
logf("Value.Iterate() not available for type: %s\n", v.getResolvedValue().Kind().String())
}
empty()
}
// Interface gives you access to the underlying value.
func (v *Value) Interface() interface{} {
if v.val.IsValid() {
return v.val.Interface()
}
return nil
}
// EqualValueTo checks whether two values contain the same value or object.
func (v *Value) EqualValueTo(other *Value) bool {
// comparison of uint with int fails using .Interface()-comparison (see issue #64)
if v.IsInteger() && other.IsInteger() {
return v.Integer() == other.Integer()
}
return v.Interface() == other.Interface()
}
type sortedKeys []reflect.Value
func (sk sortedKeys) Len() int {
return len(sk)
}
func (sk sortedKeys) Less(i, j int) bool {
vi := &Value{val: sk[i]}
vj := &Value{val: sk[j]}
switch {
case vi.IsInteger() && vj.IsInteger():
return vi.Integer() < vj.Integer()
case vi.IsFloat() && vj.IsFloat():
return vi.Float() < vj.Float()
default:
return vi.String() < vj.String()
}
}
func (sk sortedKeys) Swap(i, j int) {
sk[i], sk[j] = sk[j], sk[i]
}
type valuesList []*Value
func (vl valuesList) Len() int {
return len(vl)
}
func (vl valuesList) Less(i, j int) bool {
vi := vl[i]
vj := vl[j]
switch {
case vi.IsInteger() && vj.IsInteger():
return vi.Integer() < vj.Integer()
case vi.IsFloat() && vj.IsFloat():
return vi.Float() < vj.Float()
default:
return vi.String() < vj.String()
}
}
func (vl valuesList) Swap(i, j int) {
vl[i], vl[j] = vl[j], vl[i]
}
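A compact sketch (an editor's illustration, not part of the vendored file) of the Value helpers above, as they are typically exercised from custom filters or tags:
package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

func main() {
	v := pongo2.AsValue("Hello, World!")
	fmt.Println(v.IsString(), v.Len())               // true 13
	fmt.Println(v.Contains(pongo2.AsValue("World"))) // true
	fmt.Println(v.Slice(0, 5).String())              // Hello

	// IsTrue follows the Pythonic truthiness rules documented above.
	fmt.Println(pongo2.AsValue(0).IsTrue())          // false
	fmt.Println(pongo2.AsValue(0).Negate().IsTrue()) // true
}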

View File

@ -1,695 +0,0 @@
package pongo2
import (
"fmt"
"reflect"
"strconv"
"strings"
"github.com/juju/errors"
)
const (
varTypeInt = iota
varTypeIdent
)
var (
typeOfValuePtr = reflect.TypeOf(new(Value))
typeOfExecCtxPtr = reflect.TypeOf(new(ExecutionContext))
)
type variablePart struct {
typ int
s string
i int
isFunctionCall bool
callingArgs []functionCallArgument // needed for a function call, represents all argument nodes (INode supports nested function calls)
}
type functionCallArgument interface {
Evaluate(*ExecutionContext) (*Value, *Error)
}
// TODO: Add location tokens
type stringResolver struct {
locationToken *Token
val string
}
type intResolver struct {
locationToken *Token
val int
}
type floatResolver struct {
locationToken *Token
val float64
}
type boolResolver struct {
locationToken *Token
val bool
}
type variableResolver struct {
locationToken *Token
parts []*variablePart
}
type nodeFilteredVariable struct {
locationToken *Token
resolver IEvaluator
filterChain []*filterCall
}
type nodeVariable struct {
locationToken *Token
expr IEvaluator
}
type executionCtxEval struct{}
func (v *nodeFilteredVariable) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := v.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (vr *variableResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := vr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (s *stringResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := s.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (i *intResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := i.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (f *floatResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := f.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (b *boolResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := b.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
return nil
}
func (v *nodeFilteredVariable) GetPositionToken() *Token {
return v.locationToken
}
func (vr *variableResolver) GetPositionToken() *Token {
return vr.locationToken
}
func (s *stringResolver) GetPositionToken() *Token {
return s.locationToken
}
func (i *intResolver) GetPositionToken() *Token {
return i.locationToken
}
func (f *floatResolver) GetPositionToken() *Token {
return f.locationToken
}
func (b *boolResolver) GetPositionToken() *Token {
return b.locationToken
}
func (s *stringResolver) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
return AsValue(s.val), nil
}
func (i *intResolver) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
return AsValue(i.val), nil
}
func (f *floatResolver) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
return AsValue(f.val), nil
}
func (b *boolResolver) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
return AsValue(b.val), nil
}
func (s *stringResolver) FilterApplied(name string) bool {
return false
}
func (i *intResolver) FilterApplied(name string) bool {
return false
}
func (f *floatResolver) FilterApplied(name string) bool {
return false
}
func (b *boolResolver) FilterApplied(name string) bool {
return false
}
func (nv *nodeVariable) FilterApplied(name string) bool {
return nv.expr.FilterApplied(name)
}
func (nv *nodeVariable) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := nv.expr.Evaluate(ctx)
if err != nil {
return err
}
if !nv.expr.FilterApplied("safe") && !value.safe && value.IsString() && ctx.Autoescape {
// apply escape filter
value, err = filters["escape"](value, nil)
if err != nil {
return err
}
}
writer.WriteString(value.String())
return nil
}
func (executionCtxEval) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
return AsValue(ctx), nil
}
func (vr *variableResolver) FilterApplied(name string) bool {
return false
}
func (vr *variableResolver) String() string {
parts := make([]string, 0, len(vr.parts))
for _, p := range vr.parts {
switch p.typ {
case varTypeInt:
parts = append(parts, strconv.Itoa(p.i))
case varTypeIdent:
parts = append(parts, p.s)
default:
panic("unimplemented")
}
}
return strings.Join(parts, ".")
}
func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
var current reflect.Value
var isSafe bool
for idx, part := range vr.parts {
if idx == 0 {
// We're looking up the first part of the variable.
// First we have a look in our private
// context (e.g. information provided by tags, like the forloop)
val, inPrivate := ctx.Private[vr.parts[0].s]
if !inPrivate {
// Nothing found? Then have a final lookup in the public context
val = ctx.Public[vr.parts[0].s]
}
current = reflect.ValueOf(val) // Get the initial value
} else {
// Next parts, resolve it from current
// Before resolving the pointer, let's see if we have a method to call
// Problem with resolving the pointer is we're changing the receiver
isFunc := false
if part.typ == varTypeIdent {
funcValue := current.MethodByName(part.s)
if funcValue.IsValid() {
current = funcValue
isFunc = true
}
}
if !isFunc {
// If current is a pointer, resolve it
if current.Kind() == reflect.Ptr {
current = current.Elem()
if !current.IsValid() {
// Value is not valid (anymore)
return AsValue(nil), nil
}
}
// Look up which part must be called now
switch part.typ {
case varTypeInt:
// Calling an index is only possible for:
// * slices/arrays/strings
switch current.Kind() {
case reflect.String, reflect.Array, reflect.Slice:
if part.i >= 0 && current.Len() > part.i {
current = current.Index(part.i)
} else {
// In Django, exceeding the length of a list is just empty.
return AsValue(nil), nil
}
default:
return nil, errors.Errorf("Can't access an index on type %s (variable %s)",
current.Kind().String(), vr.String())
}
case varTypeIdent:
// debugging:
// fmt.Printf("now = %s (kind: %s)\n", part.s, current.Kind().String())
// Calling a field or key
switch current.Kind() {
case reflect.Struct:
current = current.FieldByName(part.s)
case reflect.Map:
current = current.MapIndex(reflect.ValueOf(part.s))
default:
return nil, errors.Errorf("Can't access a field by name on type %s (variable %s)",
current.Kind().String(), vr.String())
}
default:
panic("unimplemented")
}
}
}
if !current.IsValid() {
// Value is not valid (anymore)
return AsValue(nil), nil
}
// If current is a reflect.ValueOf(pongo2.Value), then unpack it
// Happens in function calls (as a return value) or by injecting
// into the execution context (e.g. in a for-loop)
if current.Type() == typeOfValuePtr {
tmpValue := current.Interface().(*Value)
current = tmpValue.val
isSafe = tmpValue.safe
}
// Check whether this is an interface and resolve it where required
if current.Kind() == reflect.Interface {
current = reflect.ValueOf(current.Interface())
}
// Check if the part is a function call
if part.isFunctionCall || current.Kind() == reflect.Func {
// Check for callable
if current.Kind() != reflect.Func {
return nil, errors.Errorf("'%s' is not a function (it is %s)", vr.String(), current.Kind().String())
}
// Check for correct function syntax and types
// func(*Value, ...) *Value
t := current.Type()
currArgs := part.callingArgs
// If an implicit ExecCtx is needed
if t.NumIn() > 0 && t.In(0) == typeOfExecCtxPtr {
currArgs = append([]functionCallArgument{executionCtxEval{}}, currArgs...)
}
// Input arguments
if len(currArgs) != t.NumIn() && !(len(currArgs) >= t.NumIn()-1 && t.IsVariadic()) {
return nil,
errors.Errorf("Function input argument count (%d) of '%s' must be equal to the calling argument count (%d).",
t.NumIn(), vr.String(), len(currArgs))
}
// Output arguments
if t.NumOut() != 1 && t.NumOut() != 2 {
return nil, errors.Errorf("'%s' must have exactly 1 or 2 output arguments, the second argument must be of type error", vr.String())
}
// Evaluate all parameters
var parameters []reflect.Value
numArgs := t.NumIn()
isVariadic := t.IsVariadic()
var fnArg reflect.Type
for idx, arg := range currArgs {
pv, err := arg.Evaluate(ctx)
if err != nil {
return nil, err
}
if isVariadic {
if idx >= t.NumIn()-1 {
fnArg = t.In(numArgs - 1).Elem()
} else {
fnArg = t.In(idx)
}
} else {
fnArg = t.In(idx)
}
if fnArg != typeOfValuePtr {
// The function's argument is not a *pongo2.Value, so we have to check whether the input argument is of the same type as the function's argument
if !isVariadic {
if fnArg != reflect.TypeOf(pv.Interface()) && fnArg.Kind() != reflect.Interface {
return nil, errors.Errorf("Function input argument %d of '%s' must be of type %s or *pongo2.Value (not %T).",
idx, vr.String(), fnArg.String(), pv.Interface())
}
// Function's argument has another type, using the interface-value
parameters = append(parameters, reflect.ValueOf(pv.Interface()))
} else {
if fnArg != reflect.TypeOf(pv.Interface()) && fnArg.Kind() != reflect.Interface {
return nil, errors.Errorf("Function variadic input argument of '%s' must be of type %s or *pongo2.Value (not %T).",
vr.String(), fnArg.String(), pv.Interface())
}
// Function's argument has another type, using the interface-value
parameters = append(parameters, reflect.ValueOf(pv.Interface()))
}
} else {
// Function's argument is a *pongo2.Value
parameters = append(parameters, reflect.ValueOf(pv))
}
}
// Check if any of the values are invalid
for _, p := range parameters {
if p.Kind() == reflect.Invalid {
return nil, errors.Errorf("Calling a function using an invalid parameter")
}
}
// Call it and get first return parameter back
values := current.Call(parameters)
rv := values[0]
if t.NumOut() == 2 {
e := values[1].Interface()
if e != nil {
err, ok := e.(error)
if !ok {
return nil, errors.Errorf("The second return value is not an error")
}
if err != nil {
return nil, err
}
}
}
if rv.Type() != typeOfValuePtr {
current = reflect.ValueOf(rv.Interface())
} else {
// Return the function call value
current = rv.Interface().(*Value).val
isSafe = rv.Interface().(*Value).safe
}
}
if !current.IsValid() {
// Value is not valid (e. g. NIL value)
return AsValue(nil), nil
}
}
return &Value{val: current, safe: isSafe}, nil
}
func (vr *variableResolver) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
value, err := vr.resolve(ctx)
if err != nil {
return AsValue(nil), ctx.Error(err.Error(), vr.locationToken)
}
return value, nil
}
func (v *nodeFilteredVariable) FilterApplied(name string) bool {
for _, filter := range v.filterChain {
if filter.name == name {
return true
}
}
return false
}
func (v *nodeFilteredVariable) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
value, err := v.resolver.Evaluate(ctx)
if err != nil {
return nil, err
}
for _, filter := range v.filterChain {
value, err = filter.Execute(value, ctx)
if err != nil {
return nil, err
}
}
return value, nil
}
// IDENT | IDENT.(IDENT|NUMBER)...
func (p *Parser) parseVariableOrLiteral() (IEvaluator, *Error) {
t := p.Current()
if t == nil {
return nil, p.Error("Unexpected EOF, expected a number, string, keyword or identifier.", p.lastToken)
}
// If the first part is a number or a string, there's nothing to resolve (we only have to return the value)
switch t.Typ {
case TokenNumber:
p.Consume()
// One exception to the rule that we don't have float64 literals is at the beginning
// of an expression (or a variable name). Since we know we started with an integer
// which can't obviously be a variable name, we can check whether the first number
// is followed by a dot (and then a number again). If so, we convert it to a float64.
if p.Match(TokenSymbol, ".") != nil {
// float64
t2 := p.MatchType(TokenNumber)
if t2 == nil {
return nil, p.Error("Expected a number after the '.'.", nil)
}
f, err := strconv.ParseFloat(fmt.Sprintf("%s.%s", t.Val, t2.Val), 64)
if err != nil {
return nil, p.Error(err.Error(), t)
}
fr := &floatResolver{
locationToken: t,
val: f,
}
return fr, nil
}
i, err := strconv.Atoi(t.Val)
if err != nil {
return nil, p.Error(err.Error(), t)
}
nr := &intResolver{
locationToken: t,
val: i,
}
return nr, nil
case TokenString:
p.Consume()
sr := &stringResolver{
locationToken: t,
val: t.Val,
}
return sr, nil
case TokenKeyword:
p.Consume()
switch t.Val {
case "true":
br := &boolResolver{
locationToken: t,
val: true,
}
return br, nil
case "false":
br := &boolResolver{
locationToken: t,
val: false,
}
return br, nil
default:
return nil, p.Error("This keyword is not allowed here.", nil)
}
}
resolver := &variableResolver{
locationToken: t,
}
// First part of a variable MUST be an identifier
if t.Typ != TokenIdentifier {
return nil, p.Error("Expected either a number, string, keyword or identifier.", t)
}
resolver.parts = append(resolver.parts, &variablePart{
typ: varTypeIdent,
s: t.Val,
})
p.Consume() // we consumed the first identifier of the variable name
variableLoop:
for p.Remaining() > 0 {
t = p.Current()
if p.Match(TokenSymbol, ".") != nil {
// Next variable part (can be either NUMBER or IDENT)
t2 := p.Current()
if t2 != nil {
switch t2.Typ {
case TokenIdentifier:
resolver.parts = append(resolver.parts, &variablePart{
typ: varTypeIdent,
s: t2.Val,
})
p.Consume() // consume: IDENT
continue variableLoop
case TokenNumber:
i, err := strconv.Atoi(t2.Val)
if err != nil {
return nil, p.Error(err.Error(), t2)
}
resolver.parts = append(resolver.parts, &variablePart{
typ: varTypeInt,
i: i,
})
p.Consume() // consume: NUMBER
continue variableLoop
default:
return nil, p.Error("This token is not allowed within a variable name.", t2)
}
} else {
// EOF
return nil, p.Error("Unexpected EOF, expected either IDENTIFIER or NUMBER after DOT.",
p.lastToken)
}
} else if p.Match(TokenSymbol, "(") != nil {
// Function call
// FunctionName '(' Comma-separated list of expressions ')'
part := resolver.parts[len(resolver.parts)-1]
part.isFunctionCall = true
argumentLoop:
for {
if p.Remaining() == 0 {
return nil, p.Error("Unexpected EOF, expected function call argument list.", p.lastToken)
}
if p.Peek(TokenSymbol, ")") == nil {
// No closing bracket, so we're parsing an expression
exprArg, err := p.ParseExpression()
if err != nil {
return nil, err
}
part.callingArgs = append(part.callingArgs, exprArg)
if p.Match(TokenSymbol, ")") != nil {
// If there's a closing bracket after an expression, we will stop parsing the arguments
break argumentLoop
} else {
// If there's NO closing bracket, there MUST be a comma
if p.Match(TokenSymbol, ",") == nil {
return nil, p.Error("Missing comma or closing bracket after argument.", nil)
}
}
} else {
// We got a closing bracket, so stop parsing arguments
p.Consume()
break argumentLoop
}
}
// We're done parsing the function call, next variable part
continue variableLoop
}
// No dot or function call? Then we're done with the variable parsing
break
}
return resolver, nil
}
func (p *Parser) parseVariableOrLiteralWithFilter() (*nodeFilteredVariable, *Error) {
v := &nodeFilteredVariable{
locationToken: p.Current(),
}
// Parse the variable name
resolver, err := p.parseVariableOrLiteral()
if err != nil {
return nil, err
}
v.resolver = resolver
// Parse all the filters
filterLoop:
for p.Match(TokenSymbol, "|") != nil {
// Parse one single filter
filter, err := p.parseFilter()
if err != nil {
return nil, err
}
// Check sandbox filter restriction
if _, isBanned := p.template.set.bannedFilters[filter.name]; isBanned {
return nil, p.Error(fmt.Sprintf("Usage of filter '%s' is not allowed (sandbox restriction active).", filter.name), nil)
}
v.filterChain = append(v.filterChain, filter)
continue filterLoop
}
return v, nil
}
func (p *Parser) parseVariableElement() (INode, *Error) {
node := &nodeVariable{
locationToken: p.Current(),
}
p.Consume() // consume '{{'
expr, err := p.ParseExpression()
if err != nil {
return nil, err
}
node.expr = expr
if p.Match(TokenSymbol, "}}") == nil {
return nil, p.Error("'}}' expected", nil)
}
return node, nil
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,117 @@
// Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2017 The Go Authors. All rights reserved.
// https://github.com/golang/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/*
Package remap handles tracking the locations of Go tokens in a source text
across a rewrite by the Go formatter.
*/
package remap
import (
"fmt"
"go/scanner"
"go/token"
)
// A Location represents a span of byte offsets in the source text.
type Location struct {
Pos, End int // End is exclusive
}
// A Map represents a mapping between token locations in an input source text
// and locations in the corresponding output text.
type Map map[Location]Location
// Find reports whether the specified span is recorded by m, and if so returns
// the new location it was mapped to. If the input span was not found, the
// returned location is the same as the input.
func (m Map) Find(pos, end int) (Location, bool) {
key := Location{
Pos: pos,
End: end,
}
if loc, ok := m[key]; ok {
return loc, true
}
return key, false
}
func (m Map) add(opos, oend, npos, nend int) {
m[Location{Pos: opos, End: oend}] = Location{Pos: npos, End: nend}
}
// Compute constructs a location mapping from input to output. An error is
// reported if any of the tokens of output cannot be mapped.
func Compute(input, output []byte) (Map, error) {
itok := tokenize(input)
otok := tokenize(output)
if len(itok) != len(otok) {
return nil, fmt.Errorf("wrong number of tokens, %d ≠ %d", len(itok), len(otok))
}
m := make(Map)
for i, ti := range itok {
to := otok[i]
if ti.Token != to.Token {
return nil, fmt.Errorf("token %d type mismatch: %s ≠ %s", i+1, ti, to)
}
m.add(ti.pos, ti.end, to.pos, to.end)
}
return m, nil
}
// tokinfo records the span and type of a source token.
type tokinfo struct {
pos, end int
token.Token
}
func tokenize(src []byte) []tokinfo {
fs := token.NewFileSet()
var s scanner.Scanner
s.Init(fs.AddFile("src", fs.Base(), len(src)), src, nil, scanner.ScanComments)
var info []tokinfo
for {
pos, next, lit := s.Scan()
switch next {
case token.SEMICOLON:
continue
}
info = append(info, tokinfo{
pos: int(pos - 1),
end: int(pos + token.Pos(len(lit)) - 1),
Token: next,
})
if next == token.EOF {
break
}
}
return info
}
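A small sketch (an editor's illustration, not part of the vendored file) of how Compute and Find fit together; it is written as an in-package snippet because remap is an internal package:
package remap

import (
	"bytes"
	"fmt"
	"go/format"
)

// exampleComputeFind is a would-be usage sketch: both texts must produce the
// same token stream; spans are byte offsets with End exclusive.
func exampleComputeFind() error {
	input := []byte("package   main\nvar x  =  42\n")
	output, err := format.Source(input) // same tokens, different layout
	if err != nil {
		return err
	}
	m, err := Compute(input, output)
	if err != nil {
		return err
	}
	// Locate the literal "42" in the input and ask where it landed in the output.
	pos := bytes.Index(input, []byte("42"))
	if loc, ok := m.Find(pos, pos+2); ok {
		fmt.Printf("42 moved to output span [%d,%d)\n", loc.Pos, loc.End)
	}
	return nil
}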

View File

@ -0,0 +1,369 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/protobuf/compiler/plugin.proto
/*
Package plugin_go is a generated protocol buffer package.
It is generated from these files:
google/protobuf/compiler/plugin.proto
It has these top-level messages:
Version
CodeGeneratorRequest
CodeGeneratorResponse
*/
package plugin_go
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// The version number of protocol compiler.
type Version struct {
Major *int32 `protobuf:"varint,1,opt,name=major" json:"major,omitempty"`
Minor *int32 `protobuf:"varint,2,opt,name=minor" json:"minor,omitempty"`
Patch *int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"`
// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
// be empty for mainline stable releases.
Suffix *string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Version) Reset() { *m = Version{} }
func (m *Version) String() string { return proto.CompactTextString(m) }
func (*Version) ProtoMessage() {}
func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
func (m *Version) Unmarshal(b []byte) error {
return xxx_messageInfo_Version.Unmarshal(m, b)
}
func (m *Version) Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Version.Marshal(b, m, deterministic)
}
func (dst *Version) XXX_Merge(src proto.Message) {
xxx_messageInfo_Version.Merge(dst, src)
}
func (m *Version) XXX_Size() int {
return xxx_messageInfo_Version.Size(m)
}
func (m *Version) XXX_DiscardUnknown() {
xxx_messageInfo_Version.DiscardUnknown(m)
}
var xxx_messageInfo_Version proto.InternalMessageInfo
func (m *Version) GetMajor() int32 {
if m != nil && m.Major != nil {
return *m.Major
}
return 0
}
func (m *Version) GetMinor() int32 {
if m != nil && m.Minor != nil {
return *m.Minor
}
return 0
}
func (m *Version) GetPatch() int32 {
if m != nil && m.Patch != nil {
return *m.Patch
}
return 0
}
func (m *Version) GetSuffix() string {
if m != nil && m.Suffix != nil {
return *m.Suffix
}
return ""
}
// An encoded CodeGeneratorRequest is written to the plugin's stdin.
type CodeGeneratorRequest struct {
// The .proto files that were explicitly listed on the command-line. The
// code generator should generate code only for these files. Each file's
// descriptor will be included in proto_file, below.
FileToGenerate []string `protobuf:"bytes,1,rep,name=file_to_generate,json=fileToGenerate" json:"file_to_generate,omitempty"`
// The generator parameter passed on the command-line.
Parameter *string `protobuf:"bytes,2,opt,name=parameter" json:"parameter,omitempty"`
// FileDescriptorProtos for all files in files_to_generate and everything
// they import. The files will appear in topological order, so each file
// appears before any file that imports it.
//
// protoc guarantees that all proto_files will be written after
// the fields above, even though this is not technically guaranteed by the
// protobuf wire format. This theoretically could allow a plugin to stream
// in the FileDescriptorProtos and handle them one by one rather than read
// the entire set into memory at once. However, as of this writing, this
// is not similarly optimized on protoc's end -- it will store all fields in
// memory at once before sending them to the plugin.
//
// Type names of fields and extensions in the FileDescriptorProto are always
// fully qualified.
ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file,json=protoFile" json:"proto_file,omitempty"`
// The version number of protocol compiler.
CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *CodeGeneratorRequest) Reset() { *m = CodeGeneratorRequest{} }
func (m *CodeGeneratorRequest) String() string { return proto.CompactTextString(m) }
func (*CodeGeneratorRequest) ProtoMessage() {}
func (*CodeGeneratorRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
func (m *CodeGeneratorRequest) Unmarshal(b []byte) error {
return xxx_messageInfo_CodeGeneratorRequest.Unmarshal(m, b)
}
func (m *CodeGeneratorRequest) Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CodeGeneratorRequest.Marshal(b, m, deterministic)
}
func (dst *CodeGeneratorRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_CodeGeneratorRequest.Merge(dst, src)
}
func (m *CodeGeneratorRequest) XXX_Size() int {
return xxx_messageInfo_CodeGeneratorRequest.Size(m)
}
func (m *CodeGeneratorRequest) XXX_DiscardUnknown() {
xxx_messageInfo_CodeGeneratorRequest.DiscardUnknown(m)
}
var xxx_messageInfo_CodeGeneratorRequest proto.InternalMessageInfo
func (m *CodeGeneratorRequest) GetFileToGenerate() []string {
if m != nil {
return m.FileToGenerate
}
return nil
}
func (m *CodeGeneratorRequest) GetParameter() string {
if m != nil && m.Parameter != nil {
return *m.Parameter
}
return ""
}
func (m *CodeGeneratorRequest) GetProtoFile() []*google_protobuf.FileDescriptorProto {
if m != nil {
return m.ProtoFile
}
return nil
}
func (m *CodeGeneratorRequest) GetCompilerVersion() *Version {
if m != nil {
return m.CompilerVersion
}
return nil
}
// The plugin writes an encoded CodeGeneratorResponse to stdout.
type CodeGeneratorResponse struct {
// Error message. If non-empty, code generation failed. The plugin process
// should exit with status code zero even if it reports an error in this way.
//
// This should be used to indicate errors in .proto files which prevent the
// code generator from generating correct code. Errors which indicate a
// problem in protoc itself -- such as the input CodeGeneratorRequest being
// unparseable -- should be reported by writing a message to stderr and
// exiting with a non-zero status code.
Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"`
File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *CodeGeneratorResponse) Reset() { *m = CodeGeneratorResponse{} }
func (m *CodeGeneratorResponse) String() string { return proto.CompactTextString(m) }
func (*CodeGeneratorResponse) ProtoMessage() {}
func (*CodeGeneratorResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
func (m *CodeGeneratorResponse) Unmarshal(b []byte) error {
return xxx_messageInfo_CodeGeneratorResponse.Unmarshal(m, b)
}
func (m *CodeGeneratorResponse) Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CodeGeneratorResponse.Marshal(b, m, deterministic)
}
func (dst *CodeGeneratorResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_CodeGeneratorResponse.Merge(dst, src)
}
func (m *CodeGeneratorResponse) XXX_Size() int {
return xxx_messageInfo_CodeGeneratorResponse.Size(m)
}
func (m *CodeGeneratorResponse) XXX_DiscardUnknown() {
xxx_messageInfo_CodeGeneratorResponse.DiscardUnknown(m)
}
var xxx_messageInfo_CodeGeneratorResponse proto.InternalMessageInfo
func (m *CodeGeneratorResponse) GetError() string {
if m != nil && m.Error != nil {
return *m.Error
}
return ""
}
func (m *CodeGeneratorResponse) GetFile() []*CodeGeneratorResponse_File {
if m != nil {
return m.File
}
return nil
}
// Represents a single generated file.
type CodeGeneratorResponse_File struct {
// The file name, relative to the output directory. The name must not
// contain "." or ".." components and must be relative, not be absolute (so,
// the file cannot lie outside the output directory). "/" must be used as
// the path separator, not "\".
//
// If the name is omitted, the content will be appended to the previous
// file. This allows the generator to break large files into small chunks,
// and allows the generated text to be streamed back to protoc so that large
// files need not reside completely in memory at one time. Note that as of
// this writing protoc does not optimize for this -- it will read the entire
// CodeGeneratorResponse before writing files to disk.
Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
// If non-empty, indicates that the named file should already exist, and the
// content here is to be inserted into that file at a defined insertion
// point. This feature allows a code generator to extend the output
// produced by another code generator. The original generator may provide
// insertion points by placing special annotations in the file that look
// like:
// @@protoc_insertion_point(NAME)
// The annotation can have arbitrary text before and after it on the line,
// which allows it to be placed in a comment. NAME should be replaced with
// an identifier naming the point -- this is what other generators will use
// as the insertion_point. Code inserted at this point will be placed
// immediately above the line containing the insertion point (thus multiple
// insertions to the same point will come out in the order they were added).
// The double-@ is intended to make it unlikely that the generated code
// could contain things that look like insertion points by accident.
//
// For example, the C++ code generator places the following line in the
// .pb.h files that it generates:
// // @@protoc_insertion_point(namespace_scope)
// This line appears within the scope of the file's package namespace, but
// outside of any particular class. Another plugin can then specify the
// insertion_point "namespace_scope" to generate additional classes or
// other declarations that should be placed in this scope.
//
// Note that if the line containing the insertion point begins with
// whitespace, the same whitespace will be added to every line of the
// inserted text. This is useful for languages like Python, where
// indentation matters. In these languages, the insertion point comment
// should be indented the same amount as any inserted code will need to be
// in order to work correctly in that context.
//
// The code generator that generates the initial file and the one which
// inserts into it must both run as part of a single invocation of protoc.
// Code generators are executed in the order in which they appear on the
// command line.
//
// If |insertion_point| is present, |name| must also be present.
InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point,json=insertionPoint" json:"insertion_point,omitempty"`
// The file contents.
Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *CodeGeneratorResponse_File) Reset() { *m = CodeGeneratorResponse_File{} }
func (m *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(m) }
func (*CodeGeneratorResponse_File) ProtoMessage() {}
func (*CodeGeneratorResponse_File) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} }
func (m *CodeGeneratorResponse_File) Unmarshal(b []byte) error {
return xxx_messageInfo_CodeGeneratorResponse_File.Unmarshal(m, b)
}
func (m *CodeGeneratorResponse_File) Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CodeGeneratorResponse_File.Marshal(b, m, deterministic)
}
func (dst *CodeGeneratorResponse_File) XXX_Merge(src proto.Message) {
xxx_messageInfo_CodeGeneratorResponse_File.Merge(dst, src)
}
func (m *CodeGeneratorResponse_File) XXX_Size() int {
return xxx_messageInfo_CodeGeneratorResponse_File.Size(m)
}
func (m *CodeGeneratorResponse_File) XXX_DiscardUnknown() {
xxx_messageInfo_CodeGeneratorResponse_File.DiscardUnknown(m)
}
var xxx_messageInfo_CodeGeneratorResponse_File proto.InternalMessageInfo
func (m *CodeGeneratorResponse_File) GetName() string {
if m != nil && m.Name != nil {
return *m.Name
}
return ""
}
func (m *CodeGeneratorResponse_File) GetInsertionPoint() string {
if m != nil && m.InsertionPoint != nil {
return *m.InsertionPoint
}
return ""
}
func (m *CodeGeneratorResponse_File) GetContent() string {
if m != nil && m.Content != nil {
return *m.Content
}
return ""
}
func init() {
proto.RegisterType((*Version)(nil), "google.protobuf.compiler.Version")
proto.RegisterType((*CodeGeneratorRequest)(nil), "google.protobuf.compiler.CodeGeneratorRequest")
proto.RegisterType((*CodeGeneratorResponse)(nil), "google.protobuf.compiler.CodeGeneratorResponse")
proto.RegisterType((*CodeGeneratorResponse_File)(nil), "google.protobuf.compiler.CodeGeneratorResponse.File")
}
func init() { proto.RegisterFile("google/protobuf/compiler/plugin.proto", fileDescriptor0) }
var fileDescriptor0 = []byte{
// 417 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xcf, 0x6a, 0x14, 0x41,
0x10, 0xc6, 0x19, 0x77, 0x63, 0x98, 0x8a, 0x64, 0x43, 0x13, 0xa5, 0x09, 0x39, 0x8c, 0x8b, 0xe2,
0x5c, 0x32, 0x0b, 0xc1, 0x8b, 0x78, 0x4b, 0x44, 0x3d, 0x78, 0x58, 0x1a, 0xf1, 0x20, 0xc8, 0x30,
0x99, 0xd4, 0x74, 0x5a, 0x66, 0xba, 0xc6, 0xee, 0x1e, 0xf1, 0x49, 0x7d, 0x0f, 0xdf, 0x40, 0xfa,
0xcf, 0x24, 0xb2, 0xb8, 0xa7, 0xee, 0xef, 0x57, 0xd5, 0xd5, 0x55, 0x1f, 0x05, 0x2f, 0x25, 0x91,
0xec, 0x71, 0x33, 0x1a, 0x72, 0x74, 0x33, 0x75, 0x9b, 0x96, 0x86, 0x51, 0xf5, 0x68, 0x36, 0x63,
0x3f, 0x49, 0xa5, 0xab, 0x10, 0x60, 0x3c, 0xa6, 0x55, 0x73, 0x5a, 0x35, 0xa7, 0x9d, 0x15, 0xbb,
0x05, 0x6e, 0xd1, 0xb6, 0x46, 0x8d, 0x8e, 0x4c, 0xcc, 0x5e, 0xb7, 0x70, 0xf8, 0x05, 0x8d, 0x55,
0xa4, 0xd9, 0x29, 0x1c, 0x0c, 0xcd, 0x77, 0x32, 0x3c, 0x2b, 0xb2, 0xf2, 0x40, 0x44, 0x11, 0xa8,
0xd2, 0x64, 0xf8, 0xa3, 0x44, 0xbd, 0xf0, 0x74, 0x6c, 0x5c, 0x7b, 0xc7, 0x17, 0x91, 0x06, 0xc1,
0x9e, 0xc1, 0x63, 0x3b, 0x75, 0x9d, 0xfa, 0xc5, 0x97, 0x45, 0x56, 0xe6, 0x22, 0xa9, 0xf5, 0x9f,
0x0c, 0x4e, 0xaf, 0xe9, 0x16, 0x3f, 0xa0, 0x46, 0xd3, 0x38, 0x32, 0x02, 0x7f, 0x4c, 0x68, 0x1d,
0x2b, 0xe1, 0xa4, 0x53, 0x3d, 0xd6, 0x8e, 0x6a, 0x19, 0x63, 0xc8, 0xb3, 0x62, 0x51, 0xe6, 0xe2,
0xd8, 0xf3, 0xcf, 0x94, 0x5e, 0x20, 0x3b, 0x87, 0x7c, 0x6c, 0x4c, 0x33, 0xa0, 0xc3, 0xd8, 0x4a,
0x2e, 0x1e, 0x00, 0xbb, 0x06, 0x08, 0xe3, 0xd4, 0xfe, 0x15, 0x5f, 0x15, 0x8b, 0xf2, 0xe8, 0xf2,
0x45, 0xb5, 0x6b, 0xcb, 0x7b, 0xd5, 0xe3, 0xbb, 0x7b, 0x03, 0xb6, 0x1e, 0x8b, 0x3c, 0x44, 0x7d,
0x84, 0x7d, 0x82, 0x93, 0xd9, 0xb8, 0xfa, 0x67, 0xf4, 0x24, 0x8c, 0x77, 0x74, 0xf9, 0xbc, 0xda,
0xe7, 0x70, 0x95, 0xcc, 0x13, 0xab, 0x99, 0x24, 0xb0, 0xfe, 0x9d, 0xc1, 0xd3, 0x9d, 0x99, 0xed,
0x48, 0xda, 0xa2, 0xf7, 0x0e, 0x8d, 0x49, 0x3e, 0xe7, 0x22, 0x0a, 0xf6, 0x11, 0x96, 0xff, 0x34,
0xff, 0x7a, 0xff, 0x8f, 0xff, 0x2d, 0x1a, 0x66, 0x13, 0xa1, 0xc2, 0xd9, 0x37, 0x58, 0x86, 0x79,
0x18, 0x2c, 0x75, 0x33, 0x60, 0xfa, 0x26, 0xdc, 0xd9, 0x2b, 0x58, 0x29, 0x6d, 0xd1, 0x38, 0x45,
0xba, 0x1e, 0x49, 0x69, 0x97, 0xcc, 0x3c, 0xbe, 0xc7, 0x5b, 0x4f, 0x19, 0x87, 0xc3, 0x96, 0xb4,
0x43, 0xed, 0xf8, 0x2a, 0x24, 0xcc, 0xf2, 0x4a, 0xc2, 0x79, 0x4b, 0xc3, 0xde, 0xfe, 0xae, 0x9e,
0x6c, 0xc3, 0x6e, 0x06, 0x7b, 0xed, 0xd7, 0x37, 0x52, 0xb9, 0xbb, 0xe9, 0xc6, 0x87, 0x37, 0x92,
0xfa, 0x46, 0xcb, 0x87, 0x65, 0x0c, 0x97, 0xf6, 0x42, 0xa2, 0xbe, 0x90, 0x94, 0x56, 0xfa, 0x6d,
0x3c, 0x6a, 0x49, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xf7, 0x15, 0x40, 0xc5, 0xfe, 0x02, 0x00,
0x00,
}
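As the comments above describe, a plugin is a program that reads an encoded CodeGeneratorRequest from stdin and writes an encoded CodeGeneratorResponse to stdout. A minimal, self-contained sketch using these generated types (the output file name and contents are hypothetical placeholders, and error handling is abbreviated):

```go
package main

import (
	"io/ioutil"
	"os"

	"github.com/golang/protobuf/proto"
	plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
)

func main() {
	// protoc writes an encoded CodeGeneratorRequest to the plugin's stdin.
	in, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		os.Exit(1)
	}
	req := &plugin.CodeGeneratorRequest{}
	if err := proto.Unmarshal(in, req); err != nil {
		os.Exit(1)
	}

	// Emit one placeholder output file per file listed on the command line.
	resp := &plugin.CodeGeneratorResponse{}
	for _, f := range req.GetFileToGenerate() {
		resp.File = append(resp.File, &plugin.CodeGeneratorResponse_File{
			Name:    proto.String(f + ".example.txt"), // hypothetical output name
			Content: proto.String("// placeholder generated content\n"),
		})
	}

	// The plugin writes an encoded CodeGeneratorResponse to stdout.
	out, err := proto.Marshal(resp)
	if err != nil {
		os.Exit(1)
	}
	os.Stdout.Write(out)
}
```

Such a binary would live on PATH under a name like protoc-gen-example and be selected with protoc's --example_out flag, as the plugin.proto comments later in this commit explain.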

View File

@ -0,0 +1,83 @@
// Code generated by protoc-gen-go.
// source: google/protobuf/compiler/plugin.proto
// DO NOT EDIT!
package google_protobuf_compiler
import proto "github.com/golang/protobuf/proto"
import "math"
import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor"
// Reference proto and math imports to suppress error if they are not otherwise used.
var _ = proto.GetString
var _ = math.Inf
type CodeGeneratorRequest struct {
FileToGenerate []string `protobuf:"bytes,1,rep,name=file_to_generate" json:"file_to_generate,omitempty"`
Parameter *string `protobuf:"bytes,2,opt,name=parameter" json:"parameter,omitempty"`
ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file" json:"proto_file,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *CodeGeneratorRequest) Reset() { *this = CodeGeneratorRequest{} }
func (this *CodeGeneratorRequest) String() string { return proto.CompactTextString(this) }
func (*CodeGeneratorRequest) ProtoMessage() {}
func (this *CodeGeneratorRequest) GetParameter() string {
if this != nil && this.Parameter != nil {
return *this.Parameter
}
return ""
}
type CodeGeneratorResponse struct {
Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"`
File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *CodeGeneratorResponse) Reset() { *this = CodeGeneratorResponse{} }
func (this *CodeGeneratorResponse) String() string { return proto.CompactTextString(this) }
func (*CodeGeneratorResponse) ProtoMessage() {}
func (this *CodeGeneratorResponse) GetError() string {
if this != nil && this.Error != nil {
return *this.Error
}
return ""
}
type CodeGeneratorResponse_File struct {
Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point" json:"insertion_point,omitempty"`
Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *CodeGeneratorResponse_File) Reset() { *this = CodeGeneratorResponse_File{} }
func (this *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(this) }
func (*CodeGeneratorResponse_File) ProtoMessage() {}
func (this *CodeGeneratorResponse_File) GetName() string {
if this != nil && this.Name != nil {
return *this.Name
}
return ""
}
func (this *CodeGeneratorResponse_File) GetInsertionPoint() string {
if this != nil && this.InsertionPoint != nil {
return *this.InsertionPoint
}
return ""
}
func (this *CodeGeneratorResponse_File) GetContent() string {
if this != nil && this.Content != nil {
return *this.Content
}
return ""
}
func init() {
}

View File

@ -0,0 +1,167 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: kenton@google.com (Kenton Varda)
//
// WARNING: The plugin interface is currently EXPERIMENTAL and is subject to
// change.
//
// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is
// just a program that reads a CodeGeneratorRequest from stdin and writes a
// CodeGeneratorResponse to stdout.
//
// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead
// of dealing with the raw protocol defined here.
//
// A plugin executable needs only to be placed somewhere in the path. The
// plugin should be named "protoc-gen-$NAME", and will then be used when the
// flag "--${NAME}_out" is passed to protoc.
syntax = "proto2";
package google.protobuf.compiler;
option java_package = "com.google.protobuf.compiler";
option java_outer_classname = "PluginProtos";
option go_package = "github.com/golang/protobuf/protoc-gen-go/plugin;plugin_go";
import "google/protobuf/descriptor.proto";
// The version number of protocol compiler.
message Version {
optional int32 major = 1;
optional int32 minor = 2;
optional int32 patch = 3;
// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
// be empty for mainline stable releases.
optional string suffix = 4;
}
// An encoded CodeGeneratorRequest is written to the plugin's stdin.
message CodeGeneratorRequest {
// The .proto files that were explicitly listed on the command-line. The
// code generator should generate code only for these files. Each file's
// descriptor will be included in proto_file, below.
repeated string file_to_generate = 1;
// The generator parameter passed on the command-line.
optional string parameter = 2;
// FileDescriptorProtos for all files in files_to_generate and everything
// they import. The files will appear in topological order, so each file
// appears before any file that imports it.
//
// protoc guarantees that all proto_files will be written after
// the fields above, even though this is not technically guaranteed by the
// protobuf wire format. This theoretically could allow a plugin to stream
// in the FileDescriptorProtos and handle them one by one rather than read
// the entire set into memory at once. However, as of this writing, this
// is not similarly optimized on protoc's end -- it will store all fields in
// memory at once before sending them to the plugin.
//
// Type names of fields and extensions in the FileDescriptorProto are always
// fully qualified.
repeated FileDescriptorProto proto_file = 15;
// The version number of protocol compiler.
optional Version compiler_version = 3;
}
// The plugin writes an encoded CodeGeneratorResponse to stdout.
message CodeGeneratorResponse {
// Error message. If non-empty, code generation failed. The plugin process
// should exit with status code zero even if it reports an error in this way.
//
// This should be used to indicate errors in .proto files which prevent the
// code generator from generating correct code. Errors which indicate a
// problem in protoc itself -- such as the input CodeGeneratorRequest being
// unparseable -- should be reported by writing a message to stderr and
// exiting with a non-zero status code.
optional string error = 1;
// Represents a single generated file.
message File {
// The file name, relative to the output directory. The name must not
// contain "." or ".." components and must be relative, not be absolute (so,
// the file cannot lie outside the output directory). "/" must be used as
// the path separator, not "\".
//
// If the name is omitted, the content will be appended to the previous
// file. This allows the generator to break large files into small chunks,
// and allows the generated text to be streamed back to protoc so that large
// files need not reside completely in memory at one time. Note that as of
// this writing protoc does not optimize for this -- it will read the entire
// CodeGeneratorResponse before writing files to disk.
optional string name = 1;
// If non-empty, indicates that the named file should already exist, and the
// content here is to be inserted into that file at a defined insertion
// point. This feature allows a code generator to extend the output
// produced by another code generator. The original generator may provide
// insertion points by placing special annotations in the file that look
// like:
// @@protoc_insertion_point(NAME)
// The annotation can have arbitrary text before and after it on the line,
// which allows it to be placed in a comment. NAME should be replaced with
// an identifier naming the point -- this is what other generators will use
// as the insertion_point. Code inserted at this point will be placed
// immediately above the line containing the insertion point (thus multiple
// insertions to the same point will come out in the order they were added).
// The double-@ is intended to make it unlikely that the generated code
// could contain things that look like insertion points by accident.
//
// For example, the C++ code generator places the following line in the
// .pb.h files that it generates:
// // @@protoc_insertion_point(namespace_scope)
// This line appears within the scope of the file's package namespace, but
// outside of any particular class. Another plugin can then specify the
// insertion_point "namespace_scope" to generate additional classes or
// other declarations that should be placed in this scope.
//
// Note that if the line containing the insertion point begins with
// whitespace, the same whitespace will be added to every line of the
// inserted text. This is useful for languages like Python, where
// indentation matters. In these languages, the insertion point comment
// should be indented the same amount as any inserted code will need to be
// in order to work correctly in that context.
//
// The code generator that generates the initial file and the one which
// inserts into it must both run as part of a single invocation of protoc.
// Code generators are executed in the order in which they appear on the
// command line.
//
// If |insertion_point| is present, |name| must also be present.
optional string insertion_point = 2;
// The file contents.
optional string content = 15;
}
repeated File file = 15;
}
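A minimal sketch of a response entry that targets an insertion point left by another generator, using the generated Go types from earlier in this commit; the file and point names are illustrative only:

```go
f := &plugin.CodeGeneratorResponse_File{
	Name:           proto.String("example.pb.h"),    // file produced by the original generator
	InsertionPoint: proto.String("namespace_scope"), // matches @@protoc_insertion_point(namespace_scope)
	Content:        proto.String("// extra declarations inserted above the insertion point\n"),
}
```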

View File

@ -0,0 +1,202 @@
# Created by .ignore support plugin (hsz.mobi)
### Go template
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test
*.prof
### Windows template
# Windows image file caches
Thumbs.db
ehthumbs.db
# Folder config file
Desktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msm
*.msp
# Windows shortcuts
*.lnk
### Kate template
# Swap Files #
.*.kate-swp
.swp.*
### SublimeText template
# cache files for sublime text
*.tmlanguage.cache
*.tmPreferences.cache
*.stTheme.cache
# workspace files are user-specific
*.sublime-workspace
# project files should be checked into the repository, unless a significant
# proportion of contributors will probably not be using SublimeText
# *.sublime-project
# sftp configuration file
sftp-config.json
### Linux template
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff:
.idea
.idea/tasks.xml
.idea/dictionaries
.idea/vcs.xml
.idea/jsLibraryMappings.xml
# Sensitive or high-churn files:
.idea/dataSources.ids
.idea/dataSources.xml
.idea/dataSources.local.xml
.idea/sqlDataSources.xml
.idea/dynamic.xml
.idea/uiDesigner.xml
# Gradle:
.idea/gradle.xml
.idea/libraries
# Mongo Explorer plugin:
.idea/mongoSettings.xml
## File-based project format:
*.iws
## Plugin-specific files:
# IntelliJ
/out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
### Xcode template
# Xcode
#
# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
## Build generated
build/
DerivedData/
## Various settings
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
xcuserdata/
## Other
*.moved-aside
*.xccheckout
*.xcscmblueprint
### Eclipse template
.metadata
bin/
tmp/
*.tmp
*.bak
*.swp
*~.nib
local.properties
.settings/
.loadpath
.recommenders
# Eclipse Core
.project
# External tool builders
.externalToolBuilders/
# Locally stored "Eclipse launch configurations"
*.launch
# PyDev specific (Python IDE for Eclipse)
*.pydevproject
# CDT-specific (C/C++ Development Tooling)
.cproject
# JDT-specific (Eclipse Java Development Tools)
.classpath
# Java annotation processor (APT)
.factorypath
# PDT-specific (PHP Development Tools)
.buildpath
# sbteclipse plugin
.target
# Tern plugin
.tern-project
# TeXlipse plugin
.texlipse
# STS (Spring Tool Suite)
.springBeans
# Code Recommenders
.recommenders/
coverage.txt
#vendor
vendor/

View File

@ -0,0 +1,21 @@
sudo: false
language: go
go:
- 1.8.x
env:
- DEP_VERSION="0.3.2"
before_install:
# Download the binary to bin folder in $GOPATH
- curl -L -s https://github.com/golang/dep/releases/download/v${DEP_VERSION}/dep-linux-amd64 -o $GOPATH/bin/dep
# Make the binary executable
- chmod +x $GOPATH/bin/dep
install:
- dep ensure
script:
- make test
after_success:
- bash <(curl -s https://codecov.io/bash)

View File

@ -0,0 +1,30 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
Types of changes:
- `Added` for new features.
- `Changed` for changes in existing functionality.
- `Deprecated` for soon-to-be removed features.
- `Removed` for now removed features.
- `Fixed` for any bug fixes.
- `Security` in case of vulnerabilities.
## [Unreleased]
### Added
- This CHANGELOG file to keep track of changes.
## 1.0.0 - 2018-05-08
### Added
- grpc_auth
- grpc_ctxtags
- grpc_zap
- grpc_logrus
- grpc_opentracing
- grpc_retry
- grpc_validator
- grpc_recovery
[Unreleased]: https://github.com/grpc-ecosystem/go-grpc-middleware/compare/v1.0.0...HEAD

View File

@ -0,0 +1,20 @@
# Contributing
We would love to have people submit pull requests and help make `grpc-ecosystem/go-grpc-middleware` even better 👍.
Fork, then clone the repo:
```bash
git clone git@github.com:your-username/go-grpc-middleware.git
```
Before checking in please run the following:
```bash
make all
```
This will `vet`, `fmt`, regenerate documentation and run all tests.
Push to your fork and open a pull request.

View File

@ -0,0 +1,123 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
name = "cloud.google.com/go"
packages = ["compute/metadata"]
revision = "2d3a6656c17a60b0815b7e06ab0be04eacb6e613"
version = "v0.16.0"
[[projects]]
name = "github.com/davecgh/go-spew"
packages = ["spew"]
revision = "346938d642f2ec3594ed81d874461961cd0faa76"
version = "v1.1.0"
[[projects]]
name = "github.com/gogo/protobuf"
packages = ["gogoproto","proto","protoc-gen-gogo/descriptor"]
revision = "342cbe0a04158f6dcb03ca0079991a51a4248c02"
version = "v0.5"
[[projects]]
branch = "master"
name = "github.com/golang/protobuf"
packages = ["jsonpb","proto","ptypes","ptypes/any","ptypes/duration","ptypes/struct","ptypes/timestamp"]
revision = "1e59b77b52bf8e4b449a57e6f79f21226d571845"
[[projects]]
name = "github.com/opentracing/opentracing-go"
packages = [".","ext","log","mocktracer"]
revision = "1949ddbfd147afd4d964a9f00b24eb291e0e7c38"
version = "v1.0.2"
[[projects]]
name = "github.com/pmezard/go-difflib"
packages = ["difflib"]
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
version = "v1.0.0"
[[projects]]
name = "github.com/sirupsen/logrus"
packages = ["."]
revision = "f006c2ac4710855cf0f916dd6b77acf6b048dc6e"
version = "v1.0.3"
[[projects]]
name = "github.com/stretchr/testify"
packages = ["assert","require","suite"]
revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0"
version = "v1.1.4"
[[projects]]
name = "go.uber.org/atomic"
packages = ["."]
revision = "8474b86a5a6f79c443ce4b2992817ff32cf208b8"
version = "v1.3.1"
[[projects]]
name = "go.uber.org/multierr"
packages = ["."]
revision = "3c4937480c32f4c13a875a1829af76c98ca3d40a"
version = "v1.1.0"
[[projects]]
name = "go.uber.org/zap"
packages = [".","buffer","internal/bufferpool","internal/color","internal/exit","zapcore"]
revision = "35aad584952c3e7020db7b839f6b102de6271f89"
version = "v1.7.1"
[[projects]]
branch = "master"
name = "golang.org/x/crypto"
packages = ["ssh/terminal"]
revision = "94eea52f7b742c7cbe0b03b22f0c4c8631ece122"
[[projects]]
branch = "master"
name = "golang.org/x/net"
packages = ["context","context/ctxhttp","http2","http2/hpack","idna","internal/timeseries","lex/httplex","trace"]
revision = "a8b9294777976932365dabb6640cf1468d95c70f"
[[projects]]
branch = "master"
name = "golang.org/x/oauth2"
packages = [".","google","internal","jws","jwt"]
revision = "f95fa95eaa936d9d87489b15d1d18b97c1ba9c28"
[[projects]]
branch = "master"
name = "golang.org/x/sys"
packages = ["unix","windows"]
revision = "13fcbd661c8ececa8807a29b48407d674b1d8ed8"
[[projects]]
branch = "master"
name = "golang.org/x/text"
packages = ["collate","collate/build","internal/colltab","internal/gen","internal/tag","internal/triegen","internal/ucd","language","secure/bidirule","transform","unicode/bidi","unicode/cldr","unicode/norm","unicode/rangetable"]
revision = "75cc3cad82b5f47d3fb229ddda8c5167da14f294"
[[projects]]
name = "google.golang.org/appengine"
packages = [".","internal","internal/app_identity","internal/base","internal/datastore","internal/log","internal/modules","internal/remote_api","internal/urlfetch","urlfetch"]
revision = "150dc57a1b433e64154302bdc40b6bb8aefa313a"
version = "v1.0.0"
[[projects]]
branch = "master"
name = "google.golang.org/genproto"
packages = ["googleapis/rpc/status"]
revision = "7f0da29060c682909f650ad8ed4e515bd74fa12a"
[[projects]]
name = "google.golang.org/grpc"
packages = [".","balancer","balancer/roundrobin","codes","connectivity","credentials","credentials/oauth","encoding","grpclb/grpc_lb_v1/messages","grpclog","internal","keepalive","metadata","naming","peer","resolver","resolver/dns","resolver/passthrough","stats","status","tap","transport"]
revision = "5a9f7b402fe85096d2e1d0383435ee1876e863d0"
version = "v1.8.0"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "b24c6670412eb0bc44ed1db77fecc52333f8725f3e3272bdc568f5683a63031f"
solver-name = "gps-cdcl"
solver-version = 1

View File

@ -0,0 +1,35 @@
[[constraint]]
name = "github.com/gogo/protobuf"
version = "0.5.0"
[[constraint]]
branch = "master"
name = "github.com/golang/protobuf"
[[constraint]]
name = "github.com/opentracing/opentracing-go"
version = "1.0.2"
[[constraint]]
name = "github.com/sirupsen/logrus"
version = "1.0.3"
[[constraint]]
name = "github.com/stretchr/testify"
version = "1.1.4"
[[constraint]]
name = "go.uber.org/zap"
version = "1.7.1"
[[constraint]]
branch = "master"
name = "golang.org/x/net"
[[constraint]]
branch = "master"
name = "golang.org/x/oauth2"
[[constraint]]
name = "google.golang.org/grpc"
version = "1.8.0"

View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,84 @@
# Go gRPC Middleware
[![Travis Build](https://travis-ci.org/grpc-ecosystem/go-grpc-middleware.svg?branch=master)](https://travis-ci.org/grpc-ecosystem/go-grpc-middleware)
[![Go Report Card](https://goreportcard.com/badge/github.com/grpc-ecosystem/go-grpc-middleware)](https://goreportcard.com/report/github.com/grpc-ecosystem/go-grpc-middleware)
[![GoDoc](http://img.shields.io/badge/GoDoc-Reference-blue.svg)](https://godoc.org/github.com/grpc-ecosystem/go-grpc-middleware)
[![SourceGraph](https://sourcegraph.com/github.com/grpc-ecosystem/go-grpc-middleware/-/badge.svg)](https://sourcegraph.com/github.com/grpc-ecosystem/go-grpc-middleware/?badge)
[![codecov](https://codecov.io/gh/grpc-ecosystem/go-grpc-middleware/branch/master/graph/badge.svg)](https://codecov.io/gh/grpc-ecosystem/go-grpc-middleware)
[![Apache 2.0 License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE)
[![quality: production](https://img.shields.io/badge/quality-production-orange.svg)](#status)
[![Slack](slack.png)](https://join.slack.com/t/improbable-eng/shared_invite/enQtMzQ1ODcyMzQ5MjM4LWY5ZWZmNGM2ODc5MmViNmQ3ZTA3ZTY3NzQwOTBlMTkzZmIxZTIxODk0OWU3YjZhNWVlNDU3MDlkZGViZjhkMjc)
[gRPC Go](https://github.com/grpc/grpc-go) Middleware: interceptors, helpers, utilities.
## Middleware
[gRPC Go](https://github.com/grpc/grpc-go) recently acquired support for
Interceptors, i.e. [middleware](https://medium.com/@matryer/writing-middleware-in-golang-and-how-go-makes-it-so-much-fun-4375c1246e81#.gv7tdlghs)
that is executed either on the gRPC server, before the request is passed onto the user's application logic, or on the gRPC client, around the user call. It is a perfect way to implement
common patterns: auth, logging, message validation, retries or monitoring.
These are generic building blocks that make it easy to build multiple microservices.
The purpose of this repository is to act as a go-to point for such reusable functionality. It contains
some of this functionality itself, but also links to useful external repos.
`grpc_middleware` itself provides support for chaining interceptors; here's an example:
```go
import "github.com/grpc-ecosystem/go-grpc-middleware"
myServer := grpc.NewServer(
grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(
grpc_ctxtags.StreamServerInterceptor(),
grpc_opentracing.StreamServerInterceptor(),
grpc_prometheus.StreamServerInterceptor,
grpc_zap.StreamServerInterceptor(zapLogger),
grpc_auth.StreamServerInterceptor(myAuthFunction),
grpc_recovery.StreamServerInterceptor(),
)),
grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(
grpc_ctxtags.UnaryServerInterceptor(),
grpc_opentracing.UnaryServerInterceptor(),
grpc_prometheus.UnaryServerInterceptor,
grpc_zap.UnaryServerInterceptor(zapLogger),
grpc_auth.UnaryServerInterceptor(myAuthFunction),
grpc_recovery.UnaryServerInterceptor(),
)),
)
```
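The same helpers work on the client side; a minimal sketch, where `monitorUnary`, `retryUnary`, `monitorStream` and `retryStream` stand in for real interceptors (this mirrors the example in the package documentation):

```go
clientConn, err := grpc.Dial(
    address,
    grpc.WithUnaryInterceptor(grpc_middleware.ChainUnaryClient(monitorUnary, retryUnary)),
    grpc.WithStreamInterceptor(grpc_middleware.ChainStreamClient(monitorStream, retryStream)),
)
```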
## Interceptors
*Please send a PR to add new interceptors or middleware to this list*
#### Auth
* [`grpc_auth`](auth) - a customizable (via `AuthFunc`) piece of auth middleware
#### Logging
* [`grpc_ctxtags`](tags/) - a library that adds a `Tag` map to context, with data populated from request body
* [`grpc_zap`](logging/zap/) - integration of [zap](https://github.com/uber-go/zap) logging library into gRPC handlers.
* [`grpc_logrus`](logging/logrus/) - integration of [logrus](https://github.com/sirupsen/logrus) logging library into gRPC handlers.
#### Monitoring
* [`grpc_prometheus`⚡](https://github.com/grpc-ecosystem/go-grpc-prometheus) - Prometheus client-side and server-side monitoring middleware
* [`otgrpc`⚡](https://github.com/grpc-ecosystem/grpc-opentracing/tree/master/go/otgrpc) - [OpenTracing](http://opentracing.io/) client-side and server-side interceptors
* [`grpc_opentracing`](tracing/opentracing) - [OpenTracing](http://opentracing.io/) client-side and server-side interceptors with support for streaming and handler-returned tags
#### Client
* [`grpc_retry`](retry/) - a generic gRPC response code retry mechanism, client-side middleware
#### Server
* [`grpc_validator`](validator/) - codegen inbound message validation from `.proto` options
* [`grpc_recovery`](recovery/) - turn panics into gRPC errors
## Status
This code has been running in *production* since May 2016 as the basis of the gRPC micro services stack at [Improbable](https://improbable.io).
Additional tooling will be added, and contributions are welcome.
## License
`go-grpc-middleware` is released under the Apache 2.0 license. See the [LICENSE](LICENSE) file for details.

View File

@ -0,0 +1,183 @@
// Copyright 2016 Michal Witkowski. All Rights Reserved.
// See LICENSE for licensing terms.
// gRPC Server Interceptor chaining middleware.
package grpc_middleware
import (
"golang.org/x/net/context"
"google.golang.org/grpc"
)
// ChainUnaryServer creates a single interceptor out of a chain of many interceptors.
//
// Execution is done in left-to-right order, including passing of context.
// For example ChainUnaryServer(one, two, three) will execute one before two before three, and three
// will see context changes of one and two.
func ChainUnaryServer(interceptors ...grpc.UnaryServerInterceptor) grpc.UnaryServerInterceptor {
n := len(interceptors)
if n > 1 {
lastI := n - 1
return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
var (
chainHandler grpc.UnaryHandler
curI int
)
chainHandler = func(currentCtx context.Context, currentReq interface{}) (interface{}, error) {
if curI == lastI {
return handler(currentCtx, currentReq)
}
curI++
resp, err := interceptors[curI](currentCtx, currentReq, info, chainHandler)
curI--
return resp, err
}
return interceptors[0](ctx, req, info, chainHandler)
}
}
if n == 1 {
return interceptors[0]
}
// n == 0; Dummy interceptor maintained for backward compatibility to avoid returning nil.
return func(ctx context.Context, req interface{}, _ *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
return handler(ctx, req)
}
}
// ChainStreamServer creates a single interceptor out of a chain of many interceptors.
//
// Execution is done in left-to-right order, including passing of context.
// For example ChainStreamServer(one, two, three) will execute one before two before three.
// If you want to pass context between interceptors, use WrapServerStream.
func ChainStreamServer(interceptors ...grpc.StreamServerInterceptor) grpc.StreamServerInterceptor {
n := len(interceptors)
if n > 1 {
lastI := n - 1
return func(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error {
var (
chainHandler grpc.StreamHandler
curI int
)
chainHandler = func(currentSrv interface{}, currentStream grpc.ServerStream) error {
if curI == lastI {
return handler(currentSrv, currentStream)
}
curI++
err := interceptors[curI](currentSrv, currentStream, info, chainHandler)
curI--
return err
}
return interceptors[0](srv, stream, info, chainHandler)
}
}
if n == 1 {
return interceptors[0]
}
// n == 0; Dummy interceptor maintained for backward compatibility to avoid returning nil.
return func(srv interface{}, stream grpc.ServerStream, _ *grpc.StreamServerInfo, handler grpc.StreamHandler) error {
return handler(srv, stream)
}
}
// ChainUnaryClient creates a single interceptor out of a chain of many interceptors.
//
// Execution is done in left-to-right order, including passing of context.
// For example ChainUnaryClient(one, two, three) will execute one before two before three.
func ChainUnaryClient(interceptors ...grpc.UnaryClientInterceptor) grpc.UnaryClientInterceptor {
n := len(interceptors)
if n > 1 {
lastI := n - 1
return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
var (
chainHandler grpc.UnaryInvoker
curI int
)
chainHandler = func(currentCtx context.Context, currentMethod string, currentReq, currentRepl interface{}, currentConn *grpc.ClientConn, currentOpts ...grpc.CallOption) error {
if curI == lastI {
return invoker(currentCtx, currentMethod, currentReq, currentRepl, currentConn, currentOpts...)
}
curI++
err := interceptors[curI](currentCtx, currentMethod, currentReq, currentRepl, currentConn, chainHandler, currentOpts...)
curI--
return err
}
return interceptors[0](ctx, method, req, reply, cc, chainHandler, opts...)
}
}
if n == 1 {
return interceptors[0]
}
// n == 0; Dummy interceptor maintained for backward compatibility to avoid returning nil.
return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
return invoker(ctx, method, req, reply, cc, opts...)
}
}
// ChainStreamClient creates a single interceptor out of a chain of many interceptors.
//
// Execution is done in left-to-right order, including passing of context.
// For example ChainStreamClient(one, two, three) will execute one before two before three.
func ChainStreamClient(interceptors ...grpc.StreamClientInterceptor) grpc.StreamClientInterceptor {
n := len(interceptors)
if n > 1 {
lastI := n - 1
return func(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) {
var (
chainHandler grpc.Streamer
curI int
)
chainHandler = func(currentCtx context.Context, currentDesc *grpc.StreamDesc, currentConn *grpc.ClientConn, currentMethod string, currentOpts ...grpc.CallOption) (grpc.ClientStream, error) {
if curI == lastI {
return streamer(currentCtx, currentDesc, currentConn, currentMethod, currentOpts...)
}
curI++
stream, err := interceptors[curI](currentCtx, currentDesc, currentConn, currentMethod, chainHandler, currentOpts...)
curI--
return stream, err
}
return interceptors[0](ctx, desc, cc, method, chainHandler, opts...)
}
}
if n == 1 {
return interceptors[0]
}
// n == 0; Dummy interceptor maintained for backward compatibility to avoid returning nil.
return func(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) {
return streamer(ctx, desc, cc, method, opts...)
}
}
// Chain creates a single interceptor out of a chain of many interceptors.
//
// WithUnaryServerChain is a grpc.Server config option that accepts multiple unary interceptors.
// Basically syntactic sugar.
func WithUnaryServerChain(interceptors ...grpc.UnaryServerInterceptor) grpc.ServerOption {
return grpc.UnaryInterceptor(ChainUnaryServer(interceptors...))
}
// WithStreamServerChain is a grpc.Server config option that accepts multiple stream interceptors.
// Basically syntactic sugar.
func WithStreamServerChain(interceptors ...grpc.StreamServerInterceptor) grpc.ServerOption {
return grpc.StreamInterceptor(ChainStreamServer(interceptors...))
}
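A minimal sketch of the two server options defined above; the interceptor names are placeholders:

```go
srv := grpc.NewServer(
	grpc_middleware.WithUnaryServerChain(loggingUnary, authUnary),
	grpc_middleware.WithStreamServerChain(loggingStream, authStream),
)
```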

View File

@ -0,0 +1,69 @@
// Copyright 2016 Michal Witkowski. All Rights Reserved.
// See LICENSE for licensing terms.
/*
`grpc_middleware` is a collection of gRPC middleware packages: interceptors, helpers and tools.
Middleware
gRPC is a fantastic RPC middleware, which sees a lot of adoption in the Golang world. However, the
upstream gRPC codebase is relatively bare bones.
This package, and most of its child packages, provides commonly needed middleware for gRPC:
client-side interceptors for retries, server-side interceptors for input validation and auth,
functions for chaining said interceptors, metadata convenience methods and more.
Chaining
By default, gRPC doesn't allow more than one interceptor on either the client or the server side.
`grpc_middleware` provides convenient chaining methods: a simple way of turning multiple
interceptors into a single interceptor. Here's an example for server chaining:
myServer := grpc.NewServer(
grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(loggingStream, monitoringStream, authStream)),
grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(loggingUnary, monitoringUnary, authUnary)),
)
These interceptors will be executed from left to right: logging, monitoring and auth.
Here's an example for client side chaining:
clientConn, err = grpc.Dial(
address,
grpc.WithUnaryInterceptor(grpc_middleware.ChainUnaryClient(monitoringClientUnary, retryUnary)),
grpc.WithStreamInterceptor(grpc_middleware.ChainStreamClient(monitoringClientStream, retryStream)),
)
client = pb_testproto.NewTestServiceClient(clientConn)
resp, err := client.PingEmpty(s.ctx, &myservice.Request{Msg: "hello"})
These interceptors will be executed from left to right: monitoring and then retry logic.
The retry interceptor will call every interceptor that follows it whenever a retry happens.
Writing Your Own
Implementing your own interceptor is pretty trivial: there are interfaces for that. But the interesting
bit is exposing common data to handlers (and other middleware), similarly to HTTP middleware design.
For example, you may want to pass the identity of the caller from the auth interceptor all the way
to the handling function.
A server-side unary interceptor for auth, for example, looks like:
func FakeAuthUnaryInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
newCtx := context.WithValue(ctx, "user_id", "john@example.com")
return handler(newCtx, req)
}
Unfortunately, it's not as easy for streaming RPCs. These have the `context.Context` embedded within
the `grpc.ServerStream` object. To pass values through context, a wrapper (`WrappedServerStream`) is
needed. For example:
func FakeAuthStreamingInterceptor(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error {
newStream := grpc_middleware.WrapServerStream(stream)
newStream.WrappedContext = context.WithValue(stream.Context(), "user_id", "john@example.com")
return handler(srv, newStream)
}
*/
package grpc_middleware

View File

@ -0,0 +1,16 @@
SHELL=/bin/bash
GOFILES_NOVENDOR = $(shell go list ./... | grep -v /vendor/)
all: vet fmt test
fmt:
go fmt $(GOFILES_NOVENDOR)
vet:
go vet $(GOFILES_NOVENDOR)
test: vet
./scripts/test_all.sh
.PHONY: all vet fmt test

Binary file not shown (image, 5.0 KiB).

View File

@ -0,0 +1,29 @@
// Copyright 2016 Michal Witkowski. All Rights Reserved.
// See LICENSE for licensing terms.
package grpc_middleware
import (
"golang.org/x/net/context"
"google.golang.org/grpc"
)
// WrappedServerStream is a thin wrapper around grpc.ServerStream that allows modifying context.
type WrappedServerStream struct {
grpc.ServerStream
// WrappedContext is the wrapper's own Context. You can assign it.
WrappedContext context.Context
}
// Context returns the wrapper's WrappedContext, overwriting the nested grpc.ServerStream.Context()
func (w *WrappedServerStream) Context() context.Context {
return w.WrappedContext
}
// WrapServerStream returns a ServerStream that has the ability to overwrite context.
func WrapServerStream(stream grpc.ServerStream) *WrappedServerStream {
if existing, ok := stream.(*WrappedServerStream); ok {
return existing
}
return &WrappedServerStream{ServerStream: stream, WrappedContext: stream.Context()}
}
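
A minimal sketch (not part of the vendored file) of a stream server interceptor that uses WrapServerStream to attach a caller identity to the request context; the userKey type and the value are illustrative:

package middlewareexample

import (
	"context"

	grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
	"google.golang.org/grpc"
)

type userKey struct{}

// authStreamInterceptor wraps the stream so the handler sees a context
// carrying the (hypothetical) authenticated caller.
func authStreamInterceptor(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error {
	wrapped := grpc_middleware.WrapServerStream(ss)
	wrapped.WrappedContext = context.WithValue(ss.Context(), userKey{}, "john@example.com")
	return handler(srv, wrapped)
}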

View File

@ -0,0 +1,27 @@
Copyright (c) 2015, Gengo, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Gengo, Inc. nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,22 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
package(default_visibility = ["//visibility:public"])
proto_library(
name = "internal_proto",
srcs = ["stream_chunk.proto"],
deps = ["@com_google_protobuf//:any_proto"],
)
go_proto_library(
name = "internal_go_proto",
importpath = "github.com/grpc-ecosystem/grpc-gateway/internal",
proto = ":internal_proto",
)
go_library(
name = "go_default_library",
embed = [":internal_go_proto"],
importpath = "github.com/grpc-ecosystem/grpc-gateway/internal",
)

View File

@ -0,0 +1,118 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: internal/stream_chunk.proto
package internal
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import any "github.com/golang/protobuf/ptypes/any"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// StreamError is a response type which is returned when
// streaming rpc returns an error.
type StreamError struct {
GrpcCode int32 `protobuf:"varint,1,opt,name=grpc_code,json=grpcCode,proto3" json:"grpc_code,omitempty"`
HttpCode int32 `protobuf:"varint,2,opt,name=http_code,json=httpCode,proto3" json:"http_code,omitempty"`
Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"`
HttpStatus string `protobuf:"bytes,4,opt,name=http_status,json=httpStatus,proto3" json:"http_status,omitempty"`
Details []*any.Any `protobuf:"bytes,5,rep,name=details,proto3" json:"details,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *StreamError) Reset() { *m = StreamError{} }
func (m *StreamError) String() string { return proto.CompactTextString(m) }
func (*StreamError) ProtoMessage() {}
func (*StreamError) Descriptor() ([]byte, []int) {
return fileDescriptor_stream_chunk_a2afb657504565d7, []int{0}
}
func (m *StreamError) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_StreamError.Unmarshal(m, b)
}
func (m *StreamError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_StreamError.Marshal(b, m, deterministic)
}
func (dst *StreamError) XXX_Merge(src proto.Message) {
xxx_messageInfo_StreamError.Merge(dst, src)
}
func (m *StreamError) XXX_Size() int {
return xxx_messageInfo_StreamError.Size(m)
}
func (m *StreamError) XXX_DiscardUnknown() {
xxx_messageInfo_StreamError.DiscardUnknown(m)
}
var xxx_messageInfo_StreamError proto.InternalMessageInfo
func (m *StreamError) GetGrpcCode() int32 {
if m != nil {
return m.GrpcCode
}
return 0
}
func (m *StreamError) GetHttpCode() int32 {
if m != nil {
return m.HttpCode
}
return 0
}
func (m *StreamError) GetMessage() string {
if m != nil {
return m.Message
}
return ""
}
func (m *StreamError) GetHttpStatus() string {
if m != nil {
return m.HttpStatus
}
return ""
}
func (m *StreamError) GetDetails() []*any.Any {
if m != nil {
return m.Details
}
return nil
}
func init() {
proto.RegisterType((*StreamError)(nil), "grpc.gateway.runtime.StreamError")
}
func init() {
proto.RegisterFile("internal/stream_chunk.proto", fileDescriptor_stream_chunk_a2afb657504565d7)
}
var fileDescriptor_stream_chunk_a2afb657504565d7 = []byte{
// 223 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x34, 0x90, 0x41, 0x4e, 0xc3, 0x30,
0x10, 0x45, 0x15, 0x4a, 0x69, 0x3b, 0xd9, 0x45, 0x5d, 0x18, 0xba, 0x20, 0x62, 0x95, 0x95, 0x23,
0xc1, 0x09, 0x00, 0x71, 0x81, 0x74, 0xc7, 0xa6, 0x9a, 0x26, 0x83, 0x13, 0x91, 0xd8, 0xd1, 0x78,
0x22, 0x94, 0x6b, 0x71, 0xc2, 0xca, 0x8e, 0xb2, 0xf4, 0x7b, 0x7f, 0xbe, 0xbe, 0x0c, 0xa7, 0xce,
0x0a, 0xb1, 0xc5, 0xbe, 0xf4, 0xc2, 0x84, 0xc3, 0xa5, 0x6e, 0x27, 0xfb, 0xab, 0x47, 0x76, 0xe2,
0xb2, 0xa3, 0xe1, 0xb1, 0xd6, 0x06, 0x85, 0xfe, 0x70, 0xd6, 0x3c, 0x59, 0xe9, 0x06, 0x7a, 0x7a,
0x34, 0xce, 0x99, 0x9e, 0xca, 0x98, 0xb9, 0x4e, 0x3f, 0x25, 0xda, 0x79, 0x39, 0x78, 0xf9, 0x4f,
0x20, 0x3d, 0xc7, 0x9e, 0x2f, 0x66, 0xc7, 0xd9, 0x09, 0x0e, 0xa1, 0xe2, 0x52, 0xbb, 0x86, 0x54,
0x92, 0x27, 0xc5, 0xb6, 0xda, 0x07, 0xf0, 0xe9, 0x1a, 0x0a, 0xb2, 0x15, 0x19, 0x17, 0x79, 0xb7,
0xc8, 0x00, 0xa2, 0x54, 0xb0, 0x1b, 0xc8, 0x7b, 0x34, 0xa4, 0x36, 0x79, 0x52, 0x1c, 0xaa, 0xf5,
0x99, 0x3d, 0x43, 0x1a, 0xcf, 0xbc, 0xa0, 0x4c, 0x5e, 0xdd, 0x47, 0x0b, 0x01, 0x9d, 0x23, 0xc9,
0x34, 0xec, 0x1a, 0x12, 0xec, 0x7a, 0xaf, 0xb6, 0xf9, 0xa6, 0x48, 0x5f, 0x8f, 0x7a, 0x59, 0xac,
0xd7, 0xc5, 0xfa, 0xdd, 0xce, 0xd5, 0x1a, 0xfa, 0x80, 0xef, 0xfd, 0xfa, 0x09, 0xd7, 0x87, 0x18,
0x79, 0xbb, 0x05, 0x00, 0x00, 0xff, 0xff, 0x0d, 0x7d, 0xa5, 0x18, 0x17, 0x01, 0x00, 0x00,
}

View File

@ -0,0 +1,15 @@
syntax = "proto3";
package grpc.gateway.runtime;
option go_package = "internal";
import "google/protobuf/any.proto";
// StreamError is a response type which is returned when
// streaming rpc returns an error.
message StreamError {
int32 grpc_code = 1;
int32 http_code = 2;
string message = 3;
string http_status = 4;
repeated google.protobuf.Any details = 5;
}

View File

@ -0,0 +1,84 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
package(default_visibility = ["//visibility:public"])
go_library(
name = "go_default_library",
srcs = [
"context.go",
"convert.go",
"doc.go",
"errors.go",
"fieldmask.go",
"handler.go",
"marshal_httpbodyproto.go",
"marshal_json.go",
"marshal_jsonpb.go",
"marshal_proto.go",
"marshaler.go",
"marshaler_registry.go",
"mux.go",
"pattern.go",
"proto2_convert.go",
"proto_errors.go",
"query.go",
],
importpath = "github.com/grpc-ecosystem/grpc-gateway/runtime",
deps = [
"//internal:go_default_library",
"//utilities:go_default_library",
"@com_github_golang_protobuf//jsonpb:go_default_library_gen",
"@com_github_golang_protobuf//proto:go_default_library",
"@com_github_golang_protobuf//protoc-gen-go/generator:go_default_library_gen",
"@go_googleapis//google/api:httpbody_go_proto",
"@io_bazel_rules_go//proto/wkt:any_go_proto",
"@io_bazel_rules_go//proto/wkt:duration_go_proto",
"@io_bazel_rules_go//proto/wkt:field_mask_go_proto",
"@io_bazel_rules_go//proto/wkt:timestamp_go_proto",
"@io_bazel_rules_go//proto/wkt:wrappers_go_proto",
"@org_golang_google_grpc//codes:go_default_library",
"@org_golang_google_grpc//grpclog:go_default_library",
"@org_golang_google_grpc//metadata:go_default_library",
"@org_golang_google_grpc//status:go_default_library",
],
)
go_test(
name = "go_default_test",
size = "small",
srcs = [
"context_test.go",
"errors_test.go",
"fieldmask_test.go",
"handler_test.go",
"marshal_httpbodyproto_test.go",
"marshal_json_test.go",
"marshal_jsonpb_test.go",
"marshal_proto_test.go",
"marshaler_registry_test.go",
"mux_test.go",
"pattern_test.go",
"query_test.go",
],
embed = [":go_default_library"],
deps = [
"//examples/proto/examplepb:go_default_library",
"//internal:go_default_library",
"//utilities:go_default_library",
"@com_github_golang_protobuf//jsonpb:go_default_library_gen",
"@com_github_golang_protobuf//proto:go_default_library",
"@com_github_golang_protobuf//ptypes:go_default_library_gen",
"@go_googleapis//google/api:httpbody_go_proto",
"@go_googleapis//google/rpc:errdetails_go_proto",
"@io_bazel_rules_go//proto/wkt:duration_go_proto",
"@io_bazel_rules_go//proto/wkt:empty_go_proto",
"@io_bazel_rules_go//proto/wkt:field_mask_go_proto",
"@io_bazel_rules_go//proto/wkt:struct_go_proto",
"@io_bazel_rules_go//proto/wkt:timestamp_go_proto",
"@io_bazel_rules_go//proto/wkt:wrappers_go_proto",
"@org_golang_google_grpc//:go_default_library",
"@org_golang_google_grpc//codes:go_default_library",
"@org_golang_google_grpc//metadata:go_default_library",
"@org_golang_google_grpc//status:go_default_library",
],
)

View File

@ -0,0 +1,210 @@
package runtime
import (
"context"
"encoding/base64"
"fmt"
"net"
"net/http"
"net/textproto"
"strconv"
"strings"
"time"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
)
// MetadataHeaderPrefix is the http prefix that represents custom metadata
// parameters to or from a gRPC call.
const MetadataHeaderPrefix = "Grpc-Metadata-"
// MetadataPrefix is prepended to permanent HTTP header keys (as specified
// by the IANA) when added to the gRPC context.
const MetadataPrefix = "grpcgateway-"
// MetadataTrailerPrefix is prepended to gRPC metadata as it is converted to
// HTTP headers in a response handled by grpc-gateway
const MetadataTrailerPrefix = "Grpc-Trailer-"
const metadataGrpcTimeout = "Grpc-Timeout"
const metadataHeaderBinarySuffix = "-Bin"
const xForwardedFor = "X-Forwarded-For"
const xForwardedHost = "X-Forwarded-Host"
var (
// DefaultContextTimeout is used for gRPC call context.WithTimeout whenever a Grpc-Timeout inbound
// header isn't present. If the value is 0 the sent `context` will not have a timeout.
DefaultContextTimeout = 0 * time.Second
)
func decodeBinHeader(v string) ([]byte, error) {
if len(v)%4 == 0 {
// Input was padded, or padding was not necessary.
return base64.StdEncoding.DecodeString(v)
}
return base64.RawStdEncoding.DecodeString(v)
}
/*
AnnotateContext adds context information such as metadata from the request.
At a minimum, the RemoteAddr is included in the fashion of "X-Forwarded-For",
except that the forwarded destination is not another HTTP service but rather
a gRPC service.
*/
func AnnotateContext(ctx context.Context, mux *ServeMux, req *http.Request) (context.Context, error) {
var pairs []string
timeout := DefaultContextTimeout
if tm := req.Header.Get(metadataGrpcTimeout); tm != "" {
var err error
timeout, err = timeoutDecode(tm)
if err != nil {
return nil, status.Errorf(codes.InvalidArgument, "invalid grpc-timeout: %s", tm)
}
}
for key, vals := range req.Header {
for _, val := range vals {
key = textproto.CanonicalMIMEHeaderKey(key)
// For backwards-compatibility, pass through 'authorization' header with no prefix.
if key == "Authorization" {
pairs = append(pairs, "authorization", val)
}
if h, ok := mux.incomingHeaderMatcher(key); ok {
// Handles "-bin" metadata in grpc, since grpc will do another base64
// encode before sending to server, we need to decode it first.
if strings.HasSuffix(key, metadataHeaderBinarySuffix) {
b, err := decodeBinHeader(val)
if err != nil {
return nil, status.Errorf(codes.InvalidArgument, "invalid binary header %s: %s", key, err)
}
val = string(b)
}
pairs = append(pairs, h, val)
}
}
}
if host := req.Header.Get(xForwardedHost); host != "" {
pairs = append(pairs, strings.ToLower(xForwardedHost), host)
} else if req.Host != "" {
pairs = append(pairs, strings.ToLower(xForwardedHost), req.Host)
}
if addr := req.RemoteAddr; addr != "" {
if remoteIP, _, err := net.SplitHostPort(addr); err == nil {
if fwd := req.Header.Get(xForwardedFor); fwd == "" {
pairs = append(pairs, strings.ToLower(xForwardedFor), remoteIP)
} else {
pairs = append(pairs, strings.ToLower(xForwardedFor), fmt.Sprintf("%s, %s", fwd, remoteIP))
}
} else {
grpclog.Infof("invalid remote addr: %s", addr)
}
}
if timeout != 0 {
ctx, _ = context.WithTimeout(ctx, timeout)
}
if len(pairs) == 0 {
return ctx, nil
}
md := metadata.Pairs(pairs...)
for _, mda := range mux.metadataAnnotators {
md = metadata.Join(md, mda(ctx, req))
}
return metadata.NewOutgoingContext(ctx, md), nil
}
// ServerMetadata consists of metadata sent from gRPC server.
type ServerMetadata struct {
HeaderMD metadata.MD
TrailerMD metadata.MD
}
type serverMetadataKey struct{}
// NewServerMetadataContext creates a new context with ServerMetadata
func NewServerMetadataContext(ctx context.Context, md ServerMetadata) context.Context {
return context.WithValue(ctx, serverMetadataKey{}, md)
}
// ServerMetadataFromContext returns the ServerMetadata in ctx
func ServerMetadataFromContext(ctx context.Context) (md ServerMetadata, ok bool) {
md, ok = ctx.Value(serverMetadataKey{}).(ServerMetadata)
return
}
func timeoutDecode(s string) (time.Duration, error) {
size := len(s)
if size < 2 {
return 0, fmt.Errorf("timeout string is too short: %q", s)
}
d, ok := timeoutUnitToDuration(s[size-1])
if !ok {
return 0, fmt.Errorf("timeout unit is not recognized: %q", s)
}
t, err := strconv.ParseInt(s[:size-1], 10, 64)
if err != nil {
return 0, err
}
return d * time.Duration(t), nil
}
func timeoutUnitToDuration(u uint8) (d time.Duration, ok bool) {
switch u {
case 'H':
return time.Hour, true
case 'M':
return time.Minute, true
case 'S':
return time.Second, true
case 'm':
return time.Millisecond, true
case 'u':
return time.Microsecond, true
case 'n':
return time.Nanosecond, true
default:
}
return
}
// isPermanentHTTPHeader checks whether hdr belongs to the list of
// permanent request headers maintained by IANA.
// http://www.iana.org/assignments/message-headers/message-headers.xml
func isPermanentHTTPHeader(hdr string) bool {
switch hdr {
case
"Accept",
"Accept-Charset",
"Accept-Language",
"Accept-Ranges",
"Authorization",
"Cache-Control",
"Content-Type",
"Cookie",
"Date",
"Expect",
"From",
"Host",
"If-Match",
"If-Modified-Since",
"If-None-Match",
"If-Schedule-Tag-Match",
"If-Unmodified-Since",
"Max-Forwards",
"Origin",
"Pragma",
"Referer",
"User-Agent",
"Via",
"Warning":
return true
}
return false
}
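
A hedged sketch of how AnnotateContext treats incoming headers; runtime.NewServeMux is assumed from the wider grpc-gateway runtime package (it is not shown in this diff), and the header values are illustrative:

package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	req, _ := http.NewRequest("GET", "http://example.com/v1/ping", nil)
	req.Header.Set("Grpc-Timeout", "250m")             // decoded to 250 * time.Millisecond
	req.Header.Set("Grpc-Metadata-Trace-Id", "abc123") // forwarded as gRPC metadata
	req.Header.Set("Authorization", "Bearer t0k3n")    // passed through as "authorization"

	ctx, err := runtime.AnnotateContext(context.Background(), runtime.NewServeMux(), req)
	if err != nil {
		log.Fatal(err)
	}
	deadline, ok := ctx.Deadline()
	log.Println(deadline, ok) // deadline roughly 250ms from now, ok == true
}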

View File

@ -0,0 +1,312 @@
package runtime
import (
"encoding/base64"
"fmt"
"strconv"
"strings"
"github.com/golang/protobuf/jsonpb"
"github.com/golang/protobuf/ptypes/duration"
"github.com/golang/protobuf/ptypes/timestamp"
"github.com/golang/protobuf/ptypes/wrappers"
)
// String just returns the given string.
// It is just for compatibility to other types.
func String(val string) (string, error) {
return val, nil
}
// StringSlice converts 'val' where individual strings are separated by
// 'sep' into a string slice.
func StringSlice(val, sep string) ([]string, error) {
return strings.Split(val, sep), nil
}
// Bool converts the given string representation of a boolean value into bool.
func Bool(val string) (bool, error) {
return strconv.ParseBool(val)
}
// BoolSlice converts 'val' where individual booleans are separated by
// 'sep' into a bool slice.
func BoolSlice(val, sep string) ([]bool, error) {
s := strings.Split(val, sep)
values := make([]bool, len(s))
for i, v := range s {
value, err := Bool(v)
if err != nil {
return values, err
}
values[i] = value
}
return values, nil
}
// Float64 converts the given string representation of a floating point number into float64.
func Float64(val string) (float64, error) {
return strconv.ParseFloat(val, 64)
}
// Float64Slice converts 'val' where individual floating point numbers are separated by
// 'sep' into a float64 slice.
func Float64Slice(val, sep string) ([]float64, error) {
s := strings.Split(val, sep)
values := make([]float64, len(s))
for i, v := range s {
value, err := Float64(v)
if err != nil {
return values, err
}
values[i] = value
}
return values, nil
}
// Float32 converts the given string representation of a floating point number into float32.
func Float32(val string) (float32, error) {
f, err := strconv.ParseFloat(val, 32)
if err != nil {
return 0, err
}
return float32(f), nil
}
// Float32Slice converts 'val' where individual floating point numbers are separated by
// 'sep' into a float32 slice.
func Float32Slice(val, sep string) ([]float32, error) {
s := strings.Split(val, sep)
values := make([]float32, len(s))
for i, v := range s {
value, err := Float32(v)
if err != nil {
return values, err
}
values[i] = value
}
return values, nil
}
// Int64 converts the given string representation of an integer into int64.
func Int64(val string) (int64, error) {
return strconv.ParseInt(val, 0, 64)
}
// Int64Slice converts 'val' where individual integers are separated by
// 'sep' into an int64 slice.
func Int64Slice(val, sep string) ([]int64, error) {
s := strings.Split(val, sep)
values := make([]int64, len(s))
for i, v := range s {
value, err := Int64(v)
if err != nil {
return values, err
}
values[i] = value
}
return values, nil
}
// Int32 converts the given string representation of an integer into int32.
func Int32(val string) (int32, error) {
i, err := strconv.ParseInt(val, 0, 32)
if err != nil {
return 0, err
}
return int32(i), nil
}
// Int32Slice converts 'val' where individual integers are separated by
// 'sep' into an int32 slice.
func Int32Slice(val, sep string) ([]int32, error) {
s := strings.Split(val, sep)
values := make([]int32, len(s))
for i, v := range s {
value, err := Int32(v)
if err != nil {
return values, err
}
values[i] = value
}
return values, nil
}
// Uint64 converts the given string representation of an integer into uint64.
func Uint64(val string) (uint64, error) {
return strconv.ParseUint(val, 0, 64)
}
// Uint64Slice converts 'val' where individual integers are separated by
// 'sep' into a uint64 slice.
func Uint64Slice(val, sep string) ([]uint64, error) {
s := strings.Split(val, sep)
values := make([]uint64, len(s))
for i, v := range s {
value, err := Uint64(v)
if err != nil {
return values, err
}
values[i] = value
}
return values, nil
}
// Uint32 converts the given string representation of an integer into uint32.
func Uint32(val string) (uint32, error) {
i, err := strconv.ParseUint(val, 0, 32)
if err != nil {
return 0, err
}
return uint32(i), nil
}
// Uint32Slice converts 'val' where individual integers are separated by
// 'sep' into a uint32 slice.
func Uint32Slice(val, sep string) ([]uint32, error) {
s := strings.Split(val, sep)
values := make([]uint32, len(s))
for i, v := range s {
value, err := Uint32(v)
if err != nil {
return values, err
}
values[i] = value
}
return values, nil
}
// Bytes converts the given string representation of a byte sequence into a slice of bytes.
// The value may be encoded in standard or URL-safe base64.
func Bytes(val string) ([]byte, error) {
b, err := base64.StdEncoding.DecodeString(val)
if err != nil {
b, err = base64.URLEncoding.DecodeString(val)
if err != nil {
return nil, err
}
}
return b, nil
}
// BytesSlice converts 'val' where individual byte sequences, encoded in standard or
// URL-safe base64, are separated by 'sep' into a slice of byte slices.
func BytesSlice(val, sep string) ([][]byte, error) {
s := strings.Split(val, sep)
values := make([][]byte, len(s))
for i, v := range s {
value, err := Bytes(v)
if err != nil {
return values, err
}
values[i] = value
}
return values, nil
}
// Timestamp converts the given RFC3339 formatted string into a timestamp.Timestamp.
func Timestamp(val string) (*timestamp.Timestamp, error) {
	var r timestamp.Timestamp
	err := jsonpb.UnmarshalString(val, &r)
	return &r, err
}
// Duration converts the given string into a duration.Duration.
func Duration(val string) (*duration.Duration, error) {
	var r duration.Duration
	err := jsonpb.UnmarshalString(val, &r)
	return &r, err
}
// Enum converts the given string into an int32 that should be type casted into the
// correct enum proto type.
func Enum(val string, enumValMap map[string]int32) (int32, error) {
e, ok := enumValMap[val]
if ok {
return e, nil
}
i, err := Int32(val)
if err != nil {
return 0, fmt.Errorf("%s is not valid", val)
}
for _, v := range enumValMap {
if v == i {
return i, nil
}
}
return 0, fmt.Errorf("%s is not valid", val)
}
// EnumSlice converts 'val' where individual enums are separated by 'sep'
// into a int32 slice. Each individual int32 should be type casted into the
// correct enum proto type.
func EnumSlice(val, sep string, enumValMap map[string]int32) ([]int32, error) {
s := strings.Split(val, sep)
values := make([]int32, len(s))
for i, v := range s {
value, err := Enum(v, enumValMap)
if err != nil {
return values, err
}
values[i] = value
}
return values, nil
}
/*
Support for google.protobuf.wrappers on top of primitive types
*/
// StringValue well-known type support as wrapper around string type
func StringValue(val string) (*wrappers.StringValue, error) {
return &wrappers.StringValue{Value: val}, nil
}
// FloatValue well-known type support as wrapper around float32 type
func FloatValue(val string) (*wrappers.FloatValue, error) {
parsedVal, err := Float32(val)
return &wrappers.FloatValue{Value: parsedVal}, err
}
// DoubleValue well-known type support as wrapper around float64 type
func DoubleValue(val string) (*wrappers.DoubleValue, error) {
parsedVal, err := Float64(val)
return &wrappers.DoubleValue{Value: parsedVal}, err
}
// BoolValue well-known type support as wrapper around bool type
func BoolValue(val string) (*wrappers.BoolValue, error) {
parsedVal, err := Bool(val)
return &wrappers.BoolValue{Value: parsedVal}, err
}
// Int32Value well-known type support as wrapper around int32 type
func Int32Value(val string) (*wrappers.Int32Value, error) {
parsedVal, err := Int32(val)
return &wrappers.Int32Value{Value: parsedVal}, err
}
// UInt32Value well-known type support as wrapper around uint32 type
func UInt32Value(val string) (*wrappers.UInt32Value, error) {
parsedVal, err := Uint32(val)
return &wrappers.UInt32Value{Value: parsedVal}, err
}
// Int64Value well-known type support as wrapper around int64 type
func Int64Value(val string) (*wrappers.Int64Value, error) {
parsedVal, err := Int64(val)
return &wrappers.Int64Value{Value: parsedVal}, err
}
// UInt64Value well-known type support as wrapper around uint64 type
func UInt64Value(val string) (*wrappers.UInt64Value, error) {
parsedVal, err := Uint64(val)
return &wrappers.UInt64Value{Value: parsedVal}, err
}
// BytesValue well-known type support as wrapper around bytes[] type
func BytesValue(val string) (*wrappers.BytesValue, error) {
parsedVal, err := Bytes(val)
return &wrappers.BytesValue{Value: parsedVal}, err
}
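
For illustration (not part of the vendored file), a few of the helpers above applied to typical query-string values; the enum map is hypothetical:

package main

import (
	"fmt"
	"log"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	ids, err := runtime.Int32Slice("1,2,3", ",")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(ids) // [1 2 3]

	states := map[string]int32{"UNKNOWN": 0, "ACTIVE": 1}
	s, err := runtime.Enum("ACTIVE", states) // the numeric string "1" would also resolve to 1
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(s) // 1
}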

View File

@ -0,0 +1,5 @@
/*
Package runtime contains runtime helper functions used by
servers which protoc-gen-grpc-gateway generates.
*/
package runtime

View File

@ -0,0 +1,146 @@
package runtime
import (
"context"
"io"
"net/http"
"github.com/golang/protobuf/proto"
"github.com/golang/protobuf/ptypes/any"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/status"
)
// HTTPStatusFromCode converts a gRPC error code into the corresponding HTTP response status.
// See: https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
func HTTPStatusFromCode(code codes.Code) int {
switch code {
case codes.OK:
return http.StatusOK
case codes.Canceled:
return http.StatusRequestTimeout
case codes.Unknown:
return http.StatusInternalServerError
case codes.InvalidArgument:
return http.StatusBadRequest
case codes.DeadlineExceeded:
return http.StatusGatewayTimeout
case codes.NotFound:
return http.StatusNotFound
case codes.AlreadyExists:
return http.StatusConflict
case codes.PermissionDenied:
return http.StatusForbidden
case codes.Unauthenticated:
return http.StatusUnauthorized
case codes.ResourceExhausted:
return http.StatusTooManyRequests
case codes.FailedPrecondition:
// Note, this deliberately doesn't translate to the similarly named '412 Precondition Failed' HTTP response status.
return http.StatusBadRequest
case codes.Aborted:
return http.StatusConflict
case codes.OutOfRange:
return http.StatusBadRequest
case codes.Unimplemented:
return http.StatusNotImplemented
case codes.Internal:
return http.StatusInternalServerError
case codes.Unavailable:
return http.StatusServiceUnavailable
case codes.DataLoss:
return http.StatusInternalServerError
}
grpclog.Infof("Unknown gRPC error code: %v", code)
return http.StatusInternalServerError
}
var (
// HTTPError replies to the request with the error.
// You can set a custom function to this variable to customize error format.
HTTPError = DefaultHTTPError
// OtherErrorHandler handles the following errors used by the gateway: StatusMethodNotAllowed, StatusNotFound, and StatusBadRequest.
OtherErrorHandler = DefaultOtherErrorHandler
)
type errorBody struct {
Error string `protobuf:"bytes,1,name=error" json:"error"`
// This is to make the error more compatible with users that expect errors to be Status objects:
// https://github.com/grpc/grpc/blob/master/src/proto/grpc/status/status.proto
// It should be the exact same message as the Error field.
Message string `protobuf:"bytes,1,name=message" json:"message"`
Code int32 `protobuf:"varint,2,name=code" json:"code"`
Details []*any.Any `protobuf:"bytes,3,rep,name=details" json:"details,omitempty"`
}
// Make this also conform to proto.Message for builtin JSONPb Marshaler
func (e *errorBody) Reset() { *e = errorBody{} }
func (e *errorBody) String() string { return proto.CompactTextString(e) }
func (*errorBody) ProtoMessage() {}
// DefaultHTTPError is the default implementation of HTTPError.
// If "err" is an error from gRPC system, the function replies with the status code mapped by HTTPStatusFromCode.
// If otherwise, it replies with http.StatusInternalServerError.
//
// The response body returned by this function is a JSON object,
// which contains a member whose key is "error" and whose value is err.Error().
func DefaultHTTPError(ctx context.Context, mux *ServeMux, marshaler Marshaler, w http.ResponseWriter, _ *http.Request, err error) {
const fallback = `{"error": "failed to marshal error message"}`
s, ok := status.FromError(err)
if !ok {
s = status.New(codes.Unknown, err.Error())
}
w.Header().Del("Trailer")
contentType := marshaler.ContentType()
// Check the marshaler at run time in order to keep backwards compatibility.
// An interface param needs to be added to the ContentType() function on
// the Marshaler interface to be able to remove this check.
if httpBodyMarshaler, ok := marshaler.(*HTTPBodyMarshaler); ok {
pb := s.Proto()
contentType = httpBodyMarshaler.ContentTypeFromMessage(pb)
}
w.Header().Set("Content-Type", contentType)
body := &errorBody{
Error: s.Message(),
Message: s.Message(),
Code: int32(s.Code()),
Details: s.Proto().GetDetails(),
}
buf, merr := marshaler.Marshal(body)
if merr != nil {
grpclog.Infof("Failed to marshal error message %q: %v", body, merr)
w.WriteHeader(http.StatusInternalServerError)
if _, err := io.WriteString(w, fallback); err != nil {
grpclog.Infof("Failed to write response: %v", err)
}
return
}
md, ok := ServerMetadataFromContext(ctx)
if !ok {
grpclog.Infof("Failed to extract ServerMetadata from context")
}
handleForwardResponseServerMetadata(w, mux, md)
handleForwardResponseTrailerHeader(w, md)
st := HTTPStatusFromCode(s.Code())
w.WriteHeader(st)
if _, err := w.Write(buf); err != nil {
grpclog.Infof("Failed to write response: %v", err)
}
handleForwardResponseTrailer(w, md)
}
// DefaultOtherErrorHandler is the default implementation of OtherErrorHandler.
// It simply writes a string representation of the given error into "w".
func DefaultOtherErrorHandler(w http.ResponseWriter, _ *http.Request, msg string, code int) {
http.Error(w, msg, code)
}
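
As a hedged sketch (not part of this file), the package-level HTTPError hook can be replaced to change the error payload; the JSON shape here is purely illustrative:

package gatewayexample

import (
	"context"
	"encoding/json"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"google.golang.org/grpc/status"
)

func init() {
	runtime.HTTPError = func(ctx context.Context, mux *runtime.ServeMux, m runtime.Marshaler, w http.ResponseWriter, r *http.Request, err error) {
		s, _ := status.FromError(err) // non-status errors map to codes.Unknown
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(runtime.HTTPStatusFromCode(s.Code()))
		_ = json.NewEncoder(w).Encode(map[string]interface{}{
			"code":    s.Code().String(),
			"message": s.Message(),
		})
	}
}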

View File

@ -0,0 +1,70 @@
package runtime
import (
"encoding/json"
"io"
"strings"
"github.com/golang/protobuf/protoc-gen-go/generator"
"google.golang.org/genproto/protobuf/field_mask"
)
// FieldMaskFromRequestBody creates a FieldMask printing all complete paths from the JSON body.
func FieldMaskFromRequestBody(r io.Reader) (*field_mask.FieldMask, error) {
fm := &field_mask.FieldMask{}
var root interface{}
if err := json.NewDecoder(r).Decode(&root); err != nil {
if err == io.EOF {
return fm, nil
}
return nil, err
}
queue := []fieldMaskPathItem{{node: root}}
for len(queue) > 0 {
// dequeue an item
item := queue[0]
queue = queue[1:]
if m, ok := item.node.(map[string]interface{}); ok {
// if the item is an object, then enqueue all of its children
for k, v := range m {
queue = append(queue, fieldMaskPathItem{path: append(item.path, generator.CamelCase(k)), node: v})
}
} else if len(item.path) > 0 {
// otherwise, it's a leaf node so print its path
fm.Paths = append(fm.Paths, strings.Join(item.path, "."))
}
}
return fm, nil
}
// fieldMaskPathItem stores an in-progress deconstruction of a path for a fieldmask
type fieldMaskPathItem struct {
// the list of prior fields leading up to node
path []string
// a generic decoded JSON object of the current item to inspect for further path extraction
node interface{}
}
// CamelCaseFieldMask updates the given FieldMask by converting all of its paths to CamelCase, using the same heuristic
// that's used for naming protobuf fields in Go.
func CamelCaseFieldMask(mask *field_mask.FieldMask) {
if mask == nil || mask.Paths == nil {
return
}
var newPaths []string
for _, path := range mask.Paths {
lowerCasedParts := strings.Split(path, ".")
var camelCasedParts []string
for _, part := range lowerCasedParts {
camelCasedParts = append(camelCasedParts, generator.CamelCase(part))
}
newPaths = append(newPaths, strings.Join(camelCasedParts, "."))
}
mask.Paths = newPaths
}
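
A short sketch (not part of the vendored file) of deriving a field mask from a PATCH body; note that path order follows Go map iteration and is therefore not deterministic:

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	body := strings.NewReader(`{"name": "foo", "address": {"city": "Berlin"}}`)
	fm, err := runtime.FieldMaskFromRequestBody(body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(fm.Paths) // e.g. [Name Address.City] (CamelCased, order not guaranteed)
}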

View File

@ -0,0 +1,209 @@
package runtime
import (
"errors"
"fmt"
"io"
"net/http"
"net/textproto"
"context"
"github.com/golang/protobuf/proto"
"github.com/grpc-ecosystem/grpc-gateway/internal"
"google.golang.org/grpc/grpclog"
)
var errEmptyResponse = errors.New("empty response")
// ForwardResponseStream forwards the stream from gRPC server to REST client.
func ForwardResponseStream(ctx context.Context, mux *ServeMux, marshaler Marshaler, w http.ResponseWriter, req *http.Request, recv func() (proto.Message, error), opts ...func(context.Context, http.ResponseWriter, proto.Message) error) {
f, ok := w.(http.Flusher)
if !ok {
grpclog.Infof("Flush not supported in %T", w)
http.Error(w, "unexpected type of web server", http.StatusInternalServerError)
return
}
md, ok := ServerMetadataFromContext(ctx)
if !ok {
grpclog.Infof("Failed to extract ServerMetadata from context")
http.Error(w, "unexpected error", http.StatusInternalServerError)
return
}
handleForwardResponseServerMetadata(w, mux, md)
w.Header().Set("Transfer-Encoding", "chunked")
w.Header().Set("Content-Type", marshaler.ContentType())
if err := handleForwardResponseOptions(ctx, w, nil, opts); err != nil {
HTTPError(ctx, mux, marshaler, w, req, err)
return
}
var delimiter []byte
if d, ok := marshaler.(Delimited); ok {
delimiter = d.Delimiter()
} else {
delimiter = []byte("\n")
}
var wroteHeader bool
for {
resp, err := recv()
if err == io.EOF {
return
}
if err != nil {
handleForwardResponseStreamError(ctx, wroteHeader, marshaler, w, req, mux, err)
return
}
if err := handleForwardResponseOptions(ctx, w, resp, opts); err != nil {
handleForwardResponseStreamError(ctx, wroteHeader, marshaler, w, req, mux, err)
return
}
buf, err := marshaler.Marshal(streamChunk(ctx, resp, mux.streamErrorHandler))
if err != nil {
grpclog.Infof("Failed to marshal response chunk: %v", err)
handleForwardResponseStreamError(ctx, wroteHeader, marshaler, w, req, mux, err)
return
}
if _, err = w.Write(buf); err != nil {
grpclog.Infof("Failed to send response chunk: %v", err)
return
}
wroteHeader = true
if _, err = w.Write(delimiter); err != nil {
grpclog.Infof("Failed to send delimiter chunk: %v", err)
return
}
f.Flush()
}
}
func handleForwardResponseServerMetadata(w http.ResponseWriter, mux *ServeMux, md ServerMetadata) {
for k, vs := range md.HeaderMD {
if h, ok := mux.outgoingHeaderMatcher(k); ok {
for _, v := range vs {
w.Header().Add(h, v)
}
}
}
}
func handleForwardResponseTrailerHeader(w http.ResponseWriter, md ServerMetadata) {
for k := range md.TrailerMD {
tKey := textproto.CanonicalMIMEHeaderKey(fmt.Sprintf("%s%s", MetadataTrailerPrefix, k))
w.Header().Add("Trailer", tKey)
}
}
func handleForwardResponseTrailer(w http.ResponseWriter, md ServerMetadata) {
for k, vs := range md.TrailerMD {
tKey := fmt.Sprintf("%s%s", MetadataTrailerPrefix, k)
for _, v := range vs {
w.Header().Add(tKey, v)
}
}
}
// responseBody is implemented by response structs generated from a `google.api.HttpRule`
// with a `response_body` value; it exposes the field to marshal as the response body.
type responseBody interface {
XXX_ResponseBody() interface{}
}
// ForwardResponseMessage forwards the message "resp" from gRPC server to REST client.
func ForwardResponseMessage(ctx context.Context, mux *ServeMux, marshaler Marshaler, w http.ResponseWriter, req *http.Request, resp proto.Message, opts ...func(context.Context, http.ResponseWriter, proto.Message) error) {
md, ok := ServerMetadataFromContext(ctx)
if !ok {
grpclog.Infof("Failed to extract ServerMetadata from context")
}
handleForwardResponseServerMetadata(w, mux, md)
handleForwardResponseTrailerHeader(w, md)
contentType := marshaler.ContentType()
// Check the marshaler at run time in order to keep backwards compatibility.
// An interface param needs to be added to the ContentType() function on
// the Marshaler interface to be able to remove this check.
if httpBodyMarshaler, ok := marshaler.(*HTTPBodyMarshaler); ok {
contentType = httpBodyMarshaler.ContentTypeFromMessage(resp)
}
w.Header().Set("Content-Type", contentType)
if err := handleForwardResponseOptions(ctx, w, resp, opts); err != nil {
HTTPError(ctx, mux, marshaler, w, req, err)
return
}
var buf []byte
var err error
if rb, ok := resp.(responseBody); ok {
buf, err = marshaler.Marshal(rb.XXX_ResponseBody())
} else {
buf, err = marshaler.Marshal(resp)
}
if err != nil {
grpclog.Infof("Marshal error: %v", err)
HTTPError(ctx, mux, marshaler, w, req, err)
return
}
if _, err = w.Write(buf); err != nil {
grpclog.Infof("Failed to write response: %v", err)
}
handleForwardResponseTrailer(w, md)
}
func handleForwardResponseOptions(ctx context.Context, w http.ResponseWriter, resp proto.Message, opts []func(context.Context, http.ResponseWriter, proto.Message) error) error {
if len(opts) == 0 {
return nil
}
for _, opt := range opts {
if err := opt(ctx, w, resp); err != nil {
grpclog.Infof("Error handling ForwardResponseOptions: %v", err)
return err
}
}
return nil
}
func handleForwardResponseStreamError(ctx context.Context, wroteHeader bool, marshaler Marshaler, w http.ResponseWriter, req *http.Request, mux *ServeMux, err error) {
serr := streamError(ctx, mux.streamErrorHandler, err)
if !wroteHeader {
w.WriteHeader(int(serr.HttpCode))
}
buf, merr := marshaler.Marshal(errorChunk(serr))
if merr != nil {
grpclog.Infof("Failed to marshal an error: %v", merr)
return
}
if _, werr := w.Write(buf); werr != nil {
grpclog.Infof("Failed to notify error to client: %v", werr)
return
}
}
// streamChunk returns a chunk in a response stream for the given result. The
// given errHandler is used to render an error chunk if result is nil.
func streamChunk(ctx context.Context, result proto.Message, errHandler StreamErrorHandlerFunc) map[string]proto.Message {
if result == nil {
return errorChunk(streamError(ctx, errHandler, errEmptyResponse))
}
return map[string]proto.Message{"result": result}
}
// streamError returns the payload for the final message in a response stream
// that represents the given err.
func streamError(ctx context.Context, errHandler StreamErrorHandlerFunc, err error) *StreamError {
serr := errHandler(ctx, err)
if serr != nil {
return serr
}
// TODO: log about misbehaving stream error handler?
return DefaultHTTPStreamErrorHandler(ctx, err)
}
func errorChunk(err *StreamError) map[string]proto.Message {
return map[string]proto.Message{"error": (*internal.StreamError)(err)}
}
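
For illustration (this is not part of the vendored file), an option matching the opts signature used by ForwardResponseStream and ForwardResponseMessage; registering it through runtime.WithForwardResponseOption is assumed from the wider grpc-gateway runtime package, which is not shown in this diff:

package gatewayexample

import (
	"context"
	"net/http"

	"github.com/golang/protobuf/proto"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

// setNoStore adds a Cache-Control header before the response body is written.
func setNoStore(ctx context.Context, w http.ResponseWriter, _ proto.Message) error {
	w.Header().Set("Cache-Control", "no-store")
	return nil
}

// NewMux wires the option into a ServeMux (WithForwardResponseOption is assumed).
func NewMux() *runtime.ServeMux {
	return runtime.NewServeMux(runtime.WithForwardResponseOption(setNoStore))
}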

View File

@ -0,0 +1,43 @@
package runtime
import (
"google.golang.org/genproto/googleapis/api/httpbody"
)
// SetHTTPBodyMarshaler overwrites the default marshaler with the HTTPBodyMarshaler
func SetHTTPBodyMarshaler(serveMux *ServeMux) {
serveMux.marshalers.mimeMap[MIMEWildcard] = &HTTPBodyMarshaler{
Marshaler: &JSONPb{OrigName: true},
}
}
// HTTPBodyMarshaler is a Marshaler which supports marshaling of a
// google.api.HttpBody message as the full response body if it is
// the actual message used as the response. If not, then this will
// simply fallback to the Marshaler specified as its default Marshaler.
type HTTPBodyMarshaler struct {
Marshaler
}
// ContentType implementation to keep backwards compatibility with the Marshaler interface
func (h *HTTPBodyMarshaler) ContentType() string {
return h.ContentTypeFromMessage(nil)
}
// ContentTypeFromMessage returns the content type specified by v if v is a google.api.HttpBody
// message; otherwise it falls back to the default Marshaler.
func (h *HTTPBodyMarshaler) ContentTypeFromMessage(v interface{}) string {
if httpBody, ok := v.(*httpbody.HttpBody); ok {
return httpBody.GetContentType()
}
return h.Marshaler.ContentType()
}
// Marshal marshals "v" by returning the body bytes if v is a
// google.api.HttpBody message, otherwise it falls back to the default Marshaler.
func (h *HTTPBodyMarshaler) Marshal(v interface{}) ([]byte, error) {
if httpBody, ok := v.(*httpbody.HttpBody); ok {
return httpBody.Data, nil
}
return h.Marshaler.Marshal(v)
}
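
A brief sketch (runtime.NewServeMux is assumed from the wider package, not shown in this diff) of enabling HttpBody passthrough on a gateway mux:

package gatewayexample

import "github.com/grpc-ecosystem/grpc-gateway/runtime"

// NewHTTPBodyMux returns a mux whose wildcard marshaler honors google.api.HttpBody:
// responses of that type are written verbatim with their own content type, while
// every other message still goes through the default JSONPb marshaler.
func NewHTTPBodyMux() *runtime.ServeMux {
	mux := runtime.NewServeMux()
	runtime.SetHTTPBodyMarshaler(mux)
	return mux
}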

Some files were not shown because too many files have changed in this diff.