Remove vendored dependencies

parent f30e720c7b
commit 53cfa99912

.drone1.yml (14 lines changed)

@@ -56,7 +56,7 @@ steps:
 image: vikunja/golang-build:latest
 pull: true
 environment:
-GOFLAGS: '-mod=vendor'
+GOPROXY: 'https://goproxy.kolaente.de'
 commands:
 - make build
 when:

@@ -66,7 +66,7 @@ steps:
 image: vikunja/golang-build:latest
 pull: true
 environment:
-GOFLAGS: '-mod=vendor'
+GOPROXY: 'https://goproxy.kolaente.de'
 depends_on: [ build ]
 commands:
 - make generate

@@ -149,6 +149,8 @@ steps:
 - name: test
 image: vikunja/golang-build:latest
 pull: true
+environment:
+GOPROXY: 'https://goproxy.kolaente.de'
 commands:
 - make generate
 - make test

@@ -160,6 +162,7 @@ steps:
 image: vikunja/golang-build:latest
 pull: true
 environment:
+GOPROXY: 'https://goproxy.kolaente.de'
 VIKUNJA_TESTS_USE_CONFIG: 1
 VIKUNJA_DATABASE_TYPE: sqlite
 commands:

@@ -173,6 +176,7 @@ steps:
 image: vikunja/golang-build:latest
 pull: true
 environment:
+GOPROXY: 'https://goproxy.kolaente.de'
 VIKUNJA_TESTS_USE_CONFIG: 1
 VIKUNJA_DATABASE_TYPE: mysql
 VIKUNJA_DATABASE_HOST: test-mysql-unit

@@ -190,6 +194,7 @@ steps:
 image: vikunja/golang-build:latest
 pull: true
 environment:
+GOPROXY: 'https://goproxy.kolaente.de'
 VIKUNJA_TESTS_USE_CONFIG: 1
 VIKUNJA_DATABASE_TYPE: postgres
 VIKUNJA_DATABASE_HOST: test-postgres-unit

@@ -207,6 +212,8 @@ steps:
 - name: integration-test
 image: vikunja/golang-build:latest
 pull: true
+environment:
+GOPROXY: 'https://goproxy.kolaente.de'
 commands:
 - make generate
 - make integration-test

@@ -218,6 +225,7 @@ steps:
 image: vikunja/golang-build:latest
 pull: true
 environment:
+GOPROXY: 'https://goproxy.kolaente.de'
 VIKUNJA_TESTS_USE_CONFIG: 1
 VIKUNJA_DATABASE_TYPE: sqlite
 commands:

@@ -231,6 +239,7 @@ steps:
 image: vikunja/golang-build:latest
 pull: true
 environment:
+GOPROXY: 'https://goproxy.kolaente.de'
 VIKUNJA_TESTS_USE_CONFIG: 1
 VIKUNJA_DATABASE_TYPE: mysql
 VIKUNJA_DATABASE_HOST: test-mysql-integration

@@ -248,6 +257,7 @@ steps:
 image: vikunja/golang-build:latest
 pull: true
 environment:
+GOPROXY: 'https://goproxy.kolaente.de'
 VIKUNJA_TESTS_USE_CONFIG: 1
 VIKUNJA_DATABASE_TYPE: postgres
 VIKUNJA_DATABASE_HOST: test-postgres-integration
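
Across all of these steps the change is the same: the vendor-mode flag (`GOFLAGS: '-mod=vendor'`) is dropped and module downloads are routed through `GOPROXY: 'https://goproxy.kolaente.de'` instead, with an `environment:` block added to the steps that previously had none. As a rough sketch assembled only from the hunks above (indentation and any keys not shown in this diff are assumptions), a test step now reads roughly like this:

```yaml
# Illustrative sketch of one CI step after this commit, not the literal file contents.
- name: test
  image: vikunja/golang-build:latest
  pull: true
  environment:
    GOPROXY: 'https://goproxy.kolaente.de'  # dependencies are fetched through the proxy instead of vendor/
  commands:
    - make generate
    - make test
```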

.gitignore (vendored; 1 line changed)

@@ -21,3 +21,4 @@ pkg/static/templates_vfsdata.go
 files/
 !pkg/files/
 vikunja-dump*
+vendor/

@@ -6,7 +6,6 @@ FROM golang:1-alpine AS build-env
 ARG VIKUNJA_VERSION
 ENV TAGS "sqlite"
 ENV GO111MODULE=on
-ENV GOFLAGS=-mod=vendor
 
 # Build deps
 RUN apk --no-cache add build-base git

Makefile (6 lines changed)

@@ -13,15 +13,15 @@ else
 endif
 endif
 
-GOFILES := $(shell find . -name "*.go" -type f ! -path "./vendor/*" ! -path "*/bindata.go")
+GOFILES := $(shell find . -name "*.go" -type f ! -path "*/bindata.go")
 GOFMT ?= gofmt -s
 
-GOFLAGS := -v -mod=vendor
+GOFLAGS := -v
 EXTRA_GOFLAGS ?=
 
 LDFLAGS := -X "code.vikunja.io/api/pkg/version.Version=$(shell git describe --tags --always --abbrev=10 | sed 's/-/+/' | sed 's/^v//' | sed 's/-g/-/')" -X "main.Tags=$(TAGS)"
 
-PACKAGES ?= $(filter-out code.vikunja.io/api/pkg/integrations,$(shell go list -mod=vendor ./... | grep -v /vendor/))
+PACKAGES ?= $(filter-out code.vikunja.io/api/pkg/integrations,$(shell go list))
 SOURCES ?= $(shell find . -name "*.go" -type f)
 
 TAGS ?=

@@ -32,9 +32,6 @@ See [testing]({{< ref "test.md">}}).
 
 If you're able to use go modules, you can clone the project wherever you want to and work from there.
 
-However, when building or running tests, please supply the `-mod=vendor` flag to go so it builds using the
-dependencies from the `vendor/` folder.
-
 #### Development-setup without go modules
 
 Some internal packages are referenced using their respective package URL. This can become problematic.

vendor/code.vikunja.io/web/.gitignore (generated, vendored; 1 line deleted)

@@ -1 +0,0 @@
-.idea/

vendor/code.vikunja.io/web/LICENSE (generated, vendored; 165 lines deleted)

Deleted in full: the vendored copy of the GNU Lesser General Public License, version 3 (29 June 2007).

vendor/code.vikunja.io/web/Readme.md (generated, vendored; 229 lines deleted)

Deleted in full: the vendored copy of the "Vikunja Web Handler" Readme, which documents installation via `go get -u code.vikunja.io/web`, the `web.CRUDable` and `web.Rights` interfaces, the handler config (auth provider, logging provider, `MaxItemsPerPage`), pagination and search (the `?s=` query parameter, `x-pagination-total-pages` and `x-pagination-result-count` response headers), route registration through the standard `crud.WebHandler`, `HTTPError`-based error handling, and the URL param binder.

vendor/code.vikunja.io/web/go.mod (generated, vendored; 12 lines deleted)

Deleted in full: the vendored module definition for `code.vikunja.io/web` (go 1.14), requiring `github.com/labstack/echo/v4`, `github.com/op/go-logging`, and several indirect `golang.org/x` packages.

vendor/code.vikunja.io/web/go.sum (generated, vendored; 68 lines deleted)

Deleted in full: the vendored checksum entries for the module's direct and indirect dependencies.

vendor/code.vikunja.io/web/handler/config.go (generated, vendored; 50 lines deleted)

Deleted in full: the vendored handler `Config` struct (auth provider, logging provider, max items per page) and its `SetAuthProvider`, `SetLoggingProvider`, and `SetMaxItemsPerPage` setters.

vendor/code.vikunja.io/web/handler/create.go (generated, vendored; 70 lines deleted)

Deleted in full: the vendored `CreateWeb` handler (bind and validate the model, resolve the current auth object, check `CanCreate`, call `Create`, return the created object as JSON).

vendor/code.vikunja.io/web/handler/delete.go (generated, vendored; 67 lines deleted)

Deleted in full: the vendored `DeleteWeb` handler (bind the model, check `CanDelete`, call `Delete`, return a success message as JSON).

vendor/code.vikunja.io/web/handler/helper.go (generated, vendored; 44 lines deleted)

Deleted in full: the vendored `WebHandler` and `CObject` definitions and the `HandleHTTPError` helper.

vendor/code.vikunja.io/web/handler/read_all.go (generated, vendored; 112 lines deleted)

Deleted in full: the vendored `ReadAllWeb` handler (bind the model, parse the `page`, `per_page`, and `s` query parameters, call `ReadAll`, compute the page count, and set the `x-pagination-total-pages` and `x-pagination-result-count` response headers).

vendor/code.vikunja.io/web/handler/read_one.go (generated, vendored; 63 lines deleted)

Deleted in full: the vendored `ReadOneWeb` handler (bind the model, check `CanRead`, call `ReadOne`, return the object as JSON).

vendor/code.vikunja.io/web/handler/update.go (generated, vendored; 69 lines deleted)

Deleted in full: the vendored `UpdateWeb` handler (bind and validate the model, check `CanUpdate`, call `Update`, return the updated object as JSON).
65 vendor/code.vikunja.io/web/web.go (generated, vendored)
@@ -1,65 +0,0 @@
// Copyright (c) 2018 Vikunja and contributors.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package web

import "github.com/labstack/echo/v4"

// Rights defines rights methods
type Rights interface {
	IsAdmin(Auth) (bool, error)
	CanWrite(Auth) (bool, error)
	CanRead(Auth) (bool, error)
	CanDelete(Auth) (bool, error)
	CanUpdate(Auth) (bool, error)
	CanCreate(Auth) (bool, error)
}

// CRUDable defines the crud methods
type CRUDable interface {
	Create(Auth) error
	ReadOne() error
	ReadAll(auth Auth, search string, page int, perPage int) (result interface{}, resultCount int, numberOfTotalItems int64, err error)
	Update() error
	Delete() error
}

// HTTPErrorProcessor is executed when the defined error is thrown, it will make sure the user sees an appropriate error message and http status code
type HTTPErrorProcessor interface {
	HTTPError() HTTPError
}

// HTTPError holds informations about an http error
type HTTPError struct {
	HTTPCode int    `json:"-"`
	Code     int    `json:"code"`
	Message  string `json:"message"`
}

// Auth defines the authentication interface used to get some auth thing
type Auth interface {
	// Most of the time, we need an ID from the auth object only. Having this method saves the need to cast it.
	GetID() int64
}

// Authprovider is a holder for the implementation of an authprovider by the application
type Authprovider interface {
	GetAuthObject(echo.Context) (Auth, error)
}

// Auths holds the authobject
type Auths struct {
	AuthObject func(echo.Context) (Auth, error)
}
23 vendor/gitea.com/xorm/xorm-redis-cache/.gitignore (generated, vendored)
@@ -1,23 +0,0 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so

# Folders
_obj
_test

# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out

*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*

_testmain.go

*.exe
*.test
28 vendor/gitea.com/xorm/xorm-redis-cache/LICENSE (generated, vendored)
@@ -1,28 +0,0 @@
Copyright (c) 2014, xorm
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the {organization} nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
6 vendor/gitea.com/xorm/xorm-redis-cache/README.md (generated, vendored)
@@ -1,6 +0,0 @@
xorm-redis-cache
================

XORM Redis Cache is a cacher implementation for XORM cache.

[](https://godoc.org/gitea.com/xorm/xorm-redis-cache)
10 vendor/gitea.com/xorm/xorm-redis-cache/go.mod (generated, vendored)
@@ -1,10 +0,0 @@
module gitea.com/xorm/xorm-redis-cache

go 1.13

require (
	gitea.com/xorm/tests v0.7.0
	github.com/garyburd/redigo v1.6.0
	github.com/go-sql-driver/mysql v1.4.1
	xorm.io/xorm v1.0.1
)
173 vendor/gitea.com/xorm/xorm-redis-cache/go.sum (generated, vendored)
@@ -1,173 +0,0 @@
|
|||||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
|
||||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
|
||||||
cloud.google.com/go v0.37.4 h1:glPeL3BQJsbF6aIIYfZizMwc5LTYz250bDMjttbBGAU=
|
|
||||||
cloud.google.com/go v0.37.4/go.mod h1:NHPJ89PdicEuT9hdPXMROBD91xc5uRDxsMtSB16k7hw=
|
|
||||||
gitea.com/xorm/sqlfiddle v0.0.0-20180821085327-62ce714f951a h1:lSA0F4e9A2NcQSqGqTOXqu2aRi/XEQxDCBwM8yJtE6s=
|
|
||||||
gitea.com/xorm/sqlfiddle v0.0.0-20180821085327-62ce714f951a/go.mod h1:EXuID2Zs0pAQhH8yz+DNjUbjppKQzKFAn28TMYPB6IU=
|
|
||||||
gitea.com/xorm/tests v0.7.0 h1:pFcaxTGGAWw3rDuVfhBdyr+mX1uzdTtncyAKxkCQ/IE=
|
|
||||||
gitea.com/xorm/tests v0.7.0/go.mod h1:ngmhQrSBgihBbOqw1hdReSQJAnTlbStYTn0vruUFwDc=
|
|
||||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
|
||||||
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
|
|
||||||
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
|
|
||||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
|
||||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
|
||||||
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
|
|
||||||
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
|
|
||||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
|
||||||
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
|
||||||
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
|
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
|
||||||
github.com/denisenkom/go-mssqldb v0.0.0-20190707035753-2be1aa521ff4 h1:YcpmyvADGYw5LqMnHqSkyIELsHCGF6PkrmM31V8rF7o=
|
|
||||||
github.com/denisenkom/go-mssqldb v0.0.0-20190707035753-2be1aa521ff4/go.mod h1:zAg7JM8CkOJ43xKXIj7eRO9kmWm/TW578qo+oDO6tuM=
|
|
||||||
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
|
|
||||||
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
|
|
||||||
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
|
|
||||||
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
|
|
||||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
|
||||||
github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc=
|
|
||||||
github.com/garyburd/redigo v1.6.0/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
|
|
||||||
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
|
||||||
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
|
|
||||||
github.com/go-sql-driver/mysql v1.4.1 h1:g24URVg0OFbNUTx9qqY1IRZ9D9z3iPyi5zKhQZpNwpA=
|
|
||||||
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
|
|
||||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
|
||||||
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
|
|
||||||
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
|
||||||
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
|
||||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
|
||||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
|
||||||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
|
||||||
github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
|
|
||||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
|
||||||
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db h1:woRePGFeVFfLKN/pOkfl+p/TAqKOfFu+7KPlMVpok/w=
|
|
||||||
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
|
||||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
|
||||||
github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
|
|
||||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
|
||||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
|
||||||
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
|
||||||
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
|
|
||||||
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
|
|
||||||
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
|
|
||||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
|
||||||
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
|
|
||||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
|
||||||
github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ=
|
|
||||||
github.com/jackc/pgx v3.6.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
|
|
||||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
|
||||||
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
|
||||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
|
||||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
|
||||||
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
|
||||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
|
||||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
|
||||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
|
||||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
|
||||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
|
||||||
github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A=
|
|
||||||
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
|
||||||
github.com/mattn/go-oci8 v0.0.0-20191108001511-cbd8d5bc1da0/go.mod h1:/M9VLO+lUPmxvoOK2PfWRZ8mTtB4q1Hy9lEGijv9Nr8=
|
|
||||||
github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK860o=
|
|
||||||
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
|
|
||||||
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
|
||||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
|
||||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
|
||||||
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
|
|
||||||
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
|
||||||
github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU=
|
|
||||||
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
|
||||||
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
|
|
||||||
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
|
||||||
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
|
||||||
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
|
||||||
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
|
|
||||||
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
|
|
||||||
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
|
|
||||||
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
|
|
||||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
|
||||||
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
|
||||||
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
|
||||||
github.com/shopspring/decimal v0.0.0-20191009025716-f1972eb1d1f5/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
|
|
||||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
|
||||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
|
||||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
|
||||||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
|
||||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
|
||||||
github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
|
|
||||||
github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
|
|
||||||
github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs=
|
|
||||||
github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0=
|
|
||||||
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
|
|
||||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
|
||||||
golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c h1:Vj5n4GlwjmQteupaxJ9+0FNOmBrHfq7vN4btdGoDZgI=
|
|
||||||
golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
|
||||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
|
||||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
|
||||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
|
||||||
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
|
||||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
|
||||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
|
||||||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
|
||||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
|
||||||
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
|
||||||
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
|
||||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
|
||||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628=
|
|
||||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
|
||||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
|
||||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
|
||||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
|
||||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
|
||||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
|
||||||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 h1:bjcUS9ztw9kFmmIxJInhon/0Is3p+EHBKNgquIzo1OI=
|
|
||||||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
|
||||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
|
||||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
|
||||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
|
||||||
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
|
||||||
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
|
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
|
||||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2 h1:z99zHgr7hKfrUcX/KsoJk5FJfjTceCKIp96+biqP4To=
|
|
||||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
|
||||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
|
||||||
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
|
||||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
|
||||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
|
||||||
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
|
||||||
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
|
|
||||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
|
||||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
|
||||||
google.golang.org/appengine v1.6.0 h1:Tfd7cKwKbFRsI8RMAD3oqqw7JPFRrvFlOsfbgVkjOOw=
|
|
||||||
google.golang.org/appengine v1.6.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
|
||||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
|
||||||
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
|
||||||
google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
|
||||||
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
|
|
||||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
|
||||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
|
||||||
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
|
|
||||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
|
||||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
|
||||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
|
||||||
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
|
||||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
|
||||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
|
||||||
xorm.io/builder v0.3.7 h1:2pETdKRK+2QG4mLX4oODHEhn5Z8j1m8sXa7jfu+/SZI=
|
|
||||||
xorm.io/builder v0.3.7/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
|
|
||||||
xorm.io/xorm v1.0.1 h1:/lITxpJtkZauNpdzj+L9CN/3OQxZaABrbergMcJu+Cw=
|
|
||||||
xorm.io/xorm v1.0.1/go.mod h1:o4vnEsQ5V2F1/WK6w4XTwmiWJeGj82tqjAnHe44wVHY=
|
|
315 vendor/gitea.com/xorm/xorm-redis-cache/redis_cacher.go (generated, vendored)
@@ -1,315 +0,0 @@
|
|||||||
package xormrediscache
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/gob"
|
|
||||||
"fmt"
|
|
||||||
"hash/crc32"
|
|
||||||
"reflect"
|
|
||||||
"time"
|
|
||||||
"unsafe"
|
|
||||||
|
|
||||||
"github.com/garyburd/redigo/redis"
|
|
||||||
"xorm.io/xorm/caches"
|
|
||||||
"xorm.io/xorm/log"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
DEFAULT_EXPIRATION = time.Duration(0)
|
|
||||||
FOREVER_EXPIRATION = time.Duration(-1)
|
|
||||||
|
|
||||||
LOGGING_PREFIX = "[redis_cacher]"
|
|
||||||
)
|
|
||||||
|
|
||||||
// RedisCacher wraps the Redis client to meet the Cache interface.
|
|
||||||
type RedisCacher struct {
|
|
||||||
pool *redis.Pool
|
|
||||||
defaultExpiration time.Duration
|
|
||||||
|
|
||||||
Logger log.ContextLogger
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewRedisCacher creates a Redis Cacher, host as IP endpoint, i.e., localhost:6379, provide empty string or nil if Redis server doesn't
|
|
||||||
// require AUTH command, defaultExpiration sets the expire duration for a key to live. Until redigo supports
|
|
||||||
// sharding/clustering, only one host will be in hostList
|
|
||||||
//
|
|
||||||
// engine.SetDefaultCacher(xormrediscache.NewRedisCacher("localhost:6379", "", xormrediscache.DEFAULT_EXPIRATION, engine.Logger))
|
|
||||||
//
|
|
||||||
// or set MapCacher
|
|
||||||
//
|
|
||||||
// engine.MapCacher(&user, xormrediscache.NewRedisCacher("localhost:6379", "", xormrediscache.DEFAULT_EXPIRATION, engine.Logger))
|
|
||||||
//
|
|
||||||
func NewRedisCacher(host string, password string, defaultExpiration time.Duration, logger log.ContextLogger) *RedisCacher {
|
|
||||||
var pool = &redis.Pool{
|
|
||||||
MaxIdle: 5,
|
|
||||||
IdleTimeout: 240 * time.Second,
|
|
||||||
Dial: func() (redis.Conn, error) {
|
|
||||||
// the redis protocol should probably be made sett-able
|
|
||||||
c, err := redis.Dial("tcp", host)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if len(password) > 0 {
|
|
||||||
if _, err := c.Do("AUTH", password); err != nil {
|
|
||||||
c.Close()
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// check with PING
|
|
||||||
if _, err := c.Do("PING"); err != nil {
|
|
||||||
c.Close()
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return c, err
|
|
||||||
},
|
|
||||||
// custom connection test method
|
|
||||||
TestOnBorrow: func(c redis.Conn, t time.Time) error {
|
|
||||||
if _, err := c.Do("PING"); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
}
|
|
||||||
return MakeRedisCacher(pool, defaultExpiration, logger)
|
|
||||||
}
|
|
||||||
|
|
||||||
// MakeRedisCacher build a cacher based on redis.Pool
|
|
||||||
func MakeRedisCacher(pool *redis.Pool, defaultExpiration time.Duration, logger log.ContextLogger) *RedisCacher {
|
|
||||||
return &RedisCacher{pool: pool, defaultExpiration: defaultExpiration, Logger: logger}
|
|
||||||
}
|
|
||||||
|
|
||||||
func exists(conn redis.Conn, key string) bool {
|
|
||||||
existed, _ := redis.Bool(conn.Do("EXISTS", key))
|
|
||||||
return existed
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) logErrf(format string, contents ...interface{}) {
|
|
||||||
if c.Logger != nil {
|
|
||||||
c.Logger.Errorf(fmt.Sprintf("%s %s", LOGGING_PREFIX, format), contents...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) logDebugf(format string, contents ...interface{}) {
|
|
||||||
if c.Logger != nil {
|
|
||||||
c.Logger.Debugf(fmt.Sprintf("%s %s", LOGGING_PREFIX, format), contents...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) getBeanKey(tableName string, id string) string {
|
|
||||||
return fmt.Sprintf("xorm:bean:%s:%s", tableName, id)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) getSqlKey(tableName string, sql string) string {
|
|
||||||
// hash sql to minimize key length
|
|
||||||
crc := crc32.ChecksumIEEE([]byte(sql))
|
|
||||||
return fmt.Sprintf("xorm:sql:%s:%d", tableName, crc)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Flush deletes all xorm cached objects
|
|
||||||
func (c *RedisCacher) Flush() error {
|
|
||||||
// conn := c.pool.Get()
|
|
||||||
// defer conn.Close()
|
|
||||||
// _, err := conn.Do("FLUSHALL")
|
|
||||||
// return err
|
|
||||||
return c.delObject("xorm:*")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) getObject(key string) interface{} {
|
|
||||||
conn := c.pool.Get()
|
|
||||||
defer conn.Close()
|
|
||||||
raw, err := conn.Do("GET", key)
|
|
||||||
if raw == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
item, err := redis.Bytes(raw, err)
|
|
||||||
if err != nil {
|
|
||||||
c.logErrf("redis.Bytes failed: %s", err)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
value, err := c.deserialize(item)
|
|
||||||
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) GetIds(tableName, sql string) interface{} {
|
|
||||||
sqlKey := c.getSqlKey(tableName, sql)
|
|
||||||
c.logDebugf(" GetIds|tableName:%s|sql:%s|key:%s", tableName, sql, sqlKey)
|
|
||||||
return c.getObject(sqlKey)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) GetBean(tableName string, id string) interface{} {
|
|
||||||
beanKey := c.getBeanKey(tableName, id)
|
|
||||||
c.logDebugf("[xorm/redis_cacher] GetBean|tableName:%s|id:%s|key:%s", tableName, id, beanKey)
|
|
||||||
return c.getObject(beanKey)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) putObject(key string, value interface{}) {
|
|
||||||
c.invoke(c.pool.Get().Do, key, value, c.defaultExpiration)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) PutIds(tableName, sql string, ids interface{}) {
|
|
||||||
sqlKey := c.getSqlKey(tableName, sql)
|
|
||||||
c.logDebugf("PutIds|tableName:%s|sql:%s|key:%s|obj:%s|type:%v", tableName, sql, sqlKey, ids, reflect.TypeOf(ids))
|
|
||||||
c.putObject(sqlKey, ids)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) PutBean(tableName string, id string, obj interface{}) {
|
|
||||||
beanKey := c.getBeanKey(tableName, id)
|
|
||||||
c.logDebugf("PutBean|tableName:%s|id:%s|key:%s|type:%v", tableName, id, beanKey, reflect.TypeOf(obj))
|
|
||||||
c.putObject(beanKey, obj)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) delObject(key string) error {
|
|
||||||
c.logDebugf("delObject key:[%s]", key)
|
|
||||||
|
|
||||||
conn := c.pool.Get()
|
|
||||||
defer conn.Close()
|
|
||||||
if !exists(conn, key) {
|
|
||||||
c.logErrf("delObject key:[%s] err: %v", key, caches.ErrCacheMiss)
|
|
||||||
return caches.ErrCacheMiss
|
|
||||||
}
|
|
||||||
_, err := conn.Do("DEL", key)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) delObjects(key string) error {
|
|
||||||
|
|
||||||
c.logDebugf("delObjects key:[%s]", key)
|
|
||||||
|
|
||||||
conn := c.pool.Get()
|
|
||||||
defer conn.Close()
|
|
||||||
|
|
||||||
keys, err := conn.Do("KEYS", key)
|
|
||||||
c.logDebugf("delObjects keys: %v", keys)
|
|
||||||
|
|
||||||
if err == nil {
|
|
||||||
for _, key := range keys.([]interface{}) {
|
|
||||||
conn.Do("DEL", key)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) DelIds(tableName, sql string) {
|
|
||||||
c.delObject(c.getSqlKey(tableName, sql))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) DelBean(tableName string, id string) {
|
|
||||||
c.delObject(c.getBeanKey(tableName, id))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) ClearIds(tableName string) {
|
|
||||||
c.delObjects(fmt.Sprintf("xorm:sql:%s:*", tableName))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) ClearBeans(tableName string) {
|
|
||||||
c.delObjects(c.getBeanKey(tableName, "*"))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) invoke(f func(string, ...interface{}) (interface{}, error),
|
|
||||||
key string, value interface{}, expires time.Duration) error {
|
|
||||||
|
|
||||||
switch expires {
|
|
||||||
case DEFAULT_EXPIRATION:
|
|
||||||
expires = c.defaultExpiration
|
|
||||||
case FOREVER_EXPIRATION:
|
|
||||||
expires = time.Duration(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
b, err := c.serialize(value)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
conn := c.pool.Get()
|
|
||||||
defer conn.Close()
|
|
||||||
if expires > 0 {
|
|
||||||
_, err := f("SETEX", key, int32(expires/time.Second), b)
|
|
||||||
return err
|
|
||||||
} else {
|
|
||||||
_, err := f("SET", key, b)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) serialize(value interface{}) ([]byte, error) {
|
|
||||||
|
|
||||||
err := c.registerGobConcreteType(value)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if reflect.TypeOf(value).Kind() == reflect.Struct {
|
|
||||||
return nil, fmt.Errorf("serialize func only take pointer of a struct")
|
|
||||||
}
|
|
||||||
|
|
||||||
var b bytes.Buffer
|
|
||||||
encoder := gob.NewEncoder(&b)
|
|
||||||
|
|
||||||
c.logDebugf("serialize type:%v", reflect.TypeOf(value))
|
|
||||||
err = encoder.Encode(&value)
|
|
||||||
if err != nil {
|
|
||||||
c.logErrf("gob encoding '%s' failed: %s|value:%v", value, err, value)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return b.Bytes(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) deserialize(byt []byte) (ptr interface{}, err error) {
|
|
||||||
b := bytes.NewBuffer(byt)
|
|
||||||
decoder := gob.NewDecoder(b)
|
|
||||||
|
|
||||||
var p interface{}
|
|
||||||
err = decoder.Decode(&p)
|
|
||||||
if err != nil {
|
|
||||||
c.logErrf("decode failed: %v", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
v := reflect.ValueOf(p)
|
|
||||||
c.logDebugf("deserialize type:%v", v.Type())
|
|
||||||
if v.Kind() == reflect.Struct {
|
|
||||||
|
|
||||||
var pp interface{} = &p
|
|
||||||
datas := reflect.ValueOf(pp).Elem().InterfaceData()
|
|
||||||
|
|
||||||
sp := reflect.NewAt(v.Type(),
|
|
||||||
unsafe.Pointer(datas[1])).Interface()
|
|
||||||
ptr = sp
|
|
||||||
vv := reflect.ValueOf(ptr)
|
|
||||||
c.logDebugf("deserialize convert ptr type:%v | CanAddr:%t", vv.Type(), vv.CanAddr())
|
|
||||||
} else {
|
|
||||||
ptr = p
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) registerGobConcreteType(value interface{}) error {
|
|
||||||
|
|
||||||
t := reflect.TypeOf(value)
|
|
||||||
|
|
||||||
c.logDebugf("registerGobConcreteType:%v", t)
|
|
||||||
|
|
||||||
switch t.Kind() {
|
|
||||||
case reflect.Ptr:
|
|
||||||
v := reflect.ValueOf(value)
|
|
||||||
i := v.Elem().Interface()
|
|
||||||
gob.Register(&i)
|
|
||||||
case reflect.Struct, reflect.Map, reflect.Slice:
|
|
||||||
gob.Register(value)
|
|
||||||
case reflect.String, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Bool, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128:
|
|
||||||
// do nothing since already registered known type
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("unhandled type: %v", t)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) GetPool() (*redis.Pool, error) {
|
|
||||||
return c.pool, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *RedisCacher) SetPool(pool *redis.Pool) {
|
|
||||||
c.pool = pool
|
|
||||||
}
|
|
6 vendor/gitea.com/xorm/xorm-redis-cache/run_test.sh (generated, vendored)
@@ -1,6 +0,0 @@
redis-cli FLUSHALL
if [ $? == "0" ];then
	go test -v -run=TestMysqlWithCache
else
	echo "no redis-server running on localhost"
fi
5 vendor/github.com/BurntSushi/toml/.gitignore (generated, vendored)
@@ -1,5 +0,0 @@
TAGS
tags
.*.swp
tomlcheck/tomlcheck
toml.test
15 vendor/github.com/BurntSushi/toml/.travis.yml (generated, vendored)
@@ -1,15 +0,0 @@
language: go
go:
- 1.1
- 1.2
- 1.3
- 1.4
- 1.5
- 1.6
- tip
install:
- go install ./...
- go get github.com/BurntSushi/toml-test
script:
- export PATH="$PATH:$HOME/gopath/bin"
- make test
3 vendor/github.com/BurntSushi/toml/COMPATIBLE (generated, vendored)
@@ -1,3 +0,0 @@
Compatible with TOML version
[v0.4.0](https://github.com/toml-lang/toml/blob/v0.4.0/versions/en/toml-v0.4.0.md)
21 vendor/github.com/BurntSushi/toml/COPYING (generated, vendored)
@@ -1,21 +0,0 @@
The MIT License (MIT)

Copyright (c) 2013 TOML authors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
19 vendor/github.com/BurntSushi/toml/Makefile (generated, vendored)
@@ -1,19 +0,0 @@
install:
	go install ./...

test: install
	go test -v
	toml-test toml-test-decoder
	toml-test -encoder toml-test-encoder

fmt:
	gofmt -w *.go */*.go
	colcheck *.go */*.go

tags:
	find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS

push:
	git push origin master
	git push github master
218 vendor/github.com/BurntSushi/toml/README.md (generated, vendored)
@@ -1,218 +0,0 @@
|
|||||||
## TOML parser and encoder for Go with reflection
|
|
||||||
|
|
||||||
TOML stands for Tom's Obvious, Minimal Language. This Go package provides a
|
|
||||||
reflection interface similar to Go's standard library `json` and `xml`
|
|
||||||
packages. This package also supports the `encoding.TextUnmarshaler` and
|
|
||||||
`encoding.TextMarshaler` interfaces so that you can define custom data
|
|
||||||
representations. (There is an example of this below.)
|
|
||||||
|
|
||||||
Spec: https://github.com/toml-lang/toml
|
|
||||||
|
|
||||||
Compatible with TOML version
|
|
||||||
[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
|
|
||||||
|
|
||||||
Documentation: https://godoc.org/github.com/BurntSushi/toml
|
|
||||||
|
|
||||||
Installation:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
go get github.com/BurntSushi/toml
|
|
||||||
```
|
|
||||||
|
|
||||||
Try the toml validator:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
go get github.com/BurntSushi/toml/cmd/tomlv
|
|
||||||
tomlv some-toml-file.toml
|
|
||||||
```
|
|
||||||
|
|
||||||
[](https://travis-ci.org/BurntSushi/toml) [](https://godoc.org/github.com/BurntSushi/toml)
|
|
||||||
|
|
||||||
### Testing
|
|
||||||
|
|
||||||
This package passes all tests in
|
|
||||||
[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder
|
|
||||||
and the encoder.
|
|
||||||
|
|
||||||
### Examples
|
|
||||||
|
|
||||||
This package works similarly to how the Go standard library handles `XML`
|
|
||||||
and `JSON`. Namely, data is loaded into Go values via reflection.
|
|
||||||
|
|
||||||
For the simplest example, consider some TOML file as just a list of keys
|
|
||||||
and values:
|
|
||||||
|
|
||||||
```toml
|
|
||||||
Age = 25
|
|
||||||
Cats = [ "Cauchy", "Plato" ]
|
|
||||||
Pi = 3.14
|
|
||||||
Perfection = [ 6, 28, 496, 8128 ]
|
|
||||||
DOB = 1987-07-05T05:45:00Z
|
|
||||||
```
|
|
||||||
|
|
||||||
Which could be defined in Go as:
|
|
||||||
|
|
||||||
```go
|
|
||||||
type Config struct {
|
|
||||||
Age int
|
|
||||||
Cats []string
|
|
||||||
Pi float64
|
|
||||||
Perfection []int
|
|
||||||
DOB time.Time // requires `import time`
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
And then decoded with:
|
|
||||||
|
|
||||||
```go
|
|
||||||
var conf Config
|
|
||||||
if _, err := toml.Decode(tomlData, &conf); err != nil {
|
|
||||||
// handle error
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
You can also use struct tags if your struct field name doesn't map to a TOML
|
|
||||||
key value directly:
|
|
||||||
|
|
||||||
```toml
|
|
||||||
some_key_NAME = "wat"
|
|
||||||
```
|
|
||||||
|
|
||||||
```go
|
|
||||||
type TOML struct {
|
|
||||||
ObscureKey string `toml:"some_key_NAME"`
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Using the `encoding.TextUnmarshaler` interface
|
|
||||||
|
|
||||||
Here's an example that automatically parses duration strings into
|
|
||||||
`time.Duration` values:
|
|
||||||
|
|
||||||
```toml
|
|
||||||
[[song]]
|
|
||||||
name = "Thunder Road"
|
|
||||||
duration = "4m49s"
|
|
||||||
|
|
||||||
[[song]]
|
|
||||||
name = "Stairway to Heaven"
|
|
||||||
duration = "8m03s"
|
|
||||||
```
|
|
||||||
|
|
||||||
Which can be decoded with:
|
|
||||||
|
|
||||||
```go
|
|
||||||
type song struct {
|
|
||||||
Name string
|
|
||||||
Duration duration
|
|
||||||
}
|
|
||||||
type songs struct {
|
|
||||||
Song []song
|
|
||||||
}
|
|
||||||
var favorites songs
|
|
||||||
if _, err := toml.Decode(blob, &favorites); err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, s := range favorites.Song {
|
|
||||||
fmt.Printf("%s (%s)\n", s.Name, s.Duration)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
And you'll also need a `duration` type that satisfies the
|
|
||||||
`encoding.TextUnmarshaler` interface:
|
|
||||||
|
|
||||||
```go
|
|
||||||
type duration struct {
|
|
||||||
time.Duration
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *duration) UnmarshalText(text []byte) error {
|
|
||||||
var err error
|
|
||||||
d.Duration, err = time.ParseDuration(string(text))
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### More complex usage
|
|
||||||
|
|
||||||
Here's an example of how to load the example from the official spec page:
|
|
||||||
|
|
||||||
```toml
|
|
||||||
# This is a TOML document. Boom.
|
|
||||||
|
|
||||||
title = "TOML Example"
|
|
||||||
|
|
||||||
[owner]
|
|
||||||
name = "Tom Preston-Werner"
|
|
||||||
organization = "GitHub"
|
|
||||||
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
|
|
||||||
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
|
|
||||||
|
|
||||||
[database]
|
|
||||||
server = "192.168.1.1"
|
|
||||||
ports = [ 8001, 8001, 8002 ]
|
|
||||||
connection_max = 5000
|
|
||||||
enabled = true
|
|
||||||
|
|
||||||
[servers]
|
|
||||||
|
|
||||||
# You can indent as you please. Tabs or spaces. TOML don't care.
|
|
||||||
[servers.alpha]
|
|
||||||
ip = "10.0.0.1"
|
|
||||||
dc = "eqdc10"
|
|
||||||
|
|
||||||
[servers.beta]
|
|
||||||
ip = "10.0.0.2"
|
|
||||||
dc = "eqdc10"
|
|
||||||
|
|
||||||
[clients]
|
|
||||||
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
|
||||||
|
|
||||||
# Line breaks are OK when inside arrays
|
|
||||||
hosts = [
|
|
||||||
"alpha",
|
|
||||||
"omega"
|
|
||||||
]
|
|
||||||
```
|
|
||||||
|
|
||||||
And the corresponding Go types are:
|
|
||||||
|
|
||||||
```go
|
|
||||||
type tomlConfig struct {
|
|
||||||
Title string
|
|
||||||
Owner ownerInfo
|
|
||||||
DB database `toml:"database"`
|
|
||||||
Servers map[string]server
|
|
||||||
Clients clients
|
|
||||||
}
|
|
||||||
|
|
||||||
type ownerInfo struct {
|
|
||||||
Name string
|
|
||||||
Org string `toml:"organization"`
|
|
||||||
Bio string
|
|
||||||
DOB time.Time
|
|
||||||
}
|
|
||||||
|
|
||||||
type database struct {
|
|
||||||
Server string
|
|
||||||
Ports []int
|
|
||||||
ConnMax int `toml:"connection_max"`
|
|
||||||
Enabled bool
|
|
||||||
}
|
|
||||||
|
|
||||||
type server struct {
|
|
||||||
IP string
|
|
||||||
DC string
|
|
||||||
}
|
|
||||||
|
|
||||||
type clients struct {
|
|
||||||
Data [][]interface{}
|
|
||||||
Hosts []string
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Note that a case insensitive match will be tried if an exact match can't be
|
|
||||||
found.
|
|
||||||
|
|
||||||
A working example of the above can be found in `_examples/example.{go,toml}`.
|
|
509 vendor/github.com/BurntSushi/toml/decode.go (generated, vendored)
@@ -1,509 +0,0 @@
|
|||||||
package toml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"io/ioutil"
|
|
||||||
"math"
|
|
||||||
"reflect"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
func e(format string, args ...interface{}) error {
|
|
||||||
return fmt.Errorf("toml: "+format, args...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Unmarshaler is the interface implemented by objects that can unmarshal a
|
|
||||||
// TOML description of themselves.
|
|
||||||
type Unmarshaler interface {
|
|
||||||
UnmarshalTOML(interface{}) error
|
|
||||||
}
|
|
||||||
|
|
||||||
// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`.
|
|
||||||
func Unmarshal(p []byte, v interface{}) error {
|
|
||||||
_, err := Decode(string(p), v)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Primitive is a TOML value that hasn't been decoded into a Go value.
|
|
||||||
// When using the various `Decode*` functions, the type `Primitive` may
|
|
||||||
// be given to any value, and its decoding will be delayed.
|
|
||||||
//
|
|
||||||
// A `Primitive` value can be decoded using the `PrimitiveDecode` function.
|
|
||||||
//
|
|
||||||
// The underlying representation of a `Primitive` value is subject to change.
|
|
||||||
// Do not rely on it.
|
|
||||||
//
|
|
||||||
// N.B. Primitive values are still parsed, so using them will only avoid
|
|
||||||
// the overhead of reflection. They can be useful when you don't know the
|
|
||||||
// exact type of TOML data until run time.
|
|
||||||
type Primitive struct {
|
|
||||||
undecoded interface{}
|
|
||||||
context Key
|
|
||||||
}
|
|
||||||
|
|
||||||
// DEPRECATED!
|
|
||||||
//
|
|
||||||
// Use MetaData.PrimitiveDecode instead.
|
|
||||||
func PrimitiveDecode(primValue Primitive, v interface{}) error {
|
|
||||||
md := MetaData{decoded: make(map[string]bool)}
|
|
||||||
return md.unify(primValue.undecoded, rvalue(v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// PrimitiveDecode is just like the other `Decode*` functions, except it
|
|
||||||
// decodes a TOML value that has already been parsed. Valid primitive values
|
|
||||||
// can *only* be obtained from values filled by the decoder functions,
|
|
||||||
// including this method. (i.e., `v` may contain more `Primitive`
|
|
||||||
// values.)
|
|
||||||
//
|
|
||||||
// Meta data for primitive values is included in the meta data returned by
|
|
||||||
// the `Decode*` functions with one exception: keys returned by the Undecoded
|
|
||||||
// method will only reflect keys that were decoded. Namely, any keys hidden
|
|
||||||
// behind a Primitive will be considered undecoded. Executing this method will
|
|
||||||
// update the undecoded keys in the meta data. (See the example.)
|
|
||||||
func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error {
|
|
||||||
md.context = primValue.context
|
|
||||||
defer func() { md.context = nil }()
|
|
||||||
return md.unify(primValue.undecoded, rvalue(v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Decode will decode the contents of `data` in TOML format into a pointer
|
|
||||||
// `v`.
|
|
||||||
//
|
|
||||||
// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be
|
|
||||||
// used interchangeably.)
|
|
||||||
//
|
|
||||||
// TOML arrays of tables correspond to either a slice of structs or a slice
|
|
||||||
// of maps.
|
|
||||||
//
|
|
||||||
// TOML datetimes correspond to Go `time.Time` values.
|
|
||||||
//
|
|
||||||
// All other TOML types (float, string, int, bool and array) correspond
|
|
||||||
// to the obvious Go types.
|
|
||||||
//
|
|
||||||
// An exception to the above rules is if a type implements the
|
|
||||||
// encoding.TextUnmarshaler interface. In this case, any primitive TOML value
|
|
||||||
// (floats, strings, integers, booleans and datetimes) will be converted to
|
|
||||||
// a byte string and given to the value's UnmarshalText method. See the
|
|
||||||
// Unmarshaler example for a demonstration with time duration strings.
|
|
||||||
//
|
|
||||||
// Key mapping
|
|
||||||
//
|
|
||||||
// TOML keys can map to either keys in a Go map or field names in a Go
|
|
||||||
// struct. The special `toml` struct tag may be used to map TOML keys to
|
|
||||||
// struct fields that don't match the key name exactly. (See the example.)
|
|
||||||
// A case insensitive match to struct names will be tried if an exact match
|
|
||||||
// can't be found.
|
|
||||||
//
|
|
||||||
// The mapping between TOML values and Go values is loose. That is, there
|
|
||||||
// may exist TOML values that cannot be placed into your representation, and
|
|
||||||
// there may be parts of your representation that do not correspond to
|
|
||||||
// TOML values. This loose mapping can be made stricter by using the IsDefined
|
|
||||||
// and/or Undecoded methods on the MetaData returned.
|
|
||||||
//
|
|
||||||
// This decoder will not handle cyclic types. If a cyclic type is passed,
|
|
||||||
// `Decode` will not terminate.
|
|
||||||
func Decode(data string, v interface{}) (MetaData, error) {
|
|
||||||
rv := reflect.ValueOf(v)
|
|
||||||
if rv.Kind() != reflect.Ptr {
|
|
||||||
return MetaData{}, e("Decode of non-pointer %s", reflect.TypeOf(v))
|
|
||||||
}
|
|
||||||
if rv.IsNil() {
|
|
||||||
return MetaData{}, e("Decode of nil %s", reflect.TypeOf(v))
|
|
||||||
}
|
|
||||||
p, err := parse(data)
|
|
||||||
if err != nil {
|
|
||||||
return MetaData{}, err
|
|
||||||
}
|
|
||||||
md := MetaData{
|
|
||||||
p.mapping, p.types, p.ordered,
|
|
||||||
make(map[string]bool, len(p.ordered)), nil,
|
|
||||||
}
|
|
||||||
return md, md.unify(p.mapping, indirect(rv))
|
|
||||||
}
|
|
||||||

// DecodeFile is just like Decode, except it will automatically read the
// contents of the file at `fpath` and decode it for you.
func DecodeFile(fpath string, v interface{}) (MetaData, error) {
	bs, err := ioutil.ReadFile(fpath)
	if err != nil {
		return MetaData{}, err
	}
	return Decode(string(bs), v)
}

// DecodeReader is just like Decode, except it will consume all bytes
// from the reader and decode it for you.
func DecodeReader(r io.Reader, v interface{}) (MetaData, error) {
	bs, err := ioutil.ReadAll(r)
	if err != nil {
		return MetaData{}, err
	}
	return Decode(string(bs), v)
}

// unify performs a sort of type unification based on the structure of `rv`,
// which is the client representation.
//
// Any type mismatch produces an error. Finding a type that we don't know
// how to handle produces an unsupported type error.
func (md *MetaData) unify(data interface{}, rv reflect.Value) error {

	// Special case. Look for a `Primitive` value.
	if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() {
		// Save the undecoded data and the key context into the primitive
		// value.
		context := make(Key, len(md.context))
		copy(context, md.context)
		rv.Set(reflect.ValueOf(Primitive{
			undecoded: data,
			context:   context,
		}))
		return nil
	}

	// Special case. Unmarshaler Interface support.
	if rv.CanAddr() {
		if v, ok := rv.Addr().Interface().(Unmarshaler); ok {
			return v.UnmarshalTOML(data)
		}
	}

	// Special case. Handle time.Time values specifically.
	// TODO: Remove this code when we decide to drop support for Go 1.1.
	// This isn't necessary in Go 1.2 because time.Time satisfies the encoding
	// interfaces.
	if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) {
		return md.unifyDatetime(data, rv)
	}

	// Special case. Look for a value satisfying the TextUnmarshaler interface.
	if v, ok := rv.Interface().(TextUnmarshaler); ok {
		return md.unifyText(data, v)
	}
	// BUG(burntsushi)
	// The behavior here is incorrect whenever a Go type satisfies the
	// encoding.TextUnmarshaler interface but also corresponds to a TOML
	// hash or array. In particular, the unmarshaler should only be applied
	// to primitive TOML values. But at this point, it will be applied to
	// all kinds of values and produce an incorrect error whenever those values
	// are hashes or arrays (including arrays of tables).

	k := rv.Kind()

	// laziness
	if k >= reflect.Int && k <= reflect.Uint64 {
		return md.unifyInt(data, rv)
	}
	switch k {
	case reflect.Ptr:
		elem := reflect.New(rv.Type().Elem())
		err := md.unify(data, reflect.Indirect(elem))
		if err != nil {
			return err
		}
		rv.Set(elem)
		return nil
	case reflect.Struct:
		return md.unifyStruct(data, rv)
	case reflect.Map:
		return md.unifyMap(data, rv)
	case reflect.Array:
		return md.unifyArray(data, rv)
	case reflect.Slice:
		return md.unifySlice(data, rv)
	case reflect.String:
		return md.unifyString(data, rv)
	case reflect.Bool:
		return md.unifyBool(data, rv)
	case reflect.Interface:
		// we only support empty interfaces.
		if rv.NumMethod() > 0 {
			return e("unsupported type %s", rv.Type())
		}
		return md.unifyAnything(data, rv)
	case reflect.Float32:
		fallthrough
	case reflect.Float64:
		return md.unifyFloat64(data, rv)
	}
	return e("unsupported type %s", rv.Kind())
}
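The Primitive special case above is what enables delayed decoding. A hedged sketch of the intended usage, relying on the Primitive type and the MetaData.PrimitiveDecode method exposed elsewhere in this vendored package; the table and field names are invented:

package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

type server struct {
	Host string
}

func main() {
	var doc struct {
		// Decoding of this table is deferred; unify only stores the raw data.
		Server toml.Primitive
	}
	md, err := toml.Decode(`[server]
host = "127.0.0.1"`, &doc)
	if err != nil {
		log.Fatal(err)
	}

	// Later, decode the saved primitive into a concrete type.
	var s server
	if err := md.PrimitiveDecode(doc.Server, &s); err != nil {
		log.Fatal(err)
	}
	fmt.Println(s.Host)
}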

func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error {
	tmap, ok := mapping.(map[string]interface{})
	if !ok {
		if mapping == nil {
			return nil
		}
		return e("type mismatch for %s: expected table but found %T",
			rv.Type().String(), mapping)
	}

	for key, datum := range tmap {
		var f *field
		fields := cachedTypeFields(rv.Type())
		for i := range fields {
			ff := &fields[i]
			if ff.name == key {
				f = ff
				break
			}
			if f == nil && strings.EqualFold(ff.name, key) {
				f = ff
			}
		}
		if f != nil {
			subv := rv
			for _, i := range f.index {
				subv = indirect(subv.Field(i))
			}
			if isUnifiable(subv) {
				md.decoded[md.context.add(key).String()] = true
				md.context = append(md.context, key)
				if err := md.unify(datum, subv); err != nil {
					return err
				}
				md.context = md.context[0 : len(md.context)-1]
			} else if f.name != "" {
				// Bad user! No soup for you!
				return e("cannot write unexported field %s.%s",
					rv.Type().String(), f.name)
			}
		}
	}
	return nil
}

func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error {
	tmap, ok := mapping.(map[string]interface{})
	if !ok {
		if tmap == nil {
			return nil
		}
		return badtype("map", mapping)
	}
	if rv.IsNil() {
		rv.Set(reflect.MakeMap(rv.Type()))
	}
	for k, v := range tmap {
		md.decoded[md.context.add(k).String()] = true
		md.context = append(md.context, k)

		rvkey := indirect(reflect.New(rv.Type().Key()))
		rvval := reflect.Indirect(reflect.New(rv.Type().Elem()))
		if err := md.unify(v, rvval); err != nil {
			return err
		}
		md.context = md.context[0 : len(md.context)-1]

		rvkey.SetString(k)
		rv.SetMapIndex(rvkey, rvval)
	}
	return nil
}

func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error {
	datav := reflect.ValueOf(data)
	if datav.Kind() != reflect.Slice {
		if !datav.IsValid() {
			return nil
		}
		return badtype("slice", data)
	}
	sliceLen := datav.Len()
	if sliceLen != rv.Len() {
		return e("expected array length %d; got TOML array of length %d",
			rv.Len(), sliceLen)
	}
	return md.unifySliceArray(datav, rv)
}

func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error {
	datav := reflect.ValueOf(data)
	if datav.Kind() != reflect.Slice {
		if !datav.IsValid() {
			return nil
		}
		return badtype("slice", data)
	}
	n := datav.Len()
	if rv.IsNil() || rv.Cap() < n {
		rv.Set(reflect.MakeSlice(rv.Type(), n, n))
	}
	rv.SetLen(n)
	return md.unifySliceArray(datav, rv)
}

func (md *MetaData) unifySliceArray(data, rv reflect.Value) error {
	sliceLen := data.Len()
	for i := 0; i < sliceLen; i++ {
		v := data.Index(i).Interface()
		sliceval := indirect(rv.Index(i))
		if err := md.unify(v, sliceval); err != nil {
			return err
		}
	}
	return nil
}

func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error {
	if _, ok := data.(time.Time); ok {
		rv.Set(reflect.ValueOf(data))
		return nil
	}
	return badtype("time.Time", data)
}

func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error {
	if s, ok := data.(string); ok {
		rv.SetString(s)
		return nil
	}
	return badtype("string", data)
}

func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error {
	if num, ok := data.(float64); ok {
		switch rv.Kind() {
		case reflect.Float32:
			fallthrough
		case reflect.Float64:
			rv.SetFloat(num)
		default:
			panic("bug")
		}
		return nil
	}
	return badtype("float", data)
}

func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error {
	if num, ok := data.(int64); ok {
		if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 {
			switch rv.Kind() {
			case reflect.Int, reflect.Int64:
				// No bounds checking necessary.
			case reflect.Int8:
				if num < math.MinInt8 || num > math.MaxInt8 {
					return e("value %d is out of range for int8", num)
				}
			case reflect.Int16:
				if num < math.MinInt16 || num > math.MaxInt16 {
					return e("value %d is out of range for int16", num)
				}
			case reflect.Int32:
				if num < math.MinInt32 || num > math.MaxInt32 {
					return e("value %d is out of range for int32", num)
				}
			}
			rv.SetInt(num)
		} else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 {
			unum := uint64(num)
			switch rv.Kind() {
			case reflect.Uint, reflect.Uint64:
				// No bounds checking necessary.
			case reflect.Uint8:
				if num < 0 || unum > math.MaxUint8 {
					return e("value %d is out of range for uint8", num)
				}
			case reflect.Uint16:
				if num < 0 || unum > math.MaxUint16 {
					return e("value %d is out of range for uint16", num)
				}
			case reflect.Uint32:
				if num < 0 || unum > math.MaxUint32 {
					return e("value %d is out of range for uint32", num)
				}
			}
			rv.SetUint(unum)
		} else {
			panic("unreachable")
		}
		return nil
	}
	return badtype("integer", data)
}
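The bounds checks in unifyInt are what surface range errors at decode time. A small illustrative sketch, with a hypothetical field and key:

package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

func main() {
	var cfg struct {
		Retries int8
	}
	// 300 does not fit into an int8, so unifyInt reports a range error.
	_, err := toml.Decode("retries = 300", &cfg)
	fmt.Println(err)
}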

func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error {
	if b, ok := data.(bool); ok {
		rv.SetBool(b)
		return nil
	}
	return badtype("boolean", data)
}

func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error {
	rv.Set(reflect.ValueOf(data))
	return nil
}

func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error {
	var s string
	switch sdata := data.(type) {
	case TextMarshaler:
		text, err := sdata.MarshalText()
		if err != nil {
			return err
		}
		s = string(text)
	case fmt.Stringer:
		s = sdata.String()
	case string:
		s = sdata
	case bool:
		s = fmt.Sprintf("%v", sdata)
	case int64:
		s = fmt.Sprintf("%d", sdata)
	case float64:
		s = fmt.Sprintf("%f", sdata)
	default:
		return badtype("primitive (string-like)", data)
	}
	if err := v.UnmarshalText([]byte(s)); err != nil {
		return err
	}
	return nil
}
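unifyText is the hook that lets any TextUnmarshaler parse the raw primitive value itself. An illustrative sketch with a hypothetical duration wrapper:

package main

import (
	"fmt"
	"log"
	"time"

	"github.com/BurntSushi/toml"
)

// duration implements encoding.TextUnmarshaler, so the decoder hands it the
// raw string and lets it parse itself.
type duration struct {
	time.Duration
}

func (d *duration) UnmarshalText(text []byte) error {
	var err error
	d.Duration, err = time.ParseDuration(string(text))
	return err
}

func main() {
	var cfg struct {
		Timeout duration
	}
	if _, err := toml.Decode(`timeout = "1m30s"`, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.Timeout.Duration)
}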

// rvalue returns a reflect.Value of `v`. All pointers are resolved.
func rvalue(v interface{}) reflect.Value {
	return indirect(reflect.ValueOf(v))
}

// indirect returns the value pointed to by a pointer.
// Pointers are followed until the value is not a pointer.
// New values are allocated for each nil pointer.
//
// An exception to this rule is if the value satisfies an interface of
// interest to us (like encoding.TextUnmarshaler).
func indirect(v reflect.Value) reflect.Value {
	if v.Kind() != reflect.Ptr {
		if v.CanSet() {
			pv := v.Addr()
			if _, ok := pv.Interface().(TextUnmarshaler); ok {
				return pv
			}
		}
		return v
	}
	if v.IsNil() {
		v.Set(reflect.New(v.Type().Elem()))
	}
	return indirect(reflect.Indirect(v))
}
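Because indirect allocates a fresh value for every nil pointer it follows, pointer fields can be decoded into without being pre-allocated. A short illustrative sketch; the struct shape is invented:

package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

func main() {
	var cfg struct {
		Name *string // left nil; indirect allocates it during decoding
	}
	if _, err := toml.Decode(`name = "vikunja"`, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(*cfg.Name)
}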

func isUnifiable(rv reflect.Value) bool {
	if rv.CanSet() {
		return true
	}
	if _, ok := rv.Interface().(TextUnmarshaler); ok {
		return true
	}
	return false
}

func badtype(expected string, data interface{}) error {
	return e("cannot load TOML value of type %T into a Go %s", data, expected)
}
121  vendor/github.com/BurntSushi/toml/decode_meta.go  (generated, vendored)
@@ -1,121 +0,0 @@
package toml

import "strings"

// MetaData allows access to meta information about TOML data that may not
// be inferrable via reflection. In particular, whether a key has been defined
// and the TOML type of a key.
type MetaData struct {
	mapping map[string]interface{}
	types   map[string]tomlType
	keys    []Key
	decoded map[string]bool
	context Key // Used only during decoding.
}

// IsDefined returns true if the key given exists in the TOML data. The key
// should be specified hierarchically. e.g.,
//
//	// access the TOML key 'a.b.c'
//	IsDefined("a", "b", "c")
//
// IsDefined will return false if an empty key is given. Keys are case sensitive.
func (md *MetaData) IsDefined(key ...string) bool {
	if len(key) == 0 {
		return false
	}

	var hash map[string]interface{}
	var ok bool
	var hashOrVal interface{} = md.mapping
	for _, k := range key {
		if hash, ok = hashOrVal.(map[string]interface{}); !ok {
			return false
		}
		if hashOrVal, ok = hash[k]; !ok {
			return false
		}
	}
	return true
}
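A short illustrative sketch of querying the returned MetaData with IsDefined and the Type method defined just below; the keys are invented, and the exact type-name string is only indicative:

package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

func main() {
	var v map[string]interface{}
	md, err := toml.Decode(`[server]
port = 8080`, &v)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(md.IsDefined("server", "port")) // true
	fmt.Println(md.IsDefined("server", "host")) // false
	fmt.Println(md.Type("server", "port"))      // the TOML type name, e.g. "Integer"
}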

// Type returns a string representation of the type of the key specified.
//
// Type will return the empty string if given an empty key or a key that
// does not exist. Keys are case sensitive.
func (md *MetaData) Type(key ...string) string {
	fullkey := strings.Join(key, ".")
	if typ, ok := md.types[fullkey]; ok {
		return typ.typeString()
	}
	return ""
}

// Key is the type of any TOML key, including key groups. Use (MetaData).Keys
// to get values of this type.
type Key []string

func (k Key) String() string {
	return strings.Join(k, ".")
}

func (k Key) maybeQuotedAll() string {
	var ss []string
	for i := range k {
		ss = append(ss, k.maybeQuoted(i))
	}
	return strings.Join(ss, ".")
}

func (k Key) maybeQuoted(i int) string {
	quote := false
	for _, c := range k[i] {
		if !isBareKeyChar(c) {
			quote = true
			break
		}
	}
	if quote {
		return "\"" + strings.Replace(k[i], "\"", "\\\"", -1) + "\""
	}
	return k[i]
}

func (k Key) add(piece string) Key {
	newKey := make(Key, len(k)+1)
	copy(newKey, k)
	newKey[len(k)] = piece
	return newKey
}

// Keys returns a slice of every key in the TOML data, including key groups.
// Each key is itself a slice, where the first element is the top of the
// hierarchy and the last is the most specific.
//
// The list will have the same order as the keys appeared in the TOML data.
//
// All keys returned are non-empty.
func (md *MetaData) Keys() []Key {
	return md.keys
}

// Undecoded returns all keys that have not been decoded in the order in which
// they appear in the original TOML document.
//
// This includes keys that haven't been decoded because of a Primitive value.
// Once the Primitive value is decoded, the keys will be considered decoded.
//
// Also note that decoding into an empty interface will result in no decoding,
// and so no keys will be considered decoded.
//
// In this sense, the Undecoded keys correspond to keys in the TOML document
// that do not have a concrete type in your representation.
func (md *MetaData) Undecoded() []Key {
	undecoded := make([]Key, 0, len(md.keys))
	for _, key := range md.keys {
		if !md.decoded[key.String()] {
			undecoded = append(undecoded, key)
		}
	}
	return undecoded
}
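Undecoded is commonly used for strict configuration loading: after decoding, any key the Go value did not consume can be reported. An illustrative sketch with invented keys:

package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

func main() {
	var cfg struct {
		Port int
	}
	md, err := toml.Decode("port = 8080\ntypoed_key = true", &cfg)
	if err != nil {
		log.Fatal(err)
	}
	for _, key := range md.Undecoded() {
		fmt.Printf("warning: unknown configuration key %q\n", key)
	}
}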
27  vendor/github.com/BurntSushi/toml/doc.go  (generated, vendored)
@@ -1,27 +0,0 @@
/*
Package toml provides facilities for decoding and encoding TOML configuration
files via reflection. There is also support for delaying decoding with
the Primitive type, and querying the set of keys in a TOML document with the
MetaData type.

The specification implemented: https://github.com/toml-lang/toml

The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify
whether a file is a valid TOML document. It can also be used to print the
type of each key in a TOML document.

Testing

There are two important types of tests used for this package. The first is
contained inside '*_test.go' files and uses the standard Go unit testing
framework. These tests are primarily devoted to holistically testing the
decoder and encoder.

The second type of testing is used to verify the implementation's adherence
to the TOML specification. These tests have been factored into their own
project: https://github.com/BurntSushi/toml-test

The reason the tests are in a separate project is so that they can be used by
any implementation of TOML. Namely, it is language agnostic.
*/
package toml
568  vendor/github.com/BurntSushi/toml/encode.go  (generated, vendored)
@@ -1,568 +0,0 @@
package toml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"reflect"
|
|
||||||
"sort"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
type tomlEncodeError struct{ error }
|
|
||||||
|
|
||||||
var (
|
|
||||||
errArrayMixedElementTypes = errors.New(
|
|
||||||
"toml: cannot encode array with mixed element types")
|
|
||||||
errArrayNilElement = errors.New(
|
|
||||||
"toml: cannot encode array with nil element")
|
|
||||||
errNonString = errors.New(
|
|
||||||
"toml: cannot encode a map with non-string key type")
|
|
||||||
errAnonNonStruct = errors.New(
|
|
||||||
"toml: cannot encode an anonymous field that is not a struct")
|
|
||||||
errArrayNoTable = errors.New(
|
|
||||||
"toml: TOML array element cannot contain a table")
|
|
||||||
errNoKey = errors.New(
|
|
||||||
"toml: top-level values must be Go maps or structs")
|
|
||||||
errAnything = errors.New("") // used in testing
|
|
||||||
)
|
|
||||||
|
|
||||||
var quotedReplacer = strings.NewReplacer(
|
|
||||||
"\t", "\\t",
|
|
||||||
"\n", "\\n",
|
|
||||||
"\r", "\\r",
|
|
||||||
"\"", "\\\"",
|
|
||||||
"\\", "\\\\",
|
|
||||||
)
|
|
||||||
|
|
||||||
// Encoder controls the encoding of Go values to a TOML document to some
|
|
||||||
// io.Writer.
|
|
||||||
//
|
|
||||||
// The indentation level can be controlled with the Indent field.
|
|
||||||
type Encoder struct {
|
|
||||||
// A single indentation level. By default it is two spaces.
|
|
||||||
Indent string
|
|
||||||
|
|
||||||
// hasWritten is whether we have written any output to w yet.
|
|
||||||
hasWritten bool
|
|
||||||
w *bufio.Writer
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer
|
|
||||||
// given. By default, a single indentation level is 2 spaces.
|
|
||||||
func NewEncoder(w io.Writer) *Encoder {
|
|
||||||
return &Encoder{
|
|
||||||
w: bufio.NewWriter(w),
|
|
||||||
Indent: " ",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Encode writes a TOML representation of the Go value to the underlying
|
|
||||||
// io.Writer. If the value given cannot be encoded to a valid TOML document,
|
|
||||||
// then an error is returned.
|
|
||||||
//
|
|
||||||
// The mapping between Go values and TOML values should be precisely the same
|
|
||||||
// as for the Decode* functions. Similarly, the TextMarshaler interface is
|
|
||||||
// supported by encoding the resulting bytes as strings. (If you want to write
|
|
||||||
// arbitrary binary data then you will need to use something like base64 since
|
|
||||||
// TOML does not have any binary types.)
|
|
||||||
//
|
|
||||||
// When encoding TOML hashes (i.e., Go maps or structs), keys without any
|
|
||||||
// sub-hashes are encoded first.
|
|
||||||
//
|
|
||||||
// If a Go map is encoded, then its keys are sorted alphabetically for
|
|
||||||
// deterministic output. More control over this behavior may be provided if
|
|
||||||
// there is demand for it.
|
|
||||||
//
|
|
||||||
// Encoding Go values without a corresponding TOML representation---like map
|
|
||||||
// types with non-string keys---will cause an error to be returned. Similarly
|
|
||||||
// for mixed arrays/slices, arrays/slices with nil elements, embedded
|
|
||||||
// non-struct types and nested slices containing maps or structs.
|
|
||||||
// (e.g., [][]map[string]string is not allowed but []map[string]string is OK
|
|
||||||
// and so is []map[string][]string.)
|
|
||||||
func (enc *Encoder) Encode(v interface{}) error {
|
|
||||||
rv := eindirect(reflect.ValueOf(v))
|
|
||||||
if err := enc.safeEncode(Key([]string{}), rv); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return enc.w.Flush()
|
|
||||||
}
|
|
||||||
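For reference, a minimal illustrative sketch of driving this Encoder; the struct is invented for the example:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

func main() {
	cfg := struct {
		Title string
		Port  int
	}{Title: "example", Port: 8080}

	var buf bytes.Buffer
	if err := toml.NewEncoder(&buf).Encode(cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String())
}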
|
|
||||||
func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) {
|
|
||||||
defer func() {
|
|
||||||
if r := recover(); r != nil {
|
|
||||||
if terr, ok := r.(tomlEncodeError); ok {
|
|
||||||
err = terr.error
|
|
||||||
return
|
|
||||||
}
|
|
||||||
panic(r)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
enc.encode(key, rv)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) encode(key Key, rv reflect.Value) {
|
|
||||||
// Special case. Time needs to be in ISO8601 format.
|
|
||||||
// Special case. If we can marshal the type to text, then we use that.
|
|
||||||
// Basically, this prevents the encoder from handling these types as
|
|
||||||
// generic structs (or whatever the underlying type of a TextMarshaler is).
|
|
||||||
switch rv.Interface().(type) {
|
|
||||||
case time.Time, TextMarshaler:
|
|
||||||
enc.keyEqElement(key, rv)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
k := rv.Kind()
|
|
||||||
switch k {
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
|
||||||
reflect.Int64,
|
|
||||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
|
||||||
reflect.Uint64,
|
|
||||||
reflect.Float32, reflect.Float64, reflect.String, reflect.Bool:
|
|
||||||
enc.keyEqElement(key, rv)
|
|
||||||
case reflect.Array, reflect.Slice:
|
|
||||||
if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) {
|
|
||||||
enc.eArrayOfTables(key, rv)
|
|
||||||
} else {
|
|
||||||
enc.keyEqElement(key, rv)
|
|
||||||
}
|
|
||||||
case reflect.Interface:
|
|
||||||
if rv.IsNil() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
enc.encode(key, rv.Elem())
|
|
||||||
case reflect.Map:
|
|
||||||
if rv.IsNil() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
enc.eTable(key, rv)
|
|
||||||
case reflect.Ptr:
|
|
||||||
if rv.IsNil() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
enc.encode(key, rv.Elem())
|
|
||||||
case reflect.Struct:
|
|
||||||
enc.eTable(key, rv)
|
|
||||||
default:
|
|
||||||
panic(e("unsupported type for key '%s': %s", key, k))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// eElement encodes any value that can be an array element (primitives and
|
|
||||||
// arrays).
|
|
||||||
func (enc *Encoder) eElement(rv reflect.Value) {
|
|
||||||
switch v := rv.Interface().(type) {
|
|
||||||
case time.Time:
|
|
||||||
// Special case time.Time as a primitive. Has to come before
|
|
||||||
// TextMarshaler below because time.Time implements
|
|
||||||
// encoding.TextMarshaler, but we need to always use UTC.
|
|
||||||
enc.wf(v.UTC().Format("2006-01-02T15:04:05Z"))
|
|
||||||
return
|
|
||||||
case TextMarshaler:
|
|
||||||
// Special case. Use text marshaler if it's available for this value.
|
|
||||||
if s, err := v.MarshalText(); err != nil {
|
|
||||||
encPanic(err)
|
|
||||||
} else {
|
|
||||||
enc.writeQuoted(string(s))
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
switch rv.Kind() {
|
|
||||||
case reflect.Bool:
|
|
||||||
enc.wf(strconv.FormatBool(rv.Bool()))
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
|
||||||
reflect.Int64:
|
|
||||||
enc.wf(strconv.FormatInt(rv.Int(), 10))
|
|
||||||
case reflect.Uint, reflect.Uint8, reflect.Uint16,
|
|
||||||
reflect.Uint32, reflect.Uint64:
|
|
||||||
enc.wf(strconv.FormatUint(rv.Uint(), 10))
|
|
||||||
case reflect.Float32:
|
|
||||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32)))
|
|
||||||
case reflect.Float64:
|
|
||||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64)))
|
|
||||||
case reflect.Array, reflect.Slice:
|
|
||||||
enc.eArrayOrSliceElement(rv)
|
|
||||||
case reflect.Interface:
|
|
||||||
enc.eElement(rv.Elem())
|
|
||||||
case reflect.String:
|
|
||||||
enc.writeQuoted(rv.String())
|
|
||||||
default:
|
|
||||||
panic(e("unexpected primitive type: %s", rv.Kind()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// By the TOML spec, all floats must have a decimal with at least one
|
|
||||||
// number on either side.
|
|
||||||
func floatAddDecimal(fstr string) string {
|
|
||||||
if !strings.Contains(fstr, ".") {
|
|
||||||
return fstr + ".0"
|
|
||||||
}
|
|
||||||
return fstr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) writeQuoted(s string) {
|
|
||||||
enc.wf("\"%s\"", quotedReplacer.Replace(s))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) {
|
|
||||||
length := rv.Len()
|
|
||||||
enc.wf("[")
|
|
||||||
for i := 0; i < length; i++ {
|
|
||||||
elem := rv.Index(i)
|
|
||||||
enc.eElement(elem)
|
|
||||||
if i != length-1 {
|
|
||||||
enc.wf(", ")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
enc.wf("]")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) {
|
|
||||||
if len(key) == 0 {
|
|
||||||
encPanic(errNoKey)
|
|
||||||
}
|
|
||||||
for i := 0; i < rv.Len(); i++ {
|
|
||||||
trv := rv.Index(i)
|
|
||||||
if isNil(trv) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
panicIfInvalidKey(key)
|
|
||||||
enc.newline()
|
|
||||||
enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll())
|
|
||||||
enc.newline()
|
|
||||||
enc.eMapOrStruct(key, trv)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) eTable(key Key, rv reflect.Value) {
|
|
||||||
panicIfInvalidKey(key)
|
|
||||||
if len(key) == 1 {
|
|
||||||
// Output an extra newline between top-level tables.
|
|
||||||
// (The newline isn't written if nothing else has been written though.)
|
|
||||||
enc.newline()
|
|
||||||
}
|
|
||||||
if len(key) > 0 {
|
|
||||||
enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll())
|
|
||||||
enc.newline()
|
|
||||||
}
|
|
||||||
enc.eMapOrStruct(key, rv)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) {
|
|
||||||
switch rv := eindirect(rv); rv.Kind() {
|
|
||||||
case reflect.Map:
|
|
||||||
enc.eMap(key, rv)
|
|
||||||
case reflect.Struct:
|
|
||||||
enc.eStruct(key, rv)
|
|
||||||
default:
|
|
||||||
panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) eMap(key Key, rv reflect.Value) {
|
|
||||||
rt := rv.Type()
|
|
||||||
if rt.Key().Kind() != reflect.String {
|
|
||||||
encPanic(errNonString)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sort keys so that we have deterministic output. And write keys directly
|
|
||||||
// underneath this key first, before writing sub-structs or sub-maps.
|
|
||||||
var mapKeysDirect, mapKeysSub []string
|
|
||||||
for _, mapKey := range rv.MapKeys() {
|
|
||||||
k := mapKey.String()
|
|
||||||
if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) {
|
|
||||||
mapKeysSub = append(mapKeysSub, k)
|
|
||||||
} else {
|
|
||||||
mapKeysDirect = append(mapKeysDirect, k)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var writeMapKeys = func(mapKeys []string) {
|
|
||||||
sort.Strings(mapKeys)
|
|
||||||
for _, mapKey := range mapKeys {
|
|
||||||
mrv := rv.MapIndex(reflect.ValueOf(mapKey))
|
|
||||||
if isNil(mrv) {
|
|
||||||
// Don't write anything for nil fields.
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
enc.encode(key.add(mapKey), mrv)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
writeMapKeys(mapKeysDirect)
|
|
||||||
writeMapKeys(mapKeysSub)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) eStruct(key Key, rv reflect.Value) {
|
|
||||||
// Write keys for fields directly under this key first, because if we write
|
|
||||||
// a field that creates a new table, then all keys under it will be in that
|
|
||||||
// table (not the one we're writing here).
|
|
||||||
rt := rv.Type()
|
|
||||||
var fieldsDirect, fieldsSub [][]int
|
|
||||||
var addFields func(rt reflect.Type, rv reflect.Value, start []int)
|
|
||||||
addFields = func(rt reflect.Type, rv reflect.Value, start []int) {
|
|
||||||
for i := 0; i < rt.NumField(); i++ {
|
|
||||||
f := rt.Field(i)
|
|
||||||
// skip unexported fields
|
|
||||||
if f.PkgPath != "" && !f.Anonymous {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
frv := rv.Field(i)
|
|
||||||
if f.Anonymous {
|
|
||||||
t := f.Type
|
|
||||||
switch t.Kind() {
|
|
||||||
case reflect.Struct:
|
|
||||||
// Treat anonymous struct fields with
|
|
||||||
// tag names as though they are not
|
|
||||||
// anonymous, like encoding/json does.
|
|
||||||
if getOptions(f.Tag).name == "" {
|
|
||||||
addFields(t, frv, f.Index)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
case reflect.Ptr:
|
|
||||||
if t.Elem().Kind() == reflect.Struct &&
|
|
||||||
getOptions(f.Tag).name == "" {
|
|
||||||
if !frv.IsNil() {
|
|
||||||
addFields(t.Elem(), frv.Elem(), f.Index)
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// Fall through to the normal field encoding logic below
|
|
||||||
// for non-struct anonymous fields.
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if typeIsHash(tomlTypeOfGo(frv)) {
|
|
||||||
fieldsSub = append(fieldsSub, append(start, f.Index...))
|
|
||||||
} else {
|
|
||||||
fieldsDirect = append(fieldsDirect, append(start, f.Index...))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
addFields(rt, rv, nil)
|
|
||||||
|
|
||||||
var writeFields = func(fields [][]int) {
|
|
||||||
for _, fieldIndex := range fields {
|
|
||||||
sft := rt.FieldByIndex(fieldIndex)
|
|
||||||
sf := rv.FieldByIndex(fieldIndex)
|
|
||||||
if isNil(sf) {
|
|
||||||
// Don't write anything for nil fields.
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
opts := getOptions(sft.Tag)
|
|
||||||
if opts.skip {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
keyName := sft.Name
|
|
||||||
if opts.name != "" {
|
|
||||||
keyName = opts.name
|
|
||||||
}
|
|
||||||
if opts.omitempty && isEmpty(sf) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if opts.omitzero && isZero(sf) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
enc.encode(key.add(keyName), sf)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
writeFields(fieldsDirect)
|
|
||||||
writeFields(fieldsSub)
|
|
||||||
}
|
|
||||||
|
|
||||||
// tomlTypeName returns the TOML type name of the Go value's type. It is
|
|
||||||
// used to determine whether the types of array elements are mixed (which is
|
|
||||||
// forbidden). If the Go value is nil, then it is illegal for it to be an array
|
|
||||||
// element, and valueIsNil is returned as true.
|
|
||||||
|
|
||||||
// Returns the TOML type of a Go value. The type may be `nil`, which means
|
|
||||||
// no concrete TOML type could be found.
|
|
||||||
func tomlTypeOfGo(rv reflect.Value) tomlType {
|
|
||||||
if isNil(rv) || !rv.IsValid() {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
switch rv.Kind() {
|
|
||||||
case reflect.Bool:
|
|
||||||
return tomlBool
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
|
||||||
reflect.Int64,
|
|
||||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
|
||||||
reflect.Uint64:
|
|
||||||
return tomlInteger
|
|
||||||
case reflect.Float32, reflect.Float64:
|
|
||||||
return tomlFloat
|
|
||||||
case reflect.Array, reflect.Slice:
|
|
||||||
if typeEqual(tomlHash, tomlArrayType(rv)) {
|
|
||||||
return tomlArrayHash
|
|
||||||
}
|
|
||||||
return tomlArray
|
|
||||||
case reflect.Ptr, reflect.Interface:
|
|
||||||
return tomlTypeOfGo(rv.Elem())
|
|
||||||
case reflect.String:
|
|
||||||
return tomlString
|
|
||||||
case reflect.Map:
|
|
||||||
return tomlHash
|
|
||||||
case reflect.Struct:
|
|
||||||
switch rv.Interface().(type) {
|
|
||||||
case time.Time:
|
|
||||||
return tomlDatetime
|
|
||||||
case TextMarshaler:
|
|
||||||
return tomlString
|
|
||||||
default:
|
|
||||||
return tomlHash
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
panic("unexpected reflect.Kind: " + rv.Kind().String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// tomlArrayType returns the element type of a TOML array. The type returned
|
|
||||||
// may be nil if it cannot be determined (e.g., a nil slice or a zero length
|
|
||||||
// slice). This function may also panic if it finds a type that cannot be
|
|
||||||
// expressed in TOML (such as nil elements, heterogeneous arrays or directly
|
|
||||||
// nested arrays of tables).
|
|
||||||
func tomlArrayType(rv reflect.Value) tomlType {
|
|
||||||
if isNil(rv) || !rv.IsValid() || rv.Len() == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
firstType := tomlTypeOfGo(rv.Index(0))
|
|
||||||
if firstType == nil {
|
|
||||||
encPanic(errArrayNilElement)
|
|
||||||
}
|
|
||||||
|
|
||||||
rvlen := rv.Len()
|
|
||||||
for i := 1; i < rvlen; i++ {
|
|
||||||
elem := rv.Index(i)
|
|
||||||
switch elemType := tomlTypeOfGo(elem); {
|
|
||||||
case elemType == nil:
|
|
||||||
encPanic(errArrayNilElement)
|
|
||||||
case !typeEqual(firstType, elemType):
|
|
||||||
encPanic(errArrayMixedElementTypes)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// If we have a nested array, then we must make sure that the nested
|
|
||||||
// array contains ONLY primitives.
|
|
||||||
// This checks arbitrarily nested arrays.
|
|
||||||
if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) {
|
|
||||||
nest := tomlArrayType(eindirect(rv.Index(0)))
|
|
||||||
if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) {
|
|
||||||
encPanic(errArrayNoTable)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return firstType
|
|
||||||
}
|
|
||||||
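tomlArrayType is the reason arrays with mixed element types refuse to encode. A quick illustrative check:

package main

import (
	"bytes"
	"fmt"

	"github.com/BurntSushi/toml"
)

func main() {
	var buf bytes.Buffer
	err := toml.NewEncoder(&buf).Encode(map[string]interface{}{
		"values": []interface{}{1, "two"}, // an integer and a string in one array
	})
	fmt.Println(err)
}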
|
|
||||||
type tagOptions struct {
|
|
||||||
skip bool // "-"
|
|
||||||
name string
|
|
||||||
omitempty bool
|
|
||||||
omitzero bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func getOptions(tag reflect.StructTag) tagOptions {
|
|
||||||
t := tag.Get("toml")
|
|
||||||
if t == "-" {
|
|
||||||
return tagOptions{skip: true}
|
|
||||||
}
|
|
||||||
var opts tagOptions
|
|
||||||
parts := strings.Split(t, ",")
|
|
||||||
opts.name = parts[0]
|
|
||||||
for _, s := range parts[1:] {
|
|
||||||
switch s {
|
|
||||||
case "omitempty":
|
|
||||||
opts.omitempty = true
|
|
||||||
case "omitzero":
|
|
||||||
opts.omitzero = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return opts
|
|
||||||
}
|
|
||||||
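These tag options mirror the conventions of encoding/json. An illustrative sketch of the struct-tag forms getOptions understands; the field names are invented:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

type account struct {
	Name     string `toml:"name"`             // rename the key
	Email    string `toml:"email,omitempty"`  // drop empty strings
	Retries  int    `toml:"retries,omitzero"` // drop zero numbers
	Password string `toml:"-"`                // never encode
}

func main() {
	var buf bytes.Buffer
	if err := toml.NewEncoder(&buf).Encode(account{Name: "demo"}); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String()) // only: name = "demo"
}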
|
|
||||||
func isZero(rv reflect.Value) bool {
|
|
||||||
switch rv.Kind() {
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
|
||||||
return rv.Int() == 0
|
|
||||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
|
||||||
return rv.Uint() == 0
|
|
||||||
case reflect.Float32, reflect.Float64:
|
|
||||||
return rv.Float() == 0.0
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func isEmpty(rv reflect.Value) bool {
|
|
||||||
switch rv.Kind() {
|
|
||||||
case reflect.Array, reflect.Slice, reflect.Map, reflect.String:
|
|
||||||
return rv.Len() == 0
|
|
||||||
case reflect.Bool:
|
|
||||||
return !rv.Bool()
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) newline() {
|
|
||||||
if enc.hasWritten {
|
|
||||||
enc.wf("\n")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) keyEqElement(key Key, val reflect.Value) {
|
|
||||||
if len(key) == 0 {
|
|
||||||
encPanic(errNoKey)
|
|
||||||
}
|
|
||||||
panicIfInvalidKey(key)
|
|
||||||
enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1))
|
|
||||||
enc.eElement(val)
|
|
||||||
enc.newline()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) wf(format string, v ...interface{}) {
|
|
||||||
if _, err := fmt.Fprintf(enc.w, format, v...); err != nil {
|
|
||||||
encPanic(err)
|
|
||||||
}
|
|
||||||
enc.hasWritten = true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (enc *Encoder) indentStr(key Key) string {
|
|
||||||
return strings.Repeat(enc.Indent, len(key)-1)
|
|
||||||
}
|
|
||||||
|
|
||||||
func encPanic(err error) {
|
|
||||||
panic(tomlEncodeError{err})
|
|
||||||
}
|
|
||||||
|
|
||||||
func eindirect(v reflect.Value) reflect.Value {
|
|
||||||
switch v.Kind() {
|
|
||||||
case reflect.Ptr, reflect.Interface:
|
|
||||||
return eindirect(v.Elem())
|
|
||||||
default:
|
|
||||||
return v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func isNil(rv reflect.Value) bool {
|
|
||||||
switch rv.Kind() {
|
|
||||||
case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
|
||||||
return rv.IsNil()
|
|
||||||
default:
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func panicIfInvalidKey(key Key) {
|
|
||||||
for _, k := range key {
|
|
||||||
if len(k) == 0 {
|
|
||||||
encPanic(e("Key '%s' is not a valid table name. Key names "+
|
|
||||||
"cannot be empty.", key.maybeQuotedAll()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func isValidKeyName(s string) bool {
|
|
||||||
return len(s) != 0
|
|
||||||
}
|
|
19  vendor/github.com/BurntSushi/toml/encoding_types.go  (generated, vendored)
@@ -1,19 +0,0 @@
// +build go1.2

package toml

// In order to support Go 1.1, we define our own TextMarshaler and
// TextUnmarshaler types. For Go 1.2+, we just alias them with the
// standard library interfaces.

import (
	"encoding"
)

// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
// so that Go 1.1 can be supported.
type TextMarshaler encoding.TextMarshaler

// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined
// here so that Go 1.1 can be supported.
type TextUnmarshaler encoding.TextUnmarshaler
18  vendor/github.com/BurntSushi/toml/encoding_types_1.1.go  (generated, vendored)
@@ -1,18 +0,0 @@
// +build !go1.2

package toml

// These interfaces were introduced in Go 1.2, so we add them manually when
// compiling for Go 1.1.

// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
// so that Go 1.1 can be supported.
type TextMarshaler interface {
	MarshalText() (text []byte, err error)
}

// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined
// here so that Go 1.1 can be supported.
type TextUnmarshaler interface {
	UnmarshalText(text []byte) error
}
953  vendor/github.com/BurntSushi/toml/lex.go  (generated, vendored)
@@ -1,953 +0,0 @@
package toml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
type itemType int
|
|
||||||
|
|
||||||
const (
|
|
||||||
itemError itemType = iota
|
|
||||||
itemNIL // used in the parser to indicate no type
|
|
||||||
itemEOF
|
|
||||||
itemText
|
|
||||||
itemString
|
|
||||||
itemRawString
|
|
||||||
itemMultilineString
|
|
||||||
itemRawMultilineString
|
|
||||||
itemBool
|
|
||||||
itemInteger
|
|
||||||
itemFloat
|
|
||||||
itemDatetime
|
|
||||||
itemArray // the start of an array
|
|
||||||
itemArrayEnd
|
|
||||||
itemTableStart
|
|
||||||
itemTableEnd
|
|
||||||
itemArrayTableStart
|
|
||||||
itemArrayTableEnd
|
|
||||||
itemKeyStart
|
|
||||||
itemCommentStart
|
|
||||||
itemInlineTableStart
|
|
||||||
itemInlineTableEnd
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
eof = 0
|
|
||||||
comma = ','
|
|
||||||
tableStart = '['
|
|
||||||
tableEnd = ']'
|
|
||||||
arrayTableStart = '['
|
|
||||||
arrayTableEnd = ']'
|
|
||||||
tableSep = '.'
|
|
||||||
keySep = '='
|
|
||||||
arrayStart = '['
|
|
||||||
arrayEnd = ']'
|
|
||||||
commentStart = '#'
|
|
||||||
stringStart = '"'
|
|
||||||
stringEnd = '"'
|
|
||||||
rawStringStart = '\''
|
|
||||||
rawStringEnd = '\''
|
|
||||||
inlineTableStart = '{'
|
|
||||||
inlineTableEnd = '}'
|
|
||||||
)
|
|
||||||
|
|
||||||
type stateFn func(lx *lexer) stateFn
|
|
||||||
|
|
||||||
type lexer struct {
|
|
||||||
input string
|
|
||||||
start int
|
|
||||||
pos int
|
|
||||||
line int
|
|
||||||
state stateFn
|
|
||||||
items chan item
|
|
||||||
|
|
||||||
// Allow for backing up up to three runes.
|
|
||||||
// This is necessary because TOML contains 3-rune tokens (""" and ''').
|
|
||||||
prevWidths [3]int
|
|
||||||
nprev int // how many of prevWidths are in use
|
|
||||||
// If we emit an eof, we can still back up, but it is not OK to call
|
|
||||||
// next again.
|
|
||||||
atEOF bool
|
|
||||||
|
|
||||||
// A stack of state functions used to maintain context.
|
|
||||||
// The idea is to reuse parts of the state machine in various places.
|
|
||||||
// For example, values can appear at the top level or within arbitrarily
|
|
||||||
// nested arrays. The last state on the stack is used after a value has
|
|
||||||
// been lexed. Similarly for comments.
|
|
||||||
stack []stateFn
|
|
||||||
}
|
|
||||||
|
|
||||||
type item struct {
|
|
||||||
typ itemType
|
|
||||||
val string
|
|
||||||
line int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lx *lexer) nextItem() item {
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case item := <-lx.items:
|
|
||||||
return item
|
|
||||||
default:
|
|
||||||
lx.state = lx.state(lx)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
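The lexer follows the state-function pattern: each stateFn consumes some input, may emit items, and returns the next state, while nextItem drives the machine only far enough to produce one item. A stripped-down, self-contained sketch of that pattern, not this lexer's actual states:

package main

import "fmt"

type stateFn func(*machine) stateFn

type machine struct {
	input string
	pos   int
	items chan string
}

// lexWord emits runs of non-space characters and returns the next state.
func lexWord(m *machine) stateFn {
	start := m.pos
	for m.pos < len(m.input) && m.input[m.pos] != ' ' {
		m.pos++
	}
	if m.pos > start {
		m.items <- m.input[start:m.pos]
	}
	if m.pos >= len(m.input) {
		close(m.items)
		return nil
	}
	m.pos++ // skip the space
	return lexWord
}

func main() {
	m := &machine{input: "state function lexer", items: make(chan string, 10)}
	for state := stateFn(lexWord); state != nil; {
		state = state(m)
	}
	for item := range m.items {
		fmt.Println(item)
	}
}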
|
|
||||||
func lex(input string) *lexer {
|
|
||||||
lx := &lexer{
|
|
||||||
input: input,
|
|
||||||
state: lexTop,
|
|
||||||
line: 1,
|
|
||||||
items: make(chan item, 10),
|
|
||||||
stack: make([]stateFn, 0, 10),
|
|
||||||
}
|
|
||||||
return lx
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lx *lexer) push(state stateFn) {
|
|
||||||
lx.stack = append(lx.stack, state)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lx *lexer) pop() stateFn {
|
|
||||||
if len(lx.stack) == 0 {
|
|
||||||
return lx.errorf("BUG in lexer: no states to pop")
|
|
||||||
}
|
|
||||||
last := lx.stack[len(lx.stack)-1]
|
|
||||||
lx.stack = lx.stack[0 : len(lx.stack)-1]
|
|
||||||
return last
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lx *lexer) current() string {
|
|
||||||
return lx.input[lx.start:lx.pos]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lx *lexer) emit(typ itemType) {
|
|
||||||
lx.items <- item{typ, lx.current(), lx.line}
|
|
||||||
lx.start = lx.pos
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lx *lexer) emitTrim(typ itemType) {
|
|
||||||
lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line}
|
|
||||||
lx.start = lx.pos
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lx *lexer) next() (r rune) {
|
|
||||||
if lx.atEOF {
|
|
||||||
panic("next called after EOF")
|
|
||||||
}
|
|
||||||
if lx.pos >= len(lx.input) {
|
|
||||||
lx.atEOF = true
|
|
||||||
return eof
|
|
||||||
}
|
|
||||||
|
|
||||||
if lx.input[lx.pos] == '\n' {
|
|
||||||
lx.line++
|
|
||||||
}
|
|
||||||
lx.prevWidths[2] = lx.prevWidths[1]
|
|
||||||
lx.prevWidths[1] = lx.prevWidths[0]
|
|
||||||
if lx.nprev < 3 {
|
|
||||||
lx.nprev++
|
|
||||||
}
|
|
||||||
r, w := utf8.DecodeRuneInString(lx.input[lx.pos:])
|
|
||||||
lx.prevWidths[0] = w
|
|
||||||
lx.pos += w
|
|
||||||
return r
|
|
||||||
}
|
|
||||||
|
|
||||||
// ignore skips over the pending input before this point.
|
|
||||||
func (lx *lexer) ignore() {
|
|
||||||
lx.start = lx.pos
|
|
||||||
}
|
|
||||||
|
|
||||||
// backup steps back one rune. Can be called only twice between calls to next.
|
|
||||||
func (lx *lexer) backup() {
|
|
||||||
if lx.atEOF {
|
|
||||||
lx.atEOF = false
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if lx.nprev < 1 {
|
|
||||||
panic("backed up too far")
|
|
||||||
}
|
|
||||||
w := lx.prevWidths[0]
|
|
||||||
lx.prevWidths[0] = lx.prevWidths[1]
|
|
||||||
lx.prevWidths[1] = lx.prevWidths[2]
|
|
||||||
lx.nprev--
|
|
||||||
lx.pos -= w
|
|
||||||
if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' {
|
|
||||||
lx.line--
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// accept consumes the next rune if it's equal to `valid`.
|
|
||||||
func (lx *lexer) accept(valid rune) bool {
|
|
||||||
if lx.next() == valid {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
lx.backup()
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// peek returns but does not consume the next rune in the input.
|
|
||||||
func (lx *lexer) peek() rune {
|
|
||||||
r := lx.next()
|
|
||||||
lx.backup()
|
|
||||||
return r
|
|
||||||
}
|
|
||||||
|
|
||||||
// skip ignores all input that matches the given predicate.
|
|
||||||
func (lx *lexer) skip(pred func(rune) bool) {
|
|
||||||
for {
|
|
||||||
r := lx.next()
|
|
||||||
if pred(r) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
lx.backup()
|
|
||||||
lx.ignore()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// errorf stops all lexing by emitting an error and returning `nil`.
|
|
||||||
// Note that any value that is a character is escaped if it's a special
|
|
||||||
// character (newlines, tabs, etc.).
|
|
||||||
func (lx *lexer) errorf(format string, values ...interface{}) stateFn {
|
|
||||||
lx.items <- item{
|
|
||||||
itemError,
|
|
||||||
fmt.Sprintf(format, values...),
|
|
||||||
lx.line,
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexTop consumes elements at the top level of TOML data.
|
|
||||||
func lexTop(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
if isWhitespace(r) || isNL(r) {
|
|
||||||
return lexSkip(lx, lexTop)
|
|
||||||
}
|
|
||||||
switch r {
|
|
||||||
case commentStart:
|
|
||||||
lx.push(lexTop)
|
|
||||||
return lexCommentStart
|
|
||||||
case tableStart:
|
|
||||||
return lexTableStart
|
|
||||||
case eof:
|
|
||||||
if lx.pos > lx.start {
|
|
||||||
return lx.errorf("unexpected EOF")
|
|
||||||
}
|
|
||||||
lx.emit(itemEOF)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// At this point, the only valid item can be a key, so we back up
|
|
||||||
// and let the key lexer do the rest.
|
|
||||||
lx.backup()
|
|
||||||
lx.push(lexTopEnd)
|
|
||||||
return lexKeyStart
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexTopEnd is entered whenever a top-level item has been consumed. (A value
|
|
||||||
// or a table.) It must see only whitespace, and will turn back to lexTop
|
|
||||||
// upon a newline. If it sees EOF, it will quit the lexer successfully.
|
|
||||||
func lexTopEnd(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
switch {
|
|
||||||
case r == commentStart:
|
|
||||||
// a comment will read to a newline for us.
|
|
||||||
lx.push(lexTop)
|
|
||||||
return lexCommentStart
|
|
||||||
case isWhitespace(r):
|
|
||||||
return lexTopEnd
|
|
||||||
case isNL(r):
|
|
||||||
lx.ignore()
|
|
||||||
return lexTop
|
|
||||||
case r == eof:
|
|
||||||
lx.emit(itemEOF)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return lx.errorf("expected a top-level item to end with a newline, "+
|
|
||||||
"comment, or EOF, but got %q instead", r)
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexTable lexes the beginning of a table. Namely, it makes sure that
|
|
||||||
// it starts with a character other than '.' and ']'.
|
|
||||||
// It assumes that '[' has already been consumed.
|
|
||||||
// It also handles the case that this is an item in an array of tables.
|
|
||||||
// e.g., '[[name]]'.
|
|
||||||
func lexTableStart(lx *lexer) stateFn {
|
|
||||||
if lx.peek() == arrayTableStart {
|
|
||||||
lx.next()
|
|
||||||
lx.emit(itemArrayTableStart)
|
|
||||||
lx.push(lexArrayTableEnd)
|
|
||||||
} else {
|
|
||||||
lx.emit(itemTableStart)
|
|
||||||
lx.push(lexTableEnd)
|
|
||||||
}
|
|
||||||
return lexTableNameStart
|
|
||||||
}
|
|
||||||
|
|
||||||
func lexTableEnd(lx *lexer) stateFn {
|
|
||||||
lx.emit(itemTableEnd)
|
|
||||||
return lexTopEnd
|
|
||||||
}
|
|
||||||
|
|
||||||
func lexArrayTableEnd(lx *lexer) stateFn {
|
|
||||||
if r := lx.next(); r != arrayTableEnd {
|
|
||||||
return lx.errorf("expected end of table array name delimiter %q, "+
|
|
||||||
"but got %q instead", arrayTableEnd, r)
|
|
||||||
}
|
|
||||||
lx.emit(itemArrayTableEnd)
|
|
||||||
return lexTopEnd
|
|
||||||
}
|
|
||||||
|
|
||||||
func lexTableNameStart(lx *lexer) stateFn {
|
|
||||||
lx.skip(isWhitespace)
|
|
||||||
switch r := lx.peek(); {
|
|
||||||
case r == tableEnd || r == eof:
|
|
||||||
return lx.errorf("unexpected end of table name " +
|
|
||||||
"(table names cannot be empty)")
|
|
||||||
case r == tableSep:
|
|
||||||
return lx.errorf("unexpected table separator " +
|
|
||||||
"(table names cannot be empty)")
|
|
||||||
case r == stringStart || r == rawStringStart:
|
|
||||||
lx.ignore()
|
|
||||||
lx.push(lexTableNameEnd)
|
|
||||||
return lexValue // reuse string lexing
|
|
||||||
default:
|
|
||||||
return lexBareTableName
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexBareTableName lexes the name of a table. It assumes that at least one
|
|
||||||
// valid character for the table has already been read.
|
|
||||||
func lexBareTableName(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
if isBareKeyChar(r) {
|
|
||||||
return lexBareTableName
|
|
||||||
}
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemText)
|
|
||||||
return lexTableNameEnd
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexTableNameEnd reads the end of a piece of a table name, optionally
|
|
||||||
// consuming whitespace.
|
|
||||||
func lexTableNameEnd(lx *lexer) stateFn {
|
|
||||||
lx.skip(isWhitespace)
|
|
||||||
switch r := lx.next(); {
|
|
||||||
case isWhitespace(r):
|
|
||||||
return lexTableNameEnd
|
|
||||||
case r == tableSep:
|
|
||||||
lx.ignore()
|
|
||||||
return lexTableNameStart
|
|
||||||
case r == tableEnd:
|
|
||||||
return lx.pop()
|
|
||||||
default:
|
|
||||||
return lx.errorf("expected '.' or ']' to end table name, "+
|
|
||||||
"but got %q instead", r)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexKeyStart consumes a key name up until the first non-whitespace character.
|
|
||||||
// lexKeyStart will ignore whitespace.
|
|
||||||
func lexKeyStart(lx *lexer) stateFn {
|
|
||||||
r := lx.peek()
|
|
||||||
switch {
|
|
||||||
case r == keySep:
|
|
||||||
return lx.errorf("unexpected key separator %q", keySep)
|
|
||||||
case isWhitespace(r) || isNL(r):
|
|
||||||
lx.next()
|
|
||||||
return lexSkip(lx, lexKeyStart)
|
|
||||||
case r == stringStart || r == rawStringStart:
|
|
||||||
lx.ignore()
|
|
||||||
lx.emit(itemKeyStart)
|
|
||||||
lx.push(lexKeyEnd)
|
|
||||||
return lexValue // reuse string lexing
|
|
||||||
default:
|
|
||||||
lx.ignore()
|
|
||||||
lx.emit(itemKeyStart)
|
|
||||||
return lexBareKey
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexBareKey consumes the text of a bare key. Assumes that the first character
|
|
||||||
// (which is not whitespace) has not yet been consumed.
|
|
||||||
func lexBareKey(lx *lexer) stateFn {
|
|
||||||
switch r := lx.next(); {
|
|
||||||
case isBareKeyChar(r):
|
|
||||||
return lexBareKey
|
|
||||||
case isWhitespace(r):
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemText)
|
|
||||||
return lexKeyEnd
|
|
||||||
case r == keySep:
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemText)
|
|
||||||
return lexKeyEnd
|
|
||||||
default:
|
|
||||||
return lx.errorf("bare keys cannot contain %q", r)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexKeyEnd consumes the end of a key and trims whitespace (up to the key
|
|
||||||
// separator).
|
|
||||||
func lexKeyEnd(lx *lexer) stateFn {
|
|
||||||
switch r := lx.next(); {
|
|
||||||
case r == keySep:
|
|
||||||
return lexSkip(lx, lexValue)
|
|
||||||
case isWhitespace(r):
|
|
||||||
return lexSkip(lx, lexKeyEnd)
|
|
||||||
default:
|
|
||||||
return lx.errorf("expected key separator %q, but got %q instead",
|
|
||||||
keySep, r)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexValue starts the consumption of a value anywhere a value is expected.
|
|
||||||
// lexValue will ignore whitespace.
|
|
||||||
// After a value is lexed, the last state on the stack is popped and returned.
|
|
||||||
func lexValue(lx *lexer) stateFn {
|
|
||||||
// We allow whitespace to precede a value, but NOT newlines.
|
|
||||||
// In array syntax, the array states are responsible for ignoring newlines.
|
|
||||||
r := lx.next()
|
|
||||||
switch {
|
|
||||||
case isWhitespace(r):
|
|
||||||
return lexSkip(lx, lexValue)
|
|
||||||
case isDigit(r):
|
|
||||||
lx.backup() // avoid an extra state and use the same as above
|
|
||||||
return lexNumberOrDateStart
|
|
||||||
}
|
|
||||||
switch r {
|
|
||||||
case arrayStart:
|
|
||||||
lx.ignore()
|
|
||||||
lx.emit(itemArray)
|
|
||||||
return lexArrayValue
|
|
||||||
case inlineTableStart:
|
|
||||||
lx.ignore()
|
|
||||||
lx.emit(itemInlineTableStart)
|
|
||||||
return lexInlineTableValue
|
|
||||||
case stringStart:
|
|
||||||
if lx.accept(stringStart) {
|
|
||||||
if lx.accept(stringStart) {
|
|
||||||
lx.ignore() // Ignore """
|
|
||||||
return lexMultilineString
|
|
||||||
}
|
|
||||||
lx.backup()
|
|
||||||
}
|
|
||||||
lx.ignore() // ignore the '"'
|
|
||||||
return lexString
|
|
||||||
case rawStringStart:
|
|
||||||
if lx.accept(rawStringStart) {
|
|
||||||
if lx.accept(rawStringStart) {
|
|
||||||
lx.ignore() // Ignore '''
|
|
||||||
return lexMultilineRawString
|
|
||||||
}
|
|
||||||
lx.backup()
|
|
||||||
}
|
|
||||||
lx.ignore() // ignore the "'"
|
|
||||||
return lexRawString
|
|
||||||
case '+', '-':
|
|
||||||
return lexNumberStart
|
|
||||||
case '.': // special error case, be kind to users
|
|
||||||
return lx.errorf("floats must start with a digit, not '.'")
|
|
||||||
}
|
|
||||||
if unicode.IsLetter(r) {
|
|
||||||
// Be permissive here; lexBool will give a nice error if the
|
|
||||||
// user wrote something like
|
|
||||||
// x = foo
|
|
||||||
// (i.e. not 'true' or 'false' but is something else word-like.)
|
|
||||||
lx.backup()
|
|
||||||
return lexBool
|
|
||||||
}
|
|
||||||
return lx.errorf("expected value but found %q instead", r)
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexArrayValue consumes one value in an array. It assumes that '[' or ','
|
|
||||||
// have already been consumed. All whitespace and newlines are ignored.
|
|
||||||
func lexArrayValue(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
switch {
|
|
||||||
case isWhitespace(r) || isNL(r):
|
|
||||||
return lexSkip(lx, lexArrayValue)
|
|
||||||
case r == commentStart:
|
|
||||||
lx.push(lexArrayValue)
|
|
||||||
return lexCommentStart
|
|
||||||
case r == comma:
|
|
||||||
return lx.errorf("unexpected comma")
|
|
||||||
case r == arrayEnd:
|
|
||||||
// NOTE(caleb): The spec isn't clear about whether you can have
|
|
||||||
// a trailing comma or not, so we'll allow it.
|
|
||||||
return lexArrayEnd
|
|
||||||
}
|
|
||||||
|
|
||||||
lx.backup()
|
|
||||||
lx.push(lexArrayValueEnd)
|
|
||||||
return lexValue
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexArrayValueEnd consumes everything between the end of an array value and
|
|
||||||
// the next value (or the end of the array): it ignores whitespace and newlines
|
|
||||||
// and expects either a ',' or a ']'.
|
|
||||||
func lexArrayValueEnd(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
switch {
|
|
||||||
case isWhitespace(r) || isNL(r):
|
|
||||||
return lexSkip(lx, lexArrayValueEnd)
|
|
||||||
case r == commentStart:
|
|
||||||
lx.push(lexArrayValueEnd)
|
|
||||||
return lexCommentStart
|
|
||||||
case r == comma:
|
|
||||||
lx.ignore()
|
|
||||||
return lexArrayValue // move on to the next value
|
|
||||||
case r == arrayEnd:
|
|
||||||
return lexArrayEnd
|
|
||||||
}
|
|
||||||
return lx.errorf(
|
|
||||||
"expected a comma or array terminator %q, but got %q instead",
|
|
||||||
arrayEnd, r,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexArrayEnd finishes the lexing of an array.
|
|
||||||
// It assumes that a ']' has just been consumed.
|
|
||||||
func lexArrayEnd(lx *lexer) stateFn {
|
|
||||||
lx.ignore()
|
|
||||||
lx.emit(itemArrayEnd)
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexInlineTableValue consumes one key/value pair in an inline table.
// It assumes that '{' or ',' have already been consumed. Whitespace is ignored.
func lexInlineTableValue(lx *lexer) stateFn {
	r := lx.next()
	switch {
	case isWhitespace(r):
		return lexSkip(lx, lexInlineTableValue)
	case isNL(r):
		return lx.errorf("newlines not allowed within inline tables")
	case r == commentStart:
		lx.push(lexInlineTableValue)
		return lexCommentStart
	case r == comma:
		return lx.errorf("unexpected comma")
	case r == inlineTableEnd:
		return lexInlineTableEnd
	}
	lx.backup()
	lx.push(lexInlineTableValueEnd)
	return lexKeyStart
}
|
|
||||||
|
|
||||||
// lexInlineTableValueEnd consumes everything between the end of an inline table
|
|
||||||
// key/value pair and the next pair (or the end of the table):
|
|
||||||
// it ignores whitespace and expects either a ',' or a '}'.
|
|
||||||
func lexInlineTableValueEnd(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
switch {
|
|
||||||
case isWhitespace(r):
|
|
||||||
return lexSkip(lx, lexInlineTableValueEnd)
|
|
||||||
case isNL(r):
|
|
||||||
return lx.errorf("newlines not allowed within inline tables")
|
|
||||||
case r == commentStart:
|
|
||||||
lx.push(lexInlineTableValueEnd)
|
|
||||||
return lexCommentStart
|
|
||||||
case r == comma:
|
|
||||||
lx.ignore()
|
|
||||||
return lexInlineTableValue
|
|
||||||
case r == inlineTableEnd:
|
|
||||||
return lexInlineTableEnd
|
|
||||||
}
|
|
||||||
return lx.errorf("expected a comma or an inline table terminator %q, "+
|
|
||||||
"but got %q instead", inlineTableEnd, r)
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexInlineTableEnd finishes the lexing of an inline table.
|
|
||||||
// It assumes that a '}' has just been consumed.
|
|
||||||
func lexInlineTableEnd(lx *lexer) stateFn {
|
|
||||||
lx.ignore()
|
|
||||||
lx.emit(itemInlineTableEnd)
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexString consumes the inner contents of a string. It assumes that the
|
|
||||||
// beginning '"' has already been consumed and ignored.
|
|
||||||
func lexString(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
switch {
|
|
||||||
case r == eof:
|
|
||||||
return lx.errorf("unexpected EOF")
|
|
||||||
case isNL(r):
|
|
||||||
return lx.errorf("strings cannot contain newlines")
|
|
||||||
case r == '\\':
|
|
||||||
lx.push(lexString)
|
|
||||||
return lexStringEscape
|
|
||||||
case r == stringEnd:
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemString)
|
|
||||||
lx.next()
|
|
||||||
lx.ignore()
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
return lexString
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexMultilineString consumes the inner contents of a string. It assumes that
|
|
||||||
// the beginning '"""' has already been consumed and ignored.
|
|
||||||
func lexMultilineString(lx *lexer) stateFn {
|
|
||||||
switch lx.next() {
|
|
||||||
case eof:
|
|
||||||
return lx.errorf("unexpected EOF")
|
|
||||||
case '\\':
|
|
||||||
return lexMultilineStringEscape
|
|
||||||
case stringEnd:
|
|
||||||
if lx.accept(stringEnd) {
|
|
||||||
if lx.accept(stringEnd) {
|
|
||||||
lx.backup()
|
|
||||||
lx.backup()
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemMultilineString)
|
|
||||||
lx.next()
|
|
||||||
lx.next()
|
|
||||||
lx.next()
|
|
||||||
lx.ignore()
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
lx.backup()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return lexMultilineString
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexRawString consumes a raw string. Nothing can be escaped in such a string.
|
|
||||||
// It assumes that the beginning "'" has already been consumed and ignored.
|
|
||||||
func lexRawString(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
switch {
|
|
||||||
case r == eof:
|
|
||||||
return lx.errorf("unexpected EOF")
|
|
||||||
case isNL(r):
|
|
||||||
return lx.errorf("strings cannot contain newlines")
|
|
||||||
case r == rawStringEnd:
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemRawString)
|
|
||||||
lx.next()
|
|
||||||
lx.ignore()
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
return lexRawString
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexMultilineRawString consumes a raw string. Nothing can be escaped in such
|
|
||||||
// a string. It assumes that the beginning "'''" has already been consumed and
|
|
||||||
// ignored.
|
|
||||||
func lexMultilineRawString(lx *lexer) stateFn {
|
|
||||||
switch lx.next() {
|
|
||||||
case eof:
|
|
||||||
return lx.errorf("unexpected EOF")
|
|
||||||
case rawStringEnd:
|
|
||||||
if lx.accept(rawStringEnd) {
|
|
||||||
if lx.accept(rawStringEnd) {
|
|
||||||
lx.backup()
|
|
||||||
lx.backup()
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemRawMultilineString)
|
|
||||||
lx.next()
|
|
||||||
lx.next()
|
|
||||||
lx.next()
|
|
||||||
lx.ignore()
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
lx.backup()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return lexMultilineRawString
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexMultilineStringEscape consumes an escaped character. It assumes that the
|
|
||||||
// preceding '\\' has already been consumed.
|
|
||||||
func lexMultilineStringEscape(lx *lexer) stateFn {
|
|
||||||
// Handle the special case first:
|
|
||||||
if isNL(lx.next()) {
|
|
||||||
return lexMultilineString
|
|
||||||
}
|
|
||||||
lx.backup()
|
|
||||||
lx.push(lexMultilineString)
|
|
||||||
return lexStringEscape(lx)
|
|
||||||
}
|
|
||||||
|
|
||||||
func lexStringEscape(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
switch r {
|
|
||||||
case 'b':
|
|
||||||
fallthrough
|
|
||||||
case 't':
|
|
||||||
fallthrough
|
|
||||||
case 'n':
|
|
||||||
fallthrough
|
|
||||||
case 'f':
|
|
||||||
fallthrough
|
|
||||||
case 'r':
|
|
||||||
fallthrough
|
|
||||||
case '"':
|
|
||||||
fallthrough
|
|
||||||
case '\\':
|
|
||||||
return lx.pop()
|
|
||||||
case 'u':
|
|
||||||
return lexShortUnicodeEscape
|
|
||||||
case 'U':
|
|
||||||
return lexLongUnicodeEscape
|
|
||||||
}
|
|
||||||
return lx.errorf("invalid escape character %q; only the following "+
|
|
||||||
"escape characters are allowed: "+
|
|
||||||
`\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX`, r)
|
|
||||||
}
|
|
||||||
|
|
||||||
func lexShortUnicodeEscape(lx *lexer) stateFn {
|
|
||||||
var r rune
|
|
||||||
for i := 0; i < 4; i++ {
|
|
||||||
r = lx.next()
|
|
||||||
if !isHexadecimal(r) {
|
|
||||||
return lx.errorf(`expected four hexadecimal digits after '\u', `+
|
|
||||||
"but got %q instead", lx.current())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
|
|
||||||
func lexLongUnicodeEscape(lx *lexer) stateFn {
|
|
||||||
var r rune
|
|
||||||
for i := 0; i < 8; i++ {
|
|
||||||
r = lx.next()
|
|
||||||
if !isHexadecimal(r) {
|
|
||||||
return lx.errorf(`expected eight hexadecimal digits after '\U', `+
|
|
||||||
"but got %q instead", lx.current())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexNumberOrDateStart consumes either an integer, a float, or datetime.
|
|
||||||
func lexNumberOrDateStart(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
if isDigit(r) {
|
|
||||||
return lexNumberOrDate
|
|
||||||
}
|
|
||||||
switch r {
|
|
||||||
case '_':
|
|
||||||
return lexNumber
|
|
||||||
case 'e', 'E':
|
|
||||||
return lexFloat
|
|
||||||
case '.':
|
|
||||||
return lx.errorf("floats must start with a digit, not '.'")
|
|
||||||
}
|
|
||||||
return lx.errorf("expected a digit but got %q", r)
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexNumberOrDate consumes either an integer, float or datetime.
|
|
||||||
func lexNumberOrDate(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
if isDigit(r) {
|
|
||||||
return lexNumberOrDate
|
|
||||||
}
|
|
||||||
switch r {
|
|
||||||
case '-':
|
|
||||||
return lexDatetime
|
|
||||||
case '_':
|
|
||||||
return lexNumber
|
|
||||||
case '.', 'e', 'E':
|
|
||||||
return lexFloat
|
|
||||||
}
|
|
||||||
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemInteger)
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexDatetime consumes a Datetime, to a first approximation.
|
|
||||||
// The parser validates that it matches one of the accepted formats.
|
|
||||||
func lexDatetime(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
if isDigit(r) {
|
|
||||||
return lexDatetime
|
|
||||||
}
|
|
||||||
switch r {
|
|
||||||
case '-', 'T', ':', '.', 'Z', '+':
|
|
||||||
return lexDatetime
|
|
||||||
}
|
|
||||||
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemDatetime)
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexNumberStart consumes either an integer or a float. It assumes that a sign
|
|
||||||
// has already been read, but that *no* digits have been consumed.
|
|
||||||
// lexNumberStart will move to the appropriate integer or float states.
|
|
||||||
func lexNumberStart(lx *lexer) stateFn {
|
|
||||||
// We MUST see a digit. Even floats have to start with a digit.
|
|
||||||
r := lx.next()
|
|
||||||
if !isDigit(r) {
|
|
||||||
if r == '.' {
|
|
||||||
return lx.errorf("floats must start with a digit, not '.'")
|
|
||||||
}
|
|
||||||
return lx.errorf("expected a digit but got %q", r)
|
|
||||||
}
|
|
||||||
return lexNumber
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexNumber consumes an integer or a float after seeing the first digit.
|
|
||||||
func lexNumber(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
if isDigit(r) {
|
|
||||||
return lexNumber
|
|
||||||
}
|
|
||||||
switch r {
|
|
||||||
case '_':
|
|
||||||
return lexNumber
|
|
||||||
case '.', 'e', 'E':
|
|
||||||
return lexFloat
|
|
||||||
}
|
|
||||||
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemInteger)
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexFloat consumes the elements of a float. It allows any sequence of
|
|
||||||
// float-like characters, so floats emitted by the lexer are only a first
|
|
||||||
// approximation and must be validated by the parser.
|
|
||||||
func lexFloat(lx *lexer) stateFn {
|
|
||||||
r := lx.next()
|
|
||||||
if isDigit(r) {
|
|
||||||
return lexFloat
|
|
||||||
}
|
|
||||||
switch r {
|
|
||||||
case '_', '.', '-', '+', 'e', 'E':
|
|
||||||
return lexFloat
|
|
||||||
}
|
|
||||||
|
|
||||||
lx.backup()
|
|
||||||
lx.emit(itemFloat)
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexBool consumes a bool string: 'true' or 'false'.
func lexBool(lx *lexer) stateFn {
	var rs []rune
	for {
		r := lx.next()
		if !unicode.IsLetter(r) {
			lx.backup()
			break
		}
		rs = append(rs, r)
	}
	s := string(rs)
	switch s {
	case "true", "false":
		lx.emit(itemBool)
		return lx.pop()
	}
	return lx.errorf("expected value but found %q instead", s)
}
|
|
||||||
|
|
||||||
// lexCommentStart begins the lexing of a comment. It will emit
|
|
||||||
// itemCommentStart and consume no characters, passing control to lexComment.
|
|
||||||
func lexCommentStart(lx *lexer) stateFn {
|
|
||||||
lx.ignore()
|
|
||||||
lx.emit(itemCommentStart)
|
|
||||||
return lexComment
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexComment lexes an entire comment. It assumes that '#' has been consumed.
|
|
||||||
// It will consume *up to* the first newline character, and pass control
|
|
||||||
// back to the last state on the stack.
|
|
||||||
func lexComment(lx *lexer) stateFn {
|
|
||||||
r := lx.peek()
|
|
||||||
if isNL(r) || r == eof {
|
|
||||||
lx.emit(itemText)
|
|
||||||
return lx.pop()
|
|
||||||
}
|
|
||||||
lx.next()
|
|
||||||
return lexComment
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexSkip ignores all slurped input and moves on to the next state.
func lexSkip(lx *lexer, nextState stateFn) stateFn {
	return func(lx *lexer) stateFn {
		lx.ignore()
		return nextState
	}
}
|
|
||||||
|
|
||||||
// isWhitespace returns true if `r` is a whitespace character according
// to the spec.
func isWhitespace(r rune) bool {
	return r == '\t' || r == ' '
}

func isNL(r rune) bool {
	return r == '\n' || r == '\r'
}

func isDigit(r rune) bool {
	return r >= '0' && r <= '9'
}

func isHexadecimal(r rune) bool {
	return (r >= '0' && r <= '9') ||
		(r >= 'a' && r <= 'f') ||
		(r >= 'A' && r <= 'F')
}

func isBareKeyChar(r rune) bool {
	return (r >= 'A' && r <= 'Z') ||
		(r >= 'a' && r <= 'z') ||
		(r >= '0' && r <= '9') ||
		r == '_' ||
		r == '-'
}
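A small illustrative check against the bare-key predicate above. It assumes the snippet sits inside this package (so isBareKeyChar is in scope) and that fmt is imported; it is a sketch, not part of the original file:

```go
// bareKeyExamples shows which candidate keys consist solely of bare-key
// characters and can therefore appear unquoted in TOML.
func bareKeyExamples() {
	for _, k := range []string{"server", "host-name", "127.0.0.1", "two words"} {
		bare := true
		for _, r := range k {
			if !isBareKeyChar(r) {
				bare = false
				break
			}
		}
		// "127.0.0.1" and "two words" would need quoting
		fmt.Printf("%-12s bare=%v\n", k, bare)
	}
}
```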
|
|
||||||
|
|
||||||
func (itype itemType) String() string {
|
|
||||||
switch itype {
|
|
||||||
case itemError:
|
|
||||||
return "Error"
|
|
||||||
case itemNIL:
|
|
||||||
return "NIL"
|
|
||||||
case itemEOF:
|
|
||||||
return "EOF"
|
|
||||||
case itemText:
|
|
||||||
return "Text"
|
|
||||||
case itemString, itemRawString, itemMultilineString, itemRawMultilineString:
|
|
||||||
return "String"
|
|
||||||
case itemBool:
|
|
||||||
return "Bool"
|
|
||||||
case itemInteger:
|
|
||||||
return "Integer"
|
|
||||||
case itemFloat:
|
|
||||||
return "Float"
|
|
||||||
case itemDatetime:
|
|
||||||
return "DateTime"
|
|
||||||
case itemTableStart:
|
|
||||||
return "TableStart"
|
|
||||||
case itemTableEnd:
|
|
||||||
return "TableEnd"
|
|
||||||
case itemKeyStart:
|
|
||||||
return "KeyStart"
|
|
||||||
case itemArray:
|
|
||||||
return "Array"
|
|
||||||
case itemArrayEnd:
|
|
||||||
return "ArrayEnd"
|
|
||||||
case itemCommentStart:
|
|
||||||
return "CommentStart"
|
|
||||||
}
|
|
||||||
panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype)))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (item item) String() string {
|
|
||||||
return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val)
|
|
||||||
}
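Everything in this removed lexer ultimately feeds the package's public entry points, which live in the package's decode.go (not part of this excerpt). A hedged usage sketch, assuming the well-known toml.Decode signature from github.com/BurntSushi/toml; the config struct and input are made up for illustration:

```go
package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

// config is an illustrative target struct; TOML keys are matched to field
// names case-insensitively.
type config struct {
	Title string
	Port  int
}

func main() {
	blob := `
title = "example"
port  = 8080
`
	var c config
	if _, err := toml.Decode(blob, &c); err != nil {
		log.Fatal(err)
	}
	fmt.Println(c.Title, c.Port) // example 8080
}
```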
592 vendor/github.com/BurntSushi/toml/parse.go (generated, vendored)
@@ -1,592 +0,0 @@
package toml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
type parser struct {
|
|
||||||
mapping map[string]interface{}
|
|
||||||
types map[string]tomlType
|
|
||||||
lx *lexer
|
|
||||||
|
|
||||||
// A list of keys in the order that they appear in the TOML data.
|
|
||||||
ordered []Key
|
|
||||||
|
|
||||||
// the full key for the current hash in scope
|
|
||||||
context Key
|
|
||||||
|
|
||||||
// the base key name for everything except hashes
|
|
||||||
currentKey string
|
|
||||||
|
|
||||||
// rough approximation of line number
|
|
||||||
approxLine int
|
|
||||||
|
|
||||||
// A map of 'key.group.names' to whether they were created implicitly.
|
|
||||||
implicits map[string]bool
|
|
||||||
}
|
|
||||||
|
|
||||||
type parseError string
|
|
||||||
|
|
||||||
func (pe parseError) Error() string {
|
|
||||||
return string(pe)
|
|
||||||
}
|
|
||||||
|
|
||||||
func parse(data string) (p *parser, err error) {
|
|
||||||
defer func() {
|
|
||||||
if r := recover(); r != nil {
|
|
||||||
var ok bool
|
|
||||||
if err, ok = r.(parseError); ok {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
panic(r)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
p = &parser{
|
|
||||||
mapping: make(map[string]interface{}),
|
|
||||||
types: make(map[string]tomlType),
|
|
||||||
lx: lex(data),
|
|
||||||
ordered: make([]Key, 0),
|
|
||||||
implicits: make(map[string]bool),
|
|
||||||
}
|
|
||||||
for {
|
|
||||||
item := p.next()
|
|
||||||
if item.typ == itemEOF {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
p.topLevel(item)
|
|
||||||
}
|
|
||||||
|
|
||||||
return p, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *parser) panicf(format string, v ...interface{}) {
|
|
||||||
msg := fmt.Sprintf("Near line %d (last key parsed '%s'): %s",
|
|
||||||
p.approxLine, p.current(), fmt.Sprintf(format, v...))
|
|
||||||
panic(parseError(msg))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *parser) next() item {
|
|
||||||
it := p.lx.nextItem()
|
|
||||||
if it.typ == itemError {
|
|
||||||
p.panicf("%s", it.val)
|
|
||||||
}
|
|
||||||
return it
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *parser) bug(format string, v ...interface{}) {
|
|
||||||
panic(fmt.Sprintf("BUG: "+format+"\n\n", v...))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *parser) expect(typ itemType) item {
|
|
||||||
it := p.next()
|
|
||||||
p.assertEqual(typ, it.typ)
|
|
||||||
return it
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *parser) assertEqual(expected, got itemType) {
|
|
||||||
if expected != got {
|
|
||||||
p.bug("Expected '%s' but got '%s'.", expected, got)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *parser) topLevel(item item) {
|
|
||||||
switch item.typ {
|
|
||||||
case itemCommentStart:
|
|
||||||
p.approxLine = item.line
|
|
||||||
p.expect(itemText)
|
|
||||||
case itemTableStart:
|
|
||||||
kg := p.next()
|
|
||||||
p.approxLine = kg.line
|
|
||||||
|
|
||||||
var key Key
|
|
||||||
for ; kg.typ != itemTableEnd && kg.typ != itemEOF; kg = p.next() {
|
|
||||||
key = append(key, p.keyString(kg))
|
|
||||||
}
|
|
||||||
p.assertEqual(itemTableEnd, kg.typ)
|
|
||||||
|
|
||||||
p.establishContext(key, false)
|
|
||||||
p.setType("", tomlHash)
|
|
||||||
p.ordered = append(p.ordered, key)
|
|
||||||
case itemArrayTableStart:
|
|
||||||
kg := p.next()
|
|
||||||
p.approxLine = kg.line
|
|
||||||
|
|
||||||
var key Key
|
|
||||||
for ; kg.typ != itemArrayTableEnd && kg.typ != itemEOF; kg = p.next() {
|
|
||||||
key = append(key, p.keyString(kg))
|
|
||||||
}
|
|
||||||
p.assertEqual(itemArrayTableEnd, kg.typ)
|
|
||||||
|
|
||||||
p.establishContext(key, true)
|
|
||||||
p.setType("", tomlArrayHash)
|
|
||||||
p.ordered = append(p.ordered, key)
|
|
||||||
case itemKeyStart:
|
|
||||||
kname := p.next()
|
|
||||||
p.approxLine = kname.line
|
|
||||||
p.currentKey = p.keyString(kname)
|
|
||||||
|
|
||||||
val, typ := p.value(p.next())
|
|
||||||
p.setValue(p.currentKey, val)
|
|
||||||
p.setType(p.currentKey, typ)
|
|
||||||
p.ordered = append(p.ordered, p.context.add(p.currentKey))
|
|
||||||
p.currentKey = ""
|
|
||||||
default:
|
|
||||||
p.bug("Unexpected type at top level: %s", item.typ)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Gets a string for a key (or part of a key in a table name).
|
|
||||||
func (p *parser) keyString(it item) string {
|
|
||||||
switch it.typ {
|
|
||||||
case itemText:
|
|
||||||
return it.val
|
|
||||||
case itemString, itemMultilineString,
|
|
||||||
itemRawString, itemRawMultilineString:
|
|
||||||
s, _ := p.value(it)
|
|
||||||
return s.(string)
|
|
||||||
default:
|
|
||||||
p.bug("Unexpected key type: %s", it.typ)
|
|
||||||
panic("unreachable")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// value translates an expected value from the lexer into a Go value wrapped
|
|
||||||
// as an empty interface.
|
|
||||||
func (p *parser) value(it item) (interface{}, tomlType) {
|
|
||||||
switch it.typ {
|
|
||||||
case itemString:
|
|
||||||
return p.replaceEscapes(it.val), p.typeOfPrimitive(it)
|
|
||||||
case itemMultilineString:
|
|
||||||
trimmed := stripFirstNewline(stripEscapedWhitespace(it.val))
|
|
||||||
return p.replaceEscapes(trimmed), p.typeOfPrimitive(it)
|
|
||||||
case itemRawString:
|
|
||||||
return it.val, p.typeOfPrimitive(it)
|
|
||||||
case itemRawMultilineString:
|
|
||||||
return stripFirstNewline(it.val), p.typeOfPrimitive(it)
|
|
||||||
case itemBool:
|
|
||||||
switch it.val {
|
|
||||||
case "true":
|
|
||||||
return true, p.typeOfPrimitive(it)
|
|
||||||
case "false":
|
|
||||||
return false, p.typeOfPrimitive(it)
|
|
||||||
}
|
|
||||||
p.bug("Expected boolean value, but got '%s'.", it.val)
|
|
||||||
case itemInteger:
|
|
||||||
if !numUnderscoresOK(it.val) {
|
|
||||||
p.panicf("Invalid integer %q: underscores must be surrounded by digits",
|
|
||||||
it.val)
|
|
||||||
}
|
|
||||||
val := strings.Replace(it.val, "_", "", -1)
|
|
||||||
num, err := strconv.ParseInt(val, 10, 64)
|
|
||||||
if err != nil {
|
|
||||||
// Distinguish integer values. Normally, it'd be a bug if the lexer
|
|
||||||
// provides an invalid integer, but it's possible that the number is
|
|
||||||
// out of range of valid values (which the lexer cannot determine).
|
|
||||||
// So mark the former as a bug but the latter as a legitimate user
|
|
||||||
// error.
|
|
||||||
if e, ok := err.(*strconv.NumError); ok &&
|
|
||||||
e.Err == strconv.ErrRange {
|
|
||||||
|
|
||||||
p.panicf("Integer '%s' is out of the range of 64-bit "+
|
|
||||||
"signed integers.", it.val)
|
|
||||||
} else {
|
|
||||||
p.bug("Expected integer value, but got '%s'.", it.val)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return num, p.typeOfPrimitive(it)
|
|
||||||
case itemFloat:
|
|
||||||
parts := strings.FieldsFunc(it.val, func(r rune) bool {
|
|
||||||
switch r {
|
|
||||||
case '.', 'e', 'E':
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
})
|
|
||||||
for _, part := range parts {
|
|
||||||
if !numUnderscoresOK(part) {
|
|
||||||
p.panicf("Invalid float %q: underscores must be "+
|
|
||||||
"surrounded by digits", it.val)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !numPeriodsOK(it.val) {
|
|
||||||
// As a special case, numbers like '123.' or '1.e2',
|
|
||||||
// which are valid as far as Go/strconv are concerned,
|
|
||||||
// must be rejected because TOML says that a fractional
|
|
||||||
// part consists of '.' followed by 1+ digits.
|
|
||||||
p.panicf("Invalid float %q: '.' must be followed "+
|
|
||||||
"by one or more digits", it.val)
|
|
||||||
}
|
|
||||||
val := strings.Replace(it.val, "_", "", -1)
|
|
||||||
num, err := strconv.ParseFloat(val, 64)
|
|
||||||
if err != nil {
|
|
||||||
if e, ok := err.(*strconv.NumError); ok &&
|
|
||||||
e.Err == strconv.ErrRange {
|
|
||||||
|
|
||||||
p.panicf("Float '%s' is out of the range of 64-bit "+
|
|
||||||
"IEEE-754 floating-point numbers.", it.val)
|
|
||||||
} else {
|
|
||||||
p.panicf("Invalid float value: %q", it.val)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return num, p.typeOfPrimitive(it)
|
|
||||||
case itemDatetime:
|
|
||||||
var t time.Time
|
|
||||||
var ok bool
|
|
||||||
var err error
|
|
||||||
for _, format := range []string{
|
|
||||||
"2006-01-02T15:04:05Z07:00",
|
|
||||||
"2006-01-02T15:04:05",
|
|
||||||
"2006-01-02",
|
|
||||||
} {
|
|
||||||
t, err = time.ParseInLocation(format, it.val, time.Local)
|
|
||||||
if err == nil {
|
|
||||||
ok = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !ok {
|
|
||||||
p.panicf("Invalid TOML Datetime: %q.", it.val)
|
|
||||||
}
|
|
||||||
return t, p.typeOfPrimitive(it)
|
|
||||||
case itemArray:
|
|
||||||
array := make([]interface{}, 0)
|
|
||||||
types := make([]tomlType, 0)
|
|
||||||
|
|
||||||
for it = p.next(); it.typ != itemArrayEnd; it = p.next() {
|
|
||||||
if it.typ == itemCommentStart {
|
|
||||||
p.expect(itemText)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
val, typ := p.value(it)
|
|
||||||
array = append(array, val)
|
|
||||||
types = append(types, typ)
|
|
||||||
}
|
|
||||||
return array, p.typeOfArray(types)
|
|
||||||
case itemInlineTableStart:
|
|
||||||
var (
|
|
||||||
hash = make(map[string]interface{})
|
|
||||||
outerContext = p.context
|
|
||||||
outerKey = p.currentKey
|
|
||||||
)
|
|
||||||
|
|
||||||
p.context = append(p.context, p.currentKey)
|
|
||||||
p.currentKey = ""
|
|
||||||
for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() {
|
|
||||||
if it.typ != itemKeyStart {
|
|
||||||
p.bug("Expected key start but instead found %q, around line %d",
|
|
||||||
it.val, p.approxLine)
|
|
||||||
}
|
|
||||||
if it.typ == itemCommentStart {
|
|
||||||
p.expect(itemText)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// retrieve key
|
|
||||||
k := p.next()
|
|
||||||
p.approxLine = k.line
|
|
||||||
kname := p.keyString(k)
|
|
||||||
|
|
||||||
// retrieve value
|
|
||||||
p.currentKey = kname
|
|
||||||
val, typ := p.value(p.next())
|
|
||||||
// make sure we keep metadata up to date
|
|
||||||
p.setType(kname, typ)
|
|
||||||
p.ordered = append(p.ordered, p.context.add(p.currentKey))
|
|
||||||
hash[kname] = val
|
|
||||||
}
|
|
||||||
p.context = outerContext
|
|
||||||
p.currentKey = outerKey
|
|
||||||
return hash, tomlHash
|
|
||||||
}
|
|
||||||
p.bug("Unexpected value type: %s", it.typ)
|
|
||||||
panic("unreachable")
|
|
||||||
}
|
|
||||||
|
|
||||||
// numUnderscoresOK checks whether each underscore in s is surrounded by
// characters that are not underscores.
func numUnderscoresOK(s string) bool {
	accept := false
	for _, r := range s {
		if r == '_' {
			if !accept {
				return false
			}
			accept = false
			continue
		}
		accept = true
	}
	return accept
}
|
|
||||||
|
|
||||||
// numPeriodsOK checks whether every period in s is followed by a digit.
func numPeriodsOK(s string) bool {
	period := false
	for _, r := range s {
		if period && !isDigit(r) {
			return false
		}
		period = r == '.'
	}
	return !period
}
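To make the two validators above concrete, a short sketch of inputs they accept and reject. It is assumed to sit in this package next to the helpers, with fmt imported; it is not part of the original file:

```go
// numberValidatorExamples prints how the two checks classify a few literals.
func numberValidatorExamples() {
	for _, s := range []string{"1_000", "_1000", "1__0", "3.14", "123.", "1.e2"} {
		fmt.Printf("%-6s underscoresOK=%v periodsOK=%v\n",
			s, numUnderscoresOK(s), numPeriodsOK(s))
	}
	// 1_000  underscoresOK=true  periodsOK=true
	// _1000  underscoresOK=false periodsOK=true
	// 1__0   underscoresOK=false periodsOK=true
	// 3.14   underscoresOK=true  periodsOK=true
	// 123.   underscoresOK=true  periodsOK=false
	// 1.e2   underscoresOK=true  periodsOK=false
}
```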
|
|
||||||
|
|
||||||
// establishContext sets the current context of the parser,
|
|
||||||
// where the context is either a hash or an array of hashes. Which one is
|
|
||||||
// set depends on the value of the `array` parameter.
|
|
||||||
//
|
|
||||||
// Establishing the context also makes sure that the key isn't a duplicate, and
|
|
||||||
// will create implicit hashes automatically.
|
|
||||||
func (p *parser) establishContext(key Key, array bool) {
|
|
||||||
var ok bool
|
|
||||||
|
|
||||||
// Always start at the top level and drill down for our context.
|
|
||||||
hashContext := p.mapping
|
|
||||||
keyContext := make(Key, 0)
|
|
||||||
|
|
||||||
// We only need implicit hashes for key[0:-1]
|
|
||||||
for _, k := range key[0 : len(key)-1] {
|
|
||||||
_, ok = hashContext[k]
|
|
||||||
keyContext = append(keyContext, k)
|
|
||||||
|
|
||||||
// No key? Make an implicit hash and move on.
|
|
||||||
if !ok {
|
|
||||||
p.addImplicit(keyContext)
|
|
||||||
hashContext[k] = make(map[string]interface{})
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the hash context is actually an array of tables, then set
|
|
||||||
// the hash context to the last element in that array.
|
|
||||||
//
|
|
||||||
// Otherwise, it better be a table, since this MUST be a key group (by
|
|
||||||
// virtue of it not being the last element in a key).
|
|
||||||
switch t := hashContext[k].(type) {
|
|
||||||
case []map[string]interface{}:
|
|
||||||
hashContext = t[len(t)-1]
|
|
||||||
case map[string]interface{}:
|
|
||||||
hashContext = t
|
|
||||||
default:
|
|
||||||
p.panicf("Key '%s' was already created as a hash.", keyContext)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
p.context = keyContext
|
|
||||||
if array {
|
|
||||||
// If this is the first element for this array, then allocate a new
|
|
||||||
// list of tables for it.
|
|
||||||
k := key[len(key)-1]
|
|
||||||
if _, ok := hashContext[k]; !ok {
|
|
||||||
hashContext[k] = make([]map[string]interface{}, 0, 5)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add a new table. But make sure the key hasn't already been used
|
|
||||||
// for something else.
|
|
||||||
if hash, ok := hashContext[k].([]map[string]interface{}); ok {
|
|
||||||
hashContext[k] = append(hash, make(map[string]interface{}))
|
|
||||||
} else {
|
|
||||||
p.panicf("Key '%s' was already created and cannot be used as "+
|
|
||||||
"an array.", keyContext)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
p.setValue(key[len(key)-1], make(map[string]interface{}))
|
|
||||||
}
|
|
||||||
p.context = append(p.context, key[len(key)-1])
|
|
||||||
}
|
|
||||||
|
|
||||||
// setValue sets the given key to the given value in the current context.
// It will make sure that the key hasn't already been defined, and accounts
// for implicit key groups.
func (p *parser) setValue(key string, value interface{}) {
|
|
||||||
var tmpHash interface{}
|
|
||||||
var ok bool
|
|
||||||
|
|
||||||
hash := p.mapping
|
|
||||||
keyContext := make(Key, 0)
|
|
||||||
for _, k := range p.context {
|
|
||||||
keyContext = append(keyContext, k)
|
|
||||||
if tmpHash, ok = hash[k]; !ok {
|
|
||||||
p.bug("Context for key '%s' has not been established.", keyContext)
|
|
||||||
}
|
|
||||||
switch t := tmpHash.(type) {
|
|
||||||
case []map[string]interface{}:
|
|
||||||
// The context is a table of hashes. Pick the most recent table
|
|
||||||
// defined as the current hash.
|
|
||||||
hash = t[len(t)-1]
|
|
||||||
case map[string]interface{}:
|
|
||||||
hash = t
|
|
||||||
default:
|
|
||||||
p.bug("Expected hash to have type 'map[string]interface{}', but "+
|
|
||||||
"it has '%T' instead.", tmpHash)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
keyContext = append(keyContext, key)
|
|
||||||
|
|
||||||
if _, ok := hash[key]; ok {
|
|
||||||
// Typically, if the given key has already been set, then we have
|
|
||||||
// to raise an error since duplicate keys are disallowed. However,
|
|
||||||
// it's possible that a key was previously defined implicitly. In this
|
|
||||||
// case, it is allowed to be redefined concretely. (See the
|
|
||||||
// `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.)
|
|
||||||
//
|
|
||||||
// But we have to make sure to stop marking it as an implicit. (So that
|
|
||||||
// another redefinition provokes an error.)
|
|
||||||
//
|
|
||||||
// Note that since it has already been defined (as a hash), we don't
|
|
||||||
// want to overwrite it. So our business is done.
|
|
||||||
if p.isImplicit(keyContext) {
|
|
||||||
p.removeImplicit(keyContext)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, we have a concrete key trying to override a previous
|
|
||||||
// key, which is *always* wrong.
|
|
||||||
p.panicf("Key '%s' has already been defined.", keyContext)
|
|
||||||
}
|
|
||||||
hash[key] = value
|
|
||||||
}
|
|
||||||
|
|
||||||
// setType sets the type of a particular value at a given key.
|
|
||||||
// It should be called immediately AFTER setValue.
|
|
||||||
//
|
|
||||||
// Note that if `key` is empty, then the type given will be applied to the
|
|
||||||
// current context (which is either a table or an array of tables).
|
|
||||||
func (p *parser) setType(key string, typ tomlType) {
|
|
||||||
keyContext := make(Key, 0, len(p.context)+1)
|
|
||||||
for _, k := range p.context {
|
|
||||||
keyContext = append(keyContext, k)
|
|
||||||
}
|
|
||||||
if len(key) > 0 { // allow type setting for hashes
|
|
||||||
keyContext = append(keyContext, key)
|
|
||||||
}
|
|
||||||
p.types[keyContext.String()] = typ
|
|
||||||
}
|
|
||||||
|
|
||||||
// addImplicit sets the given Key as having been created implicitly.
func (p *parser) addImplicit(key Key) {
	p.implicits[key.String()] = true
}

// removeImplicit stops tagging the given key as having been implicitly
// created.
func (p *parser) removeImplicit(key Key) {
	p.implicits[key.String()] = false
}

// isImplicit returns true if the key group pointed to by the key was created
// implicitly.
func (p *parser) isImplicit(key Key) bool {
	return p.implicits[key.String()]
}
|
|
||||||
|
|
||||||
// current returns the full key name of the current context.
|
|
||||||
func (p *parser) current() string {
|
|
||||||
if len(p.currentKey) == 0 {
|
|
||||||
return p.context.String()
|
|
||||||
}
|
|
||||||
if len(p.context) == 0 {
|
|
||||||
return p.currentKey
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%s.%s", p.context, p.currentKey)
|
|
||||||
}
|
|
||||||
|
|
||||||
func stripFirstNewline(s string) string {
	if len(s) == 0 || s[0] != '\n' {
		return s
	}
	return s[1:]
}

func stripEscapedWhitespace(s string) string {
	esc := strings.Split(s, "\\\n")
	if len(esc) > 1 {
		for i := 1; i < len(esc); i++ {
			esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace)
		}
	}
	return strings.Join(esc, "")
}
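The two helpers above implement TOML's multiline-string trimming: the newline immediately after the opening quotes is dropped, and a line-ending backslash swallows the following whitespace. A small sketch of the combined effect, assumed to run inside this package with fmt imported:

```go
// multilineTrimExample mirrors how p.value() applies the helpers to an
// itemMultilineString: stripFirstNewline(stripEscapedWhitespace(val)).
func multilineTrimExample() {
	raw := "\nThe quick brown \\\n    fox jumps over"
	fmt.Printf("%q\n", stripFirstNewline(stripEscapedWhitespace(raw)))
	// "The quick brown fox jumps over"
}
```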
|
|
||||||
|
|
||||||
func (p *parser) replaceEscapes(str string) string {
|
|
||||||
var replaced []rune
|
|
||||||
s := []byte(str)
|
|
||||||
r := 0
|
|
||||||
for r < len(s) {
|
|
||||||
if s[r] != '\\' {
|
|
||||||
c, size := utf8.DecodeRune(s[r:])
|
|
||||||
r += size
|
|
||||||
replaced = append(replaced, c)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
r += 1
|
|
||||||
if r >= len(s) {
|
|
||||||
p.bug("Escape sequence at end of string.")
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
switch s[r] {
|
|
||||||
default:
|
|
||||||
p.bug("Expected valid escape code after \\, but got %q.", s[r])
|
|
||||||
return ""
|
|
||||||
case 'b':
|
|
||||||
replaced = append(replaced, rune(0x0008))
|
|
||||||
r += 1
|
|
||||||
case 't':
|
|
||||||
replaced = append(replaced, rune(0x0009))
|
|
||||||
r += 1
|
|
||||||
case 'n':
|
|
||||||
replaced = append(replaced, rune(0x000A))
|
|
||||||
r += 1
|
|
||||||
case 'f':
|
|
||||||
replaced = append(replaced, rune(0x000C))
|
|
||||||
r += 1
|
|
||||||
case 'r':
|
|
||||||
replaced = append(replaced, rune(0x000D))
|
|
||||||
r += 1
|
|
||||||
case '"':
|
|
||||||
replaced = append(replaced, rune(0x0022))
|
|
||||||
r += 1
|
|
||||||
case '\\':
|
|
||||||
replaced = append(replaced, rune(0x005C))
|
|
||||||
r += 1
|
|
||||||
case 'u':
|
|
||||||
// At this point, we know we have a Unicode escape of the form
|
|
||||||
// `uXXXX` at [r, r+5). (Because the lexer guarantees this
|
|
||||||
// for us.)
|
|
||||||
escaped := p.asciiEscapeToUnicode(s[r+1 : r+5])
|
|
||||||
replaced = append(replaced, escaped)
|
|
||||||
r += 5
|
|
||||||
		case 'U':
			// At this point, we know we have a Unicode escape of the form
			// `UXXXXXXXX` at [r, r+9). (Because the lexer guarantees this
			// for us.)
			escaped := p.asciiEscapeToUnicode(s[r+1 : r+9])
			replaced = append(replaced, escaped)
			r += 9
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return string(replaced)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *parser) asciiEscapeToUnicode(bs []byte) rune {
|
|
||||||
s := string(bs)
|
|
||||||
hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32)
|
|
||||||
if err != nil {
|
|
||||||
p.bug("Could not parse '%s' as a hexadecimal number, but the "+
|
|
||||||
"lexer claims it's OK: %s", s, err)
|
|
||||||
}
|
|
||||||
if !utf8.ValidRune(rune(hex)) {
|
|
||||||
p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s)
|
|
||||||
}
|
|
||||||
return rune(hex)
|
|
||||||
}
|
|
||||||
|
|
||||||
func isStringType(ty itemType) bool {
|
|
||||||
return ty == itemString || ty == itemMultilineString ||
|
|
||||||
ty == itemRawString || ty == itemRawMultilineString
|
|
||||||
}
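parse() above converts lexer errors and parser bugs into ordinary error returns by panicking with the typed parseError and recovering it at the entry point. A self-contained sketch of that error-handling strategy; the function names are illustrative only:

```go
package main

import "fmt"

type parseError string

func (pe parseError) Error() string { return string(pe) }

// mustBePositive panics with a typed error, like p.panicf above.
func mustBePositive(n int) {
	if n <= 0 {
		panic(parseError(fmt.Sprintf("expected a positive number, got %d", n)))
	}
}

// parsePositive recovers only parseError panics, mirroring parse().
func parsePositive(n int) (err error) {
	defer func() {
		if r := recover(); r != nil {
			var ok bool
			if err, ok = r.(parseError); ok {
				return
			}
			panic(r) // not one of ours: re-panic
		}
	}()
	mustBePositive(n)
	return nil
}

func main() {
	fmt.Println(parsePositive(3))  // <nil>
	fmt.Println(parsePositive(-1)) // expected a positive number, got -1
}
```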
1 vendor/github.com/BurntSushi/toml/session.vim (generated, vendored)
@@ -1 +0,0 @@
au BufWritePost *.go silent!make tags > /dev/null 2>&1
91 vendor/github.com/BurntSushi/toml/type_check.go (generated, vendored)
@@ -1,91 +0,0 @@
package toml
|
|
||||||
|
|
||||||
// tomlType represents any Go type that corresponds to a TOML type.
// While the first draft of the TOML spec has a simplistic type system that
// probably doesn't need this level of sophistication, we seem to be militating
// toward adding real composite types.
type tomlType interface {
	typeString() string
}
|
|
||||||
|
|
||||||
// typeEqual accepts any two types and returns true if they are equal.
func typeEqual(t1, t2 tomlType) bool {
	if t1 == nil || t2 == nil {
		return false
	}
	return t1.typeString() == t2.typeString()
}
|
|
||||||
|
|
||||||
func typeIsHash(t tomlType) bool {
|
|
||||||
return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash)
|
|
||||||
}
|
|
||||||
|
|
||||||
type tomlBaseType string
|
|
||||||
|
|
||||||
func (btype tomlBaseType) typeString() string {
|
|
||||||
return string(btype)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (btype tomlBaseType) String() string {
|
|
||||||
return btype.typeString()
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
tomlInteger tomlBaseType = "Integer"
|
|
||||||
tomlFloat tomlBaseType = "Float"
|
|
||||||
tomlDatetime tomlBaseType = "Datetime"
|
|
||||||
tomlString tomlBaseType = "String"
|
|
||||||
tomlBool tomlBaseType = "Bool"
|
|
||||||
tomlArray tomlBaseType = "Array"
|
|
||||||
tomlHash tomlBaseType = "Hash"
|
|
||||||
tomlArrayHash tomlBaseType = "ArrayHash"
|
|
||||||
)
|
|
||||||
|
|
||||||
// typeOfPrimitive returns a tomlType of any primitive value in TOML.
|
|
||||||
// Primitive values are: Integer, Float, Datetime, String and Bool.
|
|
||||||
//
|
|
||||||
// Passing a lexer item other than the following will cause a BUG message
|
|
||||||
// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime.
|
|
||||||
func (p *parser) typeOfPrimitive(lexItem item) tomlType {
|
|
||||||
switch lexItem.typ {
|
|
||||||
case itemInteger:
|
|
||||||
return tomlInteger
|
|
||||||
case itemFloat:
|
|
||||||
return tomlFloat
|
|
||||||
case itemDatetime:
|
|
||||||
return tomlDatetime
|
|
||||||
case itemString:
|
|
||||||
return tomlString
|
|
||||||
case itemMultilineString:
|
|
||||||
return tomlString
|
|
||||||
case itemRawString:
|
|
||||||
return tomlString
|
|
||||||
case itemRawMultilineString:
|
|
||||||
return tomlString
|
|
||||||
case itemBool:
|
|
||||||
return tomlBool
|
|
||||||
}
|
|
||||||
p.bug("Cannot infer primitive type of lex item '%s'.", lexItem)
|
|
||||||
panic("unreachable")
|
|
||||||
}
|
|
||||||
|
|
||||||
// typeOfArray returns a tomlType for an array given a list of types of its
|
|
||||||
// values.
|
|
||||||
//
|
|
||||||
// In the current spec, if an array is homogeneous, then its type is always
|
|
||||||
// "Array". If the array is not homogeneous, an error is generated.
|
|
||||||
func (p *parser) typeOfArray(types []tomlType) tomlType {
|
|
||||||
// Empty arrays are cool.
|
|
||||||
if len(types) == 0 {
|
|
||||||
return tomlArray
|
|
||||||
}
|
|
||||||
|
|
||||||
theType := types[0]
|
|
||||||
for _, t := range types[1:] {
|
|
||||||
if !typeEqual(theType, t) {
|
|
||||||
p.panicf("Array contains values of type '%s' and '%s', but "+
|
|
||||||
"arrays must be homogeneous.", theType, t)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return tomlArray
|
|
||||||
}
242 vendor/github.com/BurntSushi/toml/type_fields.go (generated, vendored)
@@ -1,242 +0,0 @@
package toml
|
|
||||||
|
|
||||||
// Struct field handling is adapted from code in encoding/json:
|
|
||||||
//
|
|
||||||
// Copyright 2010 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the Go distribution.
|
|
||||||
|
|
||||||
import (
|
|
||||||
"reflect"
|
|
||||||
"sort"
|
|
||||||
"sync"
|
|
||||||
)
|
|
||||||
|
|
||||||
// A field represents a single field found in a struct.
type field struct {
	name  string       // the name of the field (`toml` tag included)
	tag   bool         // whether field has a `toml` tag
	index []int        // represents the depth of an anonymous field
	typ   reflect.Type // the type of the field
}
|
|
||||||
|
|
||||||
// byName sorts field by name, breaking ties with depth,
|
|
||||||
// then breaking ties with "name came from toml tag", then
|
|
||||||
// breaking ties with index sequence.
|
|
||||||
type byName []field
|
|
||||||
|
|
||||||
func (x byName) Len() int { return len(x) }
|
|
||||||
|
|
||||||
func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
|
||||||
|
|
||||||
func (x byName) Less(i, j int) bool {
|
|
||||||
if x[i].name != x[j].name {
|
|
||||||
return x[i].name < x[j].name
|
|
||||||
}
|
|
||||||
if len(x[i].index) != len(x[j].index) {
|
|
||||||
return len(x[i].index) < len(x[j].index)
|
|
||||||
}
|
|
||||||
if x[i].tag != x[j].tag {
|
|
||||||
return x[i].tag
|
|
||||||
}
|
|
||||||
return byIndex(x).Less(i, j)
|
|
||||||
}
|
|
||||||
|
|
||||||
// byIndex sorts field by index sequence.
|
|
||||||
type byIndex []field
|
|
||||||
|
|
||||||
func (x byIndex) Len() int { return len(x) }
|
|
||||||
|
|
||||||
func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
|
||||||
|
|
||||||
func (x byIndex) Less(i, j int) bool {
|
|
||||||
for k, xik := range x[i].index {
|
|
||||||
if k >= len(x[j].index) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if xik != x[j].index[k] {
|
|
||||||
return xik < x[j].index[k]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return len(x[i].index) < len(x[j].index)
|
|
||||||
}
|
|
||||||
|
|
||||||
// typeFields returns a list of fields that TOML should recognize for the given
|
|
||||||
// type. The algorithm is breadth-first search over the set of structs to
|
|
||||||
// include - the top struct and then any reachable anonymous structs.
|
|
||||||
func typeFields(t reflect.Type) []field {
|
|
||||||
// Anonymous fields to explore at the current level and the next.
|
|
||||||
current := []field{}
|
|
||||||
next := []field{{typ: t}}
|
|
||||||
|
|
||||||
// Count of queued names for current level and the next.
|
|
||||||
count := map[reflect.Type]int{}
|
|
||||||
nextCount := map[reflect.Type]int{}
|
|
||||||
|
|
||||||
// Types already visited at an earlier level.
|
|
||||||
visited := map[reflect.Type]bool{}
|
|
||||||
|
|
||||||
// Fields found.
|
|
||||||
var fields []field
|
|
||||||
|
|
||||||
for len(next) > 0 {
|
|
||||||
current, next = next, current[:0]
|
|
||||||
count, nextCount = nextCount, map[reflect.Type]int{}
|
|
||||||
|
|
||||||
for _, f := range current {
|
|
||||||
if visited[f.typ] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
visited[f.typ] = true
|
|
||||||
|
|
||||||
// Scan f.typ for fields to include.
|
|
||||||
for i := 0; i < f.typ.NumField(); i++ {
|
|
||||||
sf := f.typ.Field(i)
|
|
||||||
if sf.PkgPath != "" && !sf.Anonymous { // unexported
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
opts := getOptions(sf.Tag)
|
|
||||||
if opts.skip {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
index := make([]int, len(f.index)+1)
|
|
||||||
copy(index, f.index)
|
|
||||||
index[len(f.index)] = i
|
|
||||||
|
|
||||||
ft := sf.Type
|
|
||||||
if ft.Name() == "" && ft.Kind() == reflect.Ptr {
|
|
||||||
// Follow pointer.
|
|
||||||
ft = ft.Elem()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Record found field and index sequence.
|
|
||||||
if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
|
|
||||||
tagged := opts.name != ""
|
|
||||||
name := opts.name
|
|
||||||
if name == "" {
|
|
||||||
name = sf.Name
|
|
||||||
}
|
|
||||||
fields = append(fields, field{name, tagged, index, ft})
|
|
||||||
if count[f.typ] > 1 {
|
|
||||||
// If there were multiple instances, add a second,
|
|
||||||
// so that the annihilation code will see a duplicate.
|
|
||||||
// It only cares about the distinction between 1 or 2,
|
|
||||||
// so don't bother generating any more copies.
|
|
||||||
fields = append(fields, fields[len(fields)-1])
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Record new anonymous struct to explore in next round.
|
|
||||||
nextCount[ft]++
|
|
||||||
if nextCount[ft] == 1 {
|
|
||||||
f := field{name: ft.Name(), index: index, typ: ft}
|
|
||||||
next = append(next, f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Sort(byName(fields))
|
|
||||||
|
|
||||||
// Delete all fields that are hidden by the Go rules for embedded fields,
|
|
||||||
// except that fields with TOML tags are promoted.
|
|
||||||
|
|
||||||
// The fields are sorted in primary order of name, secondary order
|
|
||||||
// of field index length. Loop over names; for each name, delete
|
|
||||||
// hidden fields by choosing the one dominant field that survives.
|
|
||||||
out := fields[:0]
|
|
||||||
for advance, i := 0, 0; i < len(fields); i += advance {
|
|
||||||
// One iteration per name.
|
|
||||||
// Find the sequence of fields with the name of this first field.
|
|
||||||
fi := fields[i]
|
|
||||||
name := fi.name
|
|
||||||
for advance = 1; i+advance < len(fields); advance++ {
|
|
||||||
fj := fields[i+advance]
|
|
||||||
if fj.name != name {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if advance == 1 { // Only one field with this name
|
|
||||||
out = append(out, fi)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
dominant, ok := dominantField(fields[i : i+advance])
|
|
||||||
if ok {
|
|
||||||
out = append(out, dominant)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fields = out
|
|
||||||
sort.Sort(byIndex(fields))
|
|
||||||
|
|
||||||
return fields
|
|
||||||
}
|
|
||||||
|
|
||||||
// dominantField looks through the fields, all of which are known to
|
|
||||||
// have the same name, to find the single field that dominates the
|
|
||||||
// others using Go's embedding rules, modified by the presence of
|
|
||||||
// TOML tags. If there are multiple top-level fields, the boolean
|
|
||||||
// will be false: This condition is an error in Go and we skip all
|
|
||||||
// the fields.
|
|
||||||
func dominantField(fields []field) (field, bool) {
|
|
||||||
// The fields are sorted in increasing index-length order. The winner
|
|
||||||
// must therefore be one with the shortest index length. Drop all
|
|
||||||
// longer entries, which is easy: just truncate the slice.
|
|
||||||
length := len(fields[0].index)
|
|
||||||
tagged := -1 // Index of first tagged field.
|
|
||||||
for i, f := range fields {
|
|
||||||
if len(f.index) > length {
|
|
||||||
fields = fields[:i]
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if f.tag {
|
|
||||||
if tagged >= 0 {
|
|
||||||
// Multiple tagged fields at the same level: conflict.
|
|
||||||
// Return no field.
|
|
||||||
return field{}, false
|
|
||||||
}
|
|
||||||
tagged = i
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if tagged >= 0 {
|
|
||||||
return fields[tagged], true
|
|
||||||
}
|
|
||||||
// All remaining fields have the same length. If there's more than one,
|
|
||||||
// we have a conflict (two fields named "X" at the same level) and we
|
|
||||||
// return no field.
|
|
||||||
if len(fields) > 1 {
|
|
||||||
return field{}, false
|
|
||||||
}
|
|
||||||
return fields[0], true
|
|
||||||
}
|
|
||||||
|
|
||||||
var fieldCache struct {
|
|
||||||
sync.RWMutex
|
|
||||||
m map[reflect.Type][]field
|
|
||||||
}
|
|
||||||
|
|
||||||
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
|
|
||||||
func cachedTypeFields(t reflect.Type) []field {
|
|
||||||
fieldCache.RLock()
|
|
||||||
f := fieldCache.m[t]
|
|
||||||
fieldCache.RUnlock()
|
|
||||||
if f != nil {
|
|
||||||
return f
|
|
||||||
}
|
|
||||||
|
|
||||||
// Compute fields without lock.
|
|
||||||
// Might duplicate effort but won't hold other computations back.
|
|
||||||
f = typeFields(t)
|
|
||||||
if f == nil {
|
|
||||||
f = []field{}
|
|
||||||
}
|
|
||||||
|
|
||||||
fieldCache.Lock()
|
|
||||||
if fieldCache.m == nil {
|
|
||||||
fieldCache.m = map[reflect.Type][]field{}
|
|
||||||
}
|
|
||||||
fieldCache.m[t] = f
|
|
||||||
fieldCache.Unlock()
|
|
||||||
return f
|
|
||||||
}
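typeFields above discovers decodable struct fields, honouring `toml` tags and Go's embedding rules. A hedged reflection sketch of the raw material it works from; the struct and tag values are made up for illustration, and the tag parsing itself is done by getOptions (referenced above but defined elsewhere in the package):

```go
package main

import (
	"fmt"
	"reflect"
)

type server struct {
	Host    string `toml:"host"`
	Port    int    `toml:"port"`
	ignored string // unexported fields are skipped by typeFields
}

func main() {
	t := reflect.TypeOf(server{})
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		fmt.Printf("%-8s tag=%q exported=%v\n",
			f.Name, f.Tag.Get("toml"), f.PkgPath == "")
	}
}
```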
16 vendor/github.com/KyleBanks/depth/.gitignore (generated, vendored)
@@ -1,16 +0,0 @@
# Binaries for programs and plugins
|
|
||||||
*.exe
|
|
||||||
*.dll
|
|
||||||
*.so
|
|
||||||
*.dylib
|
|
||||||
|
|
||||||
# Test binary, build with `go test -c`
|
|
||||||
*.test
|
|
||||||
|
|
||||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
|
||||||
*.out
|
|
||||||
|
|
||||||
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
|
|
||||||
.glide/
|
|
||||||
|
|
||||||
bin/
9 vendor/github.com/KyleBanks/depth/.travis.yml (generated, vendored)
@@ -1,9 +0,0 @@
language: go
|
|
||||||
sudo: false
|
|
||||||
go:
|
|
||||||
- 1.9.x
|
|
||||||
before_install:
|
|
||||||
- go get github.com/mattn/goveralls
|
|
||||||
script:
|
|
||||||
- $HOME/gopath/bin/goveralls -service=travis-ci
|
|
||||||
#script: go test $(go list ./... | grep -v vendor/)
21 vendor/github.com/KyleBanks/depth/LICENSE (generated, vendored)
@@ -1,21 +0,0 @@
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2017 Kyle Banks
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
32 vendor/github.com/KyleBanks/depth/Makefile (generated, vendored)
@@ -1,32 +0,0 @@
VERSION = 1.2.1
|
|
||||||
|
|
||||||
RELEASE_PKG = ./cmd/depth
|
|
||||||
INSTALL_PKG = $(RELEASE_PKG)
|
|
||||||
|
|
||||||
|
|
||||||
# Remote includes require 'mmake'
|
|
||||||
# github.com/tj/mmake
|
|
||||||
include github.com/KyleBanks/make/go/install
|
|
||||||
include github.com/KyleBanks/make/go/sanity
|
|
||||||
include github.com/KyleBanks/make/go/release
|
|
||||||
include github.com/KyleBanks/make/go/bench
|
|
||||||
include github.com/KyleBanks/make/git/precommit
|
|
||||||
|
|
||||||
# Runs a number of depth commands as examples of what's possible.
|
|
||||||
example: | install
|
|
||||||
depth github.com/KyleBanks/depth/cmd/depth strings ./
|
|
||||||
|
|
||||||
depth -internal strings
|
|
||||||
|
|
||||||
depth -json github.com/KyleBanks/depth/cmd/depth
|
|
||||||
|
|
||||||
depth -test github.com/KyleBanks/depth/cmd/depth
|
|
||||||
|
|
||||||
depth -test -internal strings
|
|
||||||
|
|
||||||
depth -test -internal -max 3 strings
|
|
||||||
|
|
||||||
depth .
|
|
||||||
|
|
||||||
depth ./cmd/depth
|
|
||||||
.PHONY: example
232 vendor/github.com/KyleBanks/depth/README.md (generated, vendored)
@@ -1,232 +0,0 @@
# depth
|
|
||||||
|
|
||||||
[](https://godoc.org/github.com/KyleBanks/depth)
|
|
||||||
[](https://travis-ci.org/KyleBanks/depth)
|
|
||||||
[](https://goreportcard.com/report/github.com/KyleBanks/depth)
|
|
||||||
[](https://coveralls.io/github/KyleBanks/depth?branch=master)
|
|
||||||
|
|
||||||
`depth` is a tool to retrieve and visualize Go source code dependency trees.
|
|
||||||
|
|
||||||
## Install
|
|
||||||
|
|
||||||
Download the appropriate binary for your platform from the [Releases](https://github.com/KyleBanks/depth/releases) page, or:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
go get github.com/KyleBanks/depth/cmd/depth
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
`depth` can be used as a standalone command-line application, or as a package within your own project.
|
|
||||||
|
|
||||||
### Command-Line
|
|
||||||
|
|
||||||
Simply execute `depth` with one or more package names to visualize. You can use the fully qualified import path of the package, like so:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
$ depth github.com/KyleBanks/depth/cmd/depth
|
|
||||||
github.com/KyleBanks/depth/cmd/depth
|
|
||||||
├ encoding/json
|
|
||||||
├ flag
|
|
||||||
├ fmt
|
|
||||||
├ io
|
|
||||||
├ log
|
|
||||||
├ os
|
|
||||||
├ strings
|
|
||||||
└ github.com/KyleBanks/depth
|
|
||||||
├ fmt
|
|
||||||
├ go/build
|
|
||||||
├ path
|
|
||||||
├ sort
|
|
||||||
└ strings
|
|
||||||
12 dependencies (11 internal, 1 external, 0 testing).
|
|
||||||
```
|
|
||||||
|
|
||||||
Or you can use a relative path, for example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
$ depth .
|
|
||||||
$ depth ./cmd/depth
|
|
||||||
$ depth ../
|
|
||||||
```
|
|
||||||
|
|
||||||
You can also use `depth` on the Go standard library:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
$ depth strings
|
|
||||||
strings
|
|
||||||
├ errors
|
|
||||||
├ io
|
|
||||||
├ unicode
|
|
||||||
└ unicode/utf8
|
|
||||||
5 dependencies (5 internal, 0 external, 0 testing).
|
|
||||||
```
|
|
||||||
|
|
||||||
Visualizing multiple packages at a time is supported by simply naming the packages you'd like to visualize:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
$ depth strings github.com/KyleBanks/depth
|
|
||||||
strings
|
|
||||||
├ errors
|
|
||||||
├ io
|
|
||||||
├ unicode
|
|
||||||
└ unicode/utf8
|
|
||||||
5 dependencies (5 internal, 0 external, 0 testing).
|
|
||||||
github.com/KyleBanks/depth
|
|
||||||
├ fmt
|
|
||||||
├ go/build
|
|
||||||
├ path
|
|
||||||
├ sort
|
|
||||||
└ strings
|
|
||||||
7 dependencies (7 internal, 0 external, 0 testing).
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `-internal`
|
|
||||||
|
|
||||||
By default, `depth` only resolves the top level of dependencies for standard library packages, however you can use the `-internal` flag to visualize all internal dependencies:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
$ depth -internal strings
|
|
||||||
strings
|
|
||||||
├ errors
|
|
||||||
├ io
|
|
||||||
├ errors
|
|
||||||
└ sync
|
|
||||||
├ internal/race
|
|
||||||
└ unsafe
|
|
||||||
├ runtime
|
|
||||||
├ runtime/internal/atomic
|
|
||||||
└ unsafe
|
|
||||||
├ runtime/internal/sys
|
|
||||||
└ unsafe
|
|
||||||
├ sync/atomic
|
|
||||||
└ unsafe
|
|
||||||
└ unsafe
|
|
||||||
├ unicode
|
|
||||||
└ unicode/utf8
|
|
||||||
12 dependencies (12 internal, 0 external, 0 testing).
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `-max`
|
|
||||||
|
|
||||||
The `-max` flag limits the dependency tree to the maximum depth provided. For example, if you supply `-max 1` on the `depth` package, your output would look like so:
|
|
||||||
|
|
||||||
```
|
|
||||||
$ depth -max 1 github.com/KyleBanks/depth/cmd/depth
|
|
||||||
github.com/KyleBanks/depth/cmd/depth
|
|
||||||
├ encoding/json
|
|
||||||
├ flag
|
|
||||||
├ fmt
|
|
||||||
├ io
|
|
||||||
├ log
|
|
||||||
├ os
|
|
||||||
├ strings
|
|
||||||
└ github.com/KyleBanks/depth
|
|
||||||
7 dependencies (6 internal, 1 external, 0 testing).
|
|
||||||
```
|
|
||||||
|
|
||||||
The `-max` flag is particularly useful in conjunction with the `-internal` flag which can lead to very deep dependency trees.

#### `-test`

By default, `depth` ignores dependencies that are only required for testing. However, you can view test dependencies using the `-test` flag:

```sh
$ depth -test strings
strings
├ bytes
├ errors
├ fmt
├ io
├ io/ioutil
├ math/rand
├ reflect
├ sync
├ testing
├ unicode
├ unicode/utf8
└ unsafe
13 dependencies (13 internal, 0 external, 8 testing).
```

#### `-explain target-package`

The `-explain` flag instructs `depth` to print import chains in which the `target-package` is found:

```sh
$ depth -explain strings github.com/KyleBanks/depth/cmd/depth
github.com/KyleBanks/depth/cmd/depth -> strings
github.com/KyleBanks/depth/cmd/depth -> github.com/KyleBanks/depth -> strings
```
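
A similar chain search can be performed programmatically against a resolved `Tree` by walking `Pkg.Deps`; a rough sketch (the `chains` helper is illustrative and not part of the library):

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/KyleBanks/depth"
)

// chains records every dependency path from p that ends at target.
func chains(p depth.Pkg, target string, path []string, out *[]string) {
	path = append(path, p.Name)
	if p.Name == target {
		*out = append(*out, strings.Join(path, " -> "))
		return
	}
	for _, d := range p.Deps {
		chains(d, target, path, out)
	}
}

func main() {
	var t depth.Tree
	if err := t.Resolve("github.com/KyleBanks/depth/cmd/depth"); err != nil {
		log.Fatal(err)
	}

	var found []string
	chains(*t.Root, "strings", nil, &found)
	for _, c := range found {
		fmt.Println(c)
	}
}
```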

#### `-json`

The `-json` flag instructs `depth` to output dependencies in JSON format:

```sh
$ depth -json github.com/KyleBanks/depth/cmd/depth
{
  "name": "github.com/KyleBanks/depth/cmd/depth",
  "deps": [
    {
      "name": "encoding/json",
      "internal": true,
      "deps": null
    },
    ...
    {
      "name": "github.com/KyleBanks/depth",
      "internal": false,
      "deps": [
        {
          "name": "go/build",
          "internal": true,
          "deps": null
        },
        ...
      ]
    }
  ]
}
```
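
Because the JSON mirrors the `name`, `internal` and `deps` fields shown above, it can be decoded with `encoding/json` into a small recursive struct; a sketch, assuming the `depth -json` output is piped to the program on stdin:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"
)

// pkgJSON mirrors the fields emitted by `depth -json`.
type pkgJSON struct {
	Name     string    `json:"name"`
	Internal bool      `json:"internal"`
	Deps     []pkgJSON `json:"deps"`
}

func main() {
	var root pkgJSON
	if err := json.NewDecoder(os.Stdin).Decode(&root); err != nil {
		log.Fatal(err)
	}

	// Count how many direct dependencies are external (non-stdlib).
	external := 0
	for _, d := range root.Deps {
		if !d.Internal {
			external++
		}
	}
	fmt.Printf("%s: %d direct dependencies, %d external\n", root.Name, len(root.Deps), external)
}
```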

### Integrating With Your Project

The `depth` package can easily be used to retrieve the dependency tree for a particular package in your own project. For example, here's how you would retrieve the dependency tree for the `strings` package:

```go
import "github.com/KyleBanks/depth"

var t depth.Tree
err := t.Resolve("strings")
if err != nil {
    log.Fatal(err)
}

// Output: "'strings' has 4 dependencies."
log.Printf("'%v' has %v dependencies.", t.Root.Name, len(t.Root.Deps))
```

For additional customization, simply set the appropriate flags on the `Tree` before resolving:

```go
import "github.com/KyleBanks/depth"

t := depth.Tree{
    ResolveInternal: true,
    ResolveTest:     true,
    MaxDepth:        10,
}

err := t.Resolve("strings")
```
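
A resolved `Tree` can also be walked directly; the following sketch prints each package indented by nesting level, using the `Name`, `Internal` and `Deps` fields of the `Pkg` struct (see `pkg.go` below):

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/KyleBanks/depth"
)

// printPkg prints a package and its dependencies, indenting by nesting level.
func printPkg(p depth.Pkg, level int) {
	label := p.Name
	if !p.Internal {
		label += " (external)"
	}
	fmt.Printf("%s%s\n", strings.Repeat("  ", level), label)
	for _, d := range p.Deps {
		printPkg(d, level+1)
	}
}

func main() {
	t := depth.Tree{ResolveInternal: true, MaxDepth: 3}
	if err := t.Resolve("github.com/KyleBanks/depth"); err != nil {
		log.Fatal(err)
	}
	printPkg(*t.Root, 0)
}
```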

## Author

`depth` was developed by [Kyle Banks](https://twitter.com/kylewbanks).

## License

`depth` is available under the [MIT](./LICENSE) license.

129 vendor/github.com/KyleBanks/depth/depth.go (generated, vendored)
@ -1,129 +0,0 @@
// Package depth provides the ability to traverse and retrieve Go source code dependencies in the form of
|
|
||||||
// internal and external packages.
|
|
||||||
//
|
|
||||||
// For example, the dependencies of the stdlib `strings` package can be resolved like so:
|
|
||||||
//
|
|
||||||
// import "github.com/KyleBanks/depth"
|
|
||||||
//
|
|
||||||
// var t depth.Tree
|
|
||||||
// err := t.Resolve("strings")
|
|
||||||
// if err != nil {
|
|
||||||
// log.Fatal(err)
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// // Output: "strings has 4 dependencies."
|
|
||||||
// log.Printf("%v has %v dependencies.", t.Root.Name, len(t.Root.Deps))
|
|
||||||
//
|
|
||||||
// For additional customization, simply set the appropriate flags on the `Tree` before resolving:
|
|
||||||
//
|
|
||||||
// import "github.com/KyleBanks/depth"
|
|
||||||
//
|
|
||||||
// t := depth.Tree {
|
|
||||||
// ResolveInternal: true,
|
|
||||||
// ResolveTest: true,
|
|
||||||
// MaxDepth: 10,
|
|
||||||
// }
|
|
||||||
// err := t.Resolve("strings")
|
|
||||||
package depth
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"go/build"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ErrRootPkgNotResolved is returned when the root Pkg of the Tree cannot be resolved,
|
|
||||||
// typically because it does not exist.
|
|
||||||
var ErrRootPkgNotResolved = errors.New("unable to resolve root package")
|
|
||||||
|
|
||||||
// Importer defines a type that can import a package and return its details.
|
|
||||||
type Importer interface {
|
|
||||||
Import(name, srcDir string, im build.ImportMode) (*build.Package, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Tree represents the top level of a Pkg and the configuration used to
|
|
||||||
// initialize and represent its contents.
|
|
||||||
type Tree struct {
|
|
||||||
Root *Pkg
|
|
||||||
|
|
||||||
ResolveInternal bool
|
|
||||||
ResolveTest bool
|
|
||||||
MaxDepth int
|
|
||||||
|
|
||||||
Importer Importer
|
|
||||||
|
|
||||||
importCache map[string]struct{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Resolve recursively finds all dependencies for the root Pkg name provided,
|
|
||||||
// and the packages it depends on.
|
|
||||||
func (t *Tree) Resolve(name string) error {
|
|
||||||
pwd, err := os.Getwd()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Root = &Pkg{
|
|
||||||
Name: name,
|
|
||||||
Tree: t,
|
|
||||||
SrcDir: pwd,
|
|
||||||
Test: false,
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reset the import cache each time to ensure a reused Tree doesn't
|
|
||||||
// reuse the same cache.
|
|
||||||
t.importCache = nil
|
|
||||||
|
|
||||||
// Allow custom importers, but use build.Default if none is provided.
|
|
||||||
if t.Importer == nil {
|
|
||||||
t.Importer = &build.Default
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Root.Resolve(t.Importer)
|
|
||||||
if !t.Root.Resolved {
|
|
||||||
return ErrRootPkgNotResolved
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// shouldResolveInternal determines if internal packages should be further resolved beyond the
|
|
||||||
// current parent.
|
|
||||||
//
|
|
||||||
// For example, if the parent Pkg is `github.com/foo/bar` and true is returned, all the
|
|
||||||
// internal dependencies it relies on will be resolved. If for example `strings` is one of those
|
|
||||||
// dependencies, and it is passed as the parent here, false may be returned and its internal
|
|
||||||
// dependencies will not be resolved.
|
|
||||||
func (t *Tree) shouldResolveInternal(parent *Pkg) bool {
|
|
||||||
if t.ResolveInternal {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
return parent == t.Root
|
|
||||||
}
|
|
||||||
|
|
||||||
// isAtMaxDepth returns true when the depth of the Pkg provided is at or beyond the maximum
|
|
||||||
// depth allowed by the tree.
|
|
||||||
//
|
|
||||||
// If the Tree has a MaxDepth of zero, true is never returned.
|
|
||||||
func (t *Tree) isAtMaxDepth(p *Pkg) bool {
|
|
||||||
if t.MaxDepth == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
return p.depth() >= t.MaxDepth
|
|
||||||
}
|
|
||||||
|
|
||||||
// hasSeenImport returns true if the import name provided has already been seen within the tree.
|
|
||||||
// This function only returns false for a name once.
|
|
||||||
func (t *Tree) hasSeenImport(name string) bool {
|
|
||||||
if t.importCache == nil {
|
|
||||||
t.importCache = make(map[string]struct{})
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := t.importCache[name]; ok {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
t.importCache[name] = struct{}{}
|
|
||||||
return false
|
|
||||||
}
|
|
184 vendor/github.com/KyleBanks/depth/pkg.go (generated, vendored)
@ -1,184 +0,0 @@
package depth
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"go/build"
|
|
||||||
"path"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Pkg represents a Go source package, and its dependencies.
|
|
||||||
type Pkg struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
SrcDir string `json:"-"`
|
|
||||||
|
|
||||||
Internal bool `json:"internal"`
|
|
||||||
Resolved bool `json:"resolved"`
|
|
||||||
Test bool `json:"-"`
|
|
||||||
|
|
||||||
Tree *Tree `json:"-"`
|
|
||||||
Parent *Pkg `json:"-"`
|
|
||||||
Deps []Pkg `json:"deps"`
|
|
||||||
|
|
||||||
Raw *build.Package `json:"-"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Resolve recursively finds all dependencies for the Pkg and the packages it depends on.
|
|
||||||
func (p *Pkg) Resolve(i Importer) {
|
|
||||||
// Resolved is always true, regardless of if we skip the import,
|
|
||||||
// it is only false if there is an error while importing.
|
|
||||||
p.Resolved = true
|
|
||||||
|
|
||||||
name := p.cleanName()
|
|
||||||
if name == "" {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Stop resolving imports if we've reached max depth or found a duplicate.
|
|
||||||
var importMode build.ImportMode
|
|
||||||
if p.Tree.hasSeenImport(name) || p.Tree.isAtMaxDepth(p) {
|
|
||||||
importMode = build.FindOnly
|
|
||||||
}
|
|
||||||
|
|
||||||
pkg, err := i.Import(name, p.SrcDir, importMode)
|
|
||||||
if err != nil {
|
|
||||||
// TODO: Check the error type?
|
|
||||||
p.Resolved = false
|
|
||||||
return
|
|
||||||
}
|
|
||||||
p.Raw = pkg
|
|
||||||
|
|
||||||
// Update the name with the fully qualified import path.
|
|
||||||
p.Name = pkg.ImportPath
|
|
||||||
|
|
||||||
// If this is an internal dependency, we may need to skip it.
|
|
||||||
if pkg.Goroot {
|
|
||||||
p.Internal = true
|
|
||||||
if !p.Tree.shouldResolveInternal(p) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// First we set the regular dependencies, then we add the test dependencies
|
|
||||||
// sharing the same set. This allows us to mark all test-only deps linearly.
|
|
||||||
unique := make(map[string]struct{})
|
|
||||||
p.setDeps(i, pkg.Imports, pkg.Dir, unique, false)
|
|
||||||
if p.Tree.ResolveTest {
|
|
||||||
p.setDeps(i, append(pkg.TestImports, pkg.XTestImports...), pkg.Dir, unique, true)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// setDeps takes a slice of import paths and the source directory they are relative to,
|
|
||||||
// and creates the Deps of the Pkg. Each dependency is also further resolved prior to being added
|
|
||||||
// to the Pkg.
|
|
||||||
func (p *Pkg) setDeps(i Importer, imports []string, srcDir string, unique map[string]struct{}, isTest bool) {
|
|
||||||
for _, imp := range imports {
|
|
||||||
// Mostly for testing files where cyclic imports are allowed.
|
|
||||||
if imp == p.Name {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Skip duplicates.
|
|
||||||
if _, ok := unique[imp]; ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
unique[imp] = struct{}{}
|
|
||||||
|
|
||||||
p.addDep(i, imp, srcDir, isTest)
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Sort(byInternalAndName(p.Deps))
|
|
||||||
}
|
|
||||||
|
|
||||||
// addDep creates a Pkg and its dependencies from an imported package name.
|
|
||||||
func (p *Pkg) addDep(i Importer, name string, srcDir string, isTest bool) {
|
|
||||||
dep := Pkg{
|
|
||||||
Name: name,
|
|
||||||
SrcDir: srcDir,
|
|
||||||
Tree: p.Tree,
|
|
||||||
Parent: p,
|
|
||||||
Test: isTest,
|
|
||||||
}
|
|
||||||
dep.Resolve(i)
|
|
||||||
|
|
||||||
p.Deps = append(p.Deps, dep)
|
|
||||||
}
|
|
||||||
|
|
||||||
// isParent goes recursively up the chain of Pkgs to determine if the name provided is ever a
|
|
||||||
// parent of the current Pkg.
|
|
||||||
func (p *Pkg) isParent(name string) bool {
|
|
||||||
if p.Parent == nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if p.Parent.Name == name {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
return p.Parent.isParent(name)
|
|
||||||
}
|
|
||||||
|
|
||||||
// depth returns the depth of the Pkg within the Tree.
|
|
||||||
func (p *Pkg) depth() int {
|
|
||||||
if p.Parent == nil {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
return p.Parent.depth() + 1
|
|
||||||
}
|
|
||||||
|
|
||||||
// cleanName returns a cleaned version of the Pkg name used for resolving dependencies.
|
|
||||||
//
|
|
||||||
// If an empty string is returned, dependencies should not be resolved.
|
|
||||||
func (p *Pkg) cleanName() string {
|
|
||||||
name := p.Name
|
|
||||||
|
|
||||||
// C 'package' cannot be resolved.
|
|
||||||
if name == "C" {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
// Internal golang_org/* packages must be prefixed with vendor/
|
|
||||||
//
|
|
||||||
// Thanks to @davecheney for this:
|
|
||||||
// https://github.com/davecheney/graphpkg/blob/master/main.go#L46
|
|
||||||
if strings.HasPrefix(name, "golang_org") {
|
|
||||||
name = path.Join("vendor", name)
|
|
||||||
}
|
|
||||||
|
|
||||||
return name
|
|
||||||
}
|
|
||||||
|
|
||||||
// String returns a string representation of the Pkg containing the Pkg name and status.
|
|
||||||
func (p *Pkg) String() string {
|
|
||||||
b := bytes.NewBufferString(p.Name)
|
|
||||||
|
|
||||||
if !p.Resolved {
|
|
||||||
b.Write([]byte(" (unresolved)"))
|
|
||||||
}
|
|
||||||
|
|
||||||
return b.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
// byInternalAndName ensures a slice of Pkgs are sorted such that the internal stdlib
|
|
||||||
// packages are always above external packages (ie. github.com/whatever).
|
|
||||||
type byInternalAndName []Pkg
|
|
||||||
|
|
||||||
func (b byInternalAndName) Len() int {
|
|
||||||
return len(b)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b byInternalAndName) Swap(i, j int) {
|
|
||||||
b[i], b[j] = b[j], b[i]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b byInternalAndName) Less(i, j int) bool {
|
|
||||||
if b[i].Internal && !b[j].Internal {
|
|
||||||
return true
|
|
||||||
} else if !b[i].Internal && b[j].Internal {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
return b[i].Name < b[j].Name
|
|
||||||
}
|
|
5 vendor/github.com/PuerkitoBio/purell/.gitignore (generated, vendored)
@ -1,5 +0,0 @@
*.sublime-*
|
|
||||||
.DS_Store
|
|
||||||
*.swp
|
|
||||||
*.swo
|
|
||||||
tags
|
|
12 vendor/github.com/PuerkitoBio/purell/.travis.yml (generated, vendored)
@ -1,12 +0,0 @@
language: go
|
|
||||||
|
|
||||||
go:
|
|
||||||
- 1.4.x
|
|
||||||
- 1.5.x
|
|
||||||
- 1.6.x
|
|
||||||
- 1.7.x
|
|
||||||
- 1.8.x
|
|
||||||
- 1.9.x
|
|
||||||
- "1.10.x"
|
|
||||||
- "1.11.x"
|
|
||||||
- tip
|
|
12 vendor/github.com/PuerkitoBio/purell/LICENSE (generated, vendored)
@ -1,12 +0,0 @@
Copyright (c) 2012, Martin Angers
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
188 vendor/github.com/PuerkitoBio/purell/README.md (generated, vendored)
@ -1,188 +0,0 @@
# Purell

Purell is a tiny Go library to normalize URLs. It returns a pure URL. Pure-ell. Sanitizer and all. Yeah, I know...

Based on the [wikipedia paper][wiki] and the [RFC 3986 document][rfc].

[](http://travis-ci.org/PuerkitoBio/purell)

## Install

`go get github.com/PuerkitoBio/purell`

## Changelog

* **v1.1.1** : Fix failing test due to Go1.12 changes (thanks to @ianlancetaylor).
* **2016-11-14 (v1.1.0)** : IDN: Conform to RFC 5895: Fold character width (thanks to @beeker1121).
* **2016-07-27 (v1.0.0)** : Normalize IDN to ASCII (thanks to @zenovich).
* **2015-02-08** : Add fix for relative paths issue ([PR #5][pr5]) and add fix for unnecessary encoding of reserved characters ([see issue #7][iss7]).
* **v0.2.0** : Add benchmarks, Attempt IDN support.
* **v0.1.0** : Initial release.

## Examples

From `example_test.go` (note that in your code, you would import "github.com/PuerkitoBio/purell", and would prefix references to its methods and constants with "purell."):

```go
|
|
||||||
package purell
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"net/url"
|
|
||||||
)
|
|
||||||
|
|
||||||
func ExampleNormalizeURLString() {
|
|
||||||
if normalized, err := NormalizeURLString("hTTp://someWEBsite.com:80/Amazing%3f/url/",
|
|
||||||
FlagLowercaseScheme|FlagLowercaseHost|FlagUppercaseEscapes); err != nil {
|
|
||||||
panic(err)
|
|
||||||
} else {
|
|
||||||
fmt.Print(normalized)
|
|
||||||
}
|
|
||||||
// Output: http://somewebsite.com:80/Amazing%3F/url/
|
|
||||||
}
|
|
||||||
|
|
||||||
func ExampleMustNormalizeURLString() {
|
|
||||||
normalized := MustNormalizeURLString("hTTpS://someWEBsite.com:443/Amazing%fa/url/",
|
|
||||||
FlagsUnsafeGreedy)
|
|
||||||
fmt.Print(normalized)
|
|
||||||
|
|
||||||
// Output: http://somewebsite.com/Amazing%FA/url
|
|
||||||
}
|
|
||||||
|
|
||||||
func ExampleNormalizeURL() {
|
|
||||||
if u, err := url.Parse("Http://SomeUrl.com:8080/a/b/.././c///g?c=3&a=1&b=9&c=0#target"); err != nil {
|
|
||||||
panic(err)
|
|
||||||
} else {
|
|
||||||
normalized := NormalizeURL(u, FlagsUsuallySafeGreedy|FlagRemoveDuplicateSlashes|FlagRemoveFragment)
|
|
||||||
fmt.Print(normalized)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Output: http://someurl.com:8080/a/c/g?c=3&a=1&b=9&c=0
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## API
|
|
||||||
|
|
||||||
As seen in the examples above, purell offers three methods, `NormalizeURLString(string, NormalizationFlags) (string, error)`, `MustNormalizeURLString(string, NormalizationFlags) (string)` and `NormalizeURL(*url.URL, NormalizationFlags) (string)`. They all normalize the provided URL based on the specified flags. Here are the available flags:
|
|
||||||
|
|
||||||
```go
|
|
||||||
const (
|
|
||||||
// Safe normalizations
|
|
||||||
FlagLowercaseScheme NormalizationFlags = 1 << iota // HTTP://host -> http://host, applied by default in Go1.1
|
|
||||||
FlagLowercaseHost // http://HOST -> http://host
|
|
||||||
FlagUppercaseEscapes // http://host/t%ef -> http://host/t%EF
|
|
||||||
FlagDecodeUnnecessaryEscapes // http://host/t%41 -> http://host/tA
|
|
||||||
FlagEncodeNecessaryEscapes // http://host/!"#$ -> http://host/%21%22#$
|
|
||||||
FlagRemoveDefaultPort // http://host:80 -> http://host
|
|
||||||
FlagRemoveEmptyQuerySeparator // http://host/path? -> http://host/path
|
|
||||||
|
|
||||||
// Usually safe normalizations
|
|
||||||
FlagRemoveTrailingSlash // http://host/path/ -> http://host/path
|
|
||||||
FlagAddTrailingSlash // http://host/path -> http://host/path/ (should choose only one of these add/remove trailing slash flags)
|
|
||||||
FlagRemoveDotSegments // http://host/path/./a/b/../c -> http://host/path/a/c
|
|
||||||
|
|
||||||
// Unsafe normalizations
|
|
||||||
FlagRemoveDirectoryIndex // http://host/path/index.html -> http://host/path/
|
|
||||||
FlagRemoveFragment // http://host/path#fragment -> http://host/path
|
|
||||||
FlagForceHTTP // https://host -> http://host
|
|
||||||
FlagRemoveDuplicateSlashes // http://host/path//a///b -> http://host/path/a/b
|
|
||||||
FlagRemoveWWW // http://www.host/ -> http://host/
|
|
||||||
FlagAddWWW // http://host/ -> http://www.host/ (should choose only one of these add/remove WWW flags)
|
|
||||||
FlagSortQuery // http://host/path?c=3&b=2&a=1&b=1 -> http://host/path?a=1&b=1&b=2&c=3
|
|
||||||
|
|
||||||
// Normalizations not in the wikipedia article, required to cover tests cases
|
|
||||||
// submitted by jehiah
|
|
||||||
FlagDecodeDWORDHost // http://1113982867 -> http://66.102.7.147
|
|
||||||
FlagDecodeOctalHost // http://0102.0146.07.0223 -> http://66.102.7.147
|
|
||||||
FlagDecodeHexHost // http://0x42660793 -> http://66.102.7.147
|
|
||||||
FlagRemoveUnnecessaryHostDots // http://.host../path -> http://host/path
|
|
||||||
FlagRemoveEmptyPortSeparator // http://host:/path -> http://host/path
|
|
||||||
|
|
||||||
// Convenience set of safe normalizations
|
|
||||||
FlagsSafe NormalizationFlags = FlagLowercaseHost | FlagLowercaseScheme | FlagUppercaseEscapes | FlagDecodeUnnecessaryEscapes | FlagEncodeNecessaryEscapes | FlagRemoveDefaultPort | FlagRemoveEmptyQuerySeparator
|
|
||||||
|
|
||||||
// For convenience sets, "greedy" uses the "remove trailing slash" and "remove www. prefix" flags,
|
|
||||||
// while "non-greedy" uses the "add (or keep) the trailing slash" and "add www. prefix".
|
|
||||||
|
|
||||||
// Convenience set of usually safe normalizations (includes FlagsSafe)
|
|
||||||
FlagsUsuallySafeGreedy NormalizationFlags = FlagsSafe | FlagRemoveTrailingSlash | FlagRemoveDotSegments
|
|
||||||
FlagsUsuallySafeNonGreedy NormalizationFlags = FlagsSafe | FlagAddTrailingSlash | FlagRemoveDotSegments
|
|
||||||
|
|
||||||
// Convenience set of unsafe normalizations (includes FlagsUsuallySafe)
|
|
||||||
FlagsUnsafeGreedy NormalizationFlags = FlagsUsuallySafeGreedy | FlagRemoveDirectoryIndex | FlagRemoveFragment | FlagForceHTTP | FlagRemoveDuplicateSlashes | FlagRemoveWWW | FlagSortQuery
|
|
||||||
FlagsUnsafeNonGreedy NormalizationFlags = FlagsUsuallySafeNonGreedy | FlagRemoveDirectoryIndex | FlagRemoveFragment | FlagForceHTTP | FlagRemoveDuplicateSlashes | FlagAddWWW | FlagSortQuery
|
|
||||||
|
|
||||||
// Convenience set of all available flags
|
|
||||||
FlagsAllGreedy = FlagsUnsafeGreedy | FlagDecodeDWORDHost | FlagDecodeOctalHost | FlagDecodeHexHost | FlagRemoveUnnecessaryHostDots | FlagRemoveEmptyPortSeparator
|
|
||||||
FlagsAllNonGreedy = FlagsUnsafeNonGreedy | FlagDecodeDWORDHost | FlagDecodeOctalHost | FlagDecodeHexHost | FlagRemoveUnnecessaryHostDots | FlagRemoveEmptyPortSeparator
|
|
||||||
)
|
|
||||||
```

For convenience, the set of flags `FlagsSafe`, `FlagsUsuallySafe[Greedy|NonGreedy]`, `FlagsUnsafe[Greedy|NonGreedy]` and `FlagsAll[Greedy|NonGreedy]` are provided for the similarly grouped normalizations on [wikipedia's URL normalization page][wiki]. You can add (using the bitwise OR `|` operator) or remove (using the bitwise AND NOT `&^` operator) individual flags from the sets if required, to build your own custom set.
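
For example, a custom set could start from `FlagsUsuallySafeGreedy`, add query sorting, and drop the trailing-slash removal; a minimal sketch (the input URL is only illustrative):

```go
package main

import (
	"fmt"
	"log"

	"github.com/PuerkitoBio/purell"
)

func main() {
	// Start from a convenience set, add one flag, remove another.
	flags := (purell.FlagsUsuallySafeGreedy | purell.FlagSortQuery) &^ purell.FlagRemoveTrailingSlash

	normalized, err := purell.NormalizeURLString("HTTP://Example.com:80/a/b/../c/?b=2&a=1", flags)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(normalized) // should print: http://example.com/a/c/?a=1&b=2
}
```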

The [full godoc reference is available on gopkgdoc][godoc].

Some things to note:

* `FlagDecodeUnnecessaryEscapes`, `FlagEncodeNecessaryEscapes`, `FlagUppercaseEscapes` and `FlagRemoveEmptyQuerySeparator` are always implicitly set, because internally, the URL string is parsed as an URL object, which automatically decodes unnecessary escapes, uppercases and encodes necessary ones, and removes empty query separators (an unnecessary `?` at the end of the url). So this operation cannot **not** be done. For this reason, `FlagRemoveEmptyQuerySeparator` (as well as the other three) has been included in the `FlagsSafe` convenience set, instead of `FlagsUnsafe`, where Wikipedia puts it.

* The `FlagDecodeUnnecessaryEscapes` flag decodes the following escapes (*from -> to*):
    - %24 -> $
    - %26 -> &
    - %2B-%3B -> +,-./0123456789:;
    - %3D -> =
    - %40-%5A -> @ABCDEFGHIJKLMNOPQRSTUVWXYZ
    - %5F -> _
    - %61-%7A -> abcdefghijklmnopqrstuvwxyz
    - %7E -> ~

* When the `NormalizeURL` function is used (passing an URL object), this source URL object is modified (that is, after the call, the URL object will be modified to reflect the normalization).

* The *replace IP with domain name* normalization (`http://208.77.188.166/ → http://www.example.com/`) is obviously not possible for a library without making some network requests. This is not implemented in purell.

* The *remove unused query string parameters* and *remove default query parameters* are also not implemented, since this is a very case-specific normalization, and it is quite trivial to do with an URL object.

### Safe vs Usually Safe vs Unsafe

Purell allows you to control the level of risk you take while normalizing an URL. You can aggressively normalize, play it totally safe, or anything in between.

Consider the following URL:

`HTTPS://www.RooT.com/toto/t%45%1f///a/./b/../c/?z=3&w=2&a=4&w=1#invalid`

Normalizing with the `FlagsSafe` gives:

`https://www.root.com/toto/tE%1F///a/./b/../c/?z=3&w=2&a=4&w=1#invalid`

With the `FlagsUsuallySafeGreedy`:

`https://www.root.com/toto/tE%1F///a/c?z=3&w=2&a=4&w=1#invalid`

And with `FlagsUnsafeGreedy`:

`http://root.com/toto/tE%1F/a/c?a=4&w=1&w=2&z=3`
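
The three results above can be reproduced programmatically with `NormalizeURLString` and the corresponding convenience sets; a minimal sketch:

```go
package main

import (
	"fmt"
	"log"

	"github.com/PuerkitoBio/purell"
)

func main() {
	const raw = `HTTPS://www.RooT.com/toto/t%45%1f///a/./b/../c/?z=3&w=2&a=4&w=1#invalid`

	// One pass per risk level, from safest to most aggressive.
	for _, f := range []purell.NormalizationFlags{
		purell.FlagsSafe,
		purell.FlagsUsuallySafeGreedy,
		purell.FlagsUnsafeGreedy,
	} {
		normalized, err := purell.NormalizeURLString(raw, f)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(normalized)
	}
}
```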

## TODOs

* Add a class/default instance to allow specifying custom directory index names? At the moment, removing directory index removes `(^|/)((?:default|index)\.\w{1,4})$`.

## Thanks / Contributions

@rogpeppe
@jehiah
@opennota
@pchristopher1275
@zenovich
@beeker1121

## License

The [BSD 3-Clause license][bsd].

[bsd]: http://opensource.org/licenses/BSD-3-Clause
[wiki]: http://en.wikipedia.org/wiki/URL_normalization
[rfc]: http://tools.ietf.org/html/rfc3986#section-6
[godoc]: http://go.pkgdoc.org/github.com/PuerkitoBio/purell
[pr5]: https://github.com/PuerkitoBio/purell/pull/5
[iss7]: https://github.com/PuerkitoBio/purell/issues/7

379 vendor/github.com/PuerkitoBio/purell/purell.go (generated, vendored)
@ -1,379 +0,0 @@
/*
|
|
||||||
Package purell offers URL normalization as described on the wikipedia page:
|
|
||||||
http://en.wikipedia.org/wiki/URL_normalization
|
|
||||||
*/
|
|
||||||
package purell
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"net/url"
|
|
||||||
"regexp"
|
|
||||||
"sort"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/PuerkitoBio/urlesc"
|
|
||||||
"golang.org/x/net/idna"
|
|
||||||
"golang.org/x/text/unicode/norm"
|
|
||||||
"golang.org/x/text/width"
|
|
||||||
)
|
|
||||||
|
|
||||||
// A set of normalization flags determines how a URL will
|
|
||||||
// be normalized.
|
|
||||||
type NormalizationFlags uint
|
|
||||||
|
|
||||||
const (
|
|
||||||
// Safe normalizations
|
|
||||||
FlagLowercaseScheme NormalizationFlags = 1 << iota // HTTP://host -> http://host, applied by default in Go1.1
|
|
||||||
FlagLowercaseHost // http://HOST -> http://host
|
|
||||||
FlagUppercaseEscapes // http://host/t%ef -> http://host/t%EF
|
|
||||||
FlagDecodeUnnecessaryEscapes // http://host/t%41 -> http://host/tA
|
|
||||||
FlagEncodeNecessaryEscapes // http://host/!"#$ -> http://host/%21%22#$
|
|
||||||
FlagRemoveDefaultPort // http://host:80 -> http://host
|
|
||||||
FlagRemoveEmptyQuerySeparator // http://host/path? -> http://host/path
|
|
||||||
|
|
||||||
// Usually safe normalizations
|
|
||||||
FlagRemoveTrailingSlash // http://host/path/ -> http://host/path
|
|
||||||
FlagAddTrailingSlash // http://host/path -> http://host/path/ (should choose only one of these add/remove trailing slash flags)
|
|
||||||
FlagRemoveDotSegments // http://host/path/./a/b/../c -> http://host/path/a/c
|
|
||||||
|
|
||||||
// Unsafe normalizations
|
|
||||||
FlagRemoveDirectoryIndex // http://host/path/index.html -> http://host/path/
|
|
||||||
FlagRemoveFragment // http://host/path#fragment -> http://host/path
|
|
||||||
FlagForceHTTP // https://host -> http://host
|
|
||||||
FlagRemoveDuplicateSlashes // http://host/path//a///b -> http://host/path/a/b
|
|
||||||
FlagRemoveWWW // http://www.host/ -> http://host/
|
|
||||||
FlagAddWWW // http://host/ -> http://www.host/ (should choose only one of these add/remove WWW flags)
|
|
||||||
FlagSortQuery // http://host/path?c=3&b=2&a=1&b=1 -> http://host/path?a=1&b=1&b=2&c=3
|
|
||||||
|
|
||||||
// Normalizations not in the wikipedia article, required to cover tests cases
|
|
||||||
// submitted by jehiah
|
|
||||||
FlagDecodeDWORDHost // http://1113982867 -> http://66.102.7.147
|
|
||||||
FlagDecodeOctalHost // http://0102.0146.07.0223 -> http://66.102.7.147
|
|
||||||
FlagDecodeHexHost // http://0x42660793 -> http://66.102.7.147
|
|
||||||
FlagRemoveUnnecessaryHostDots // http://.host../path -> http://host/path
|
|
||||||
FlagRemoveEmptyPortSeparator // http://host:/path -> http://host/path
|
|
||||||
|
|
||||||
// Convenience set of safe normalizations
|
|
||||||
FlagsSafe NormalizationFlags = FlagLowercaseHost | FlagLowercaseScheme | FlagUppercaseEscapes | FlagDecodeUnnecessaryEscapes | FlagEncodeNecessaryEscapes | FlagRemoveDefaultPort | FlagRemoveEmptyQuerySeparator
|
|
||||||
|
|
||||||
// For convenience sets, "greedy" uses the "remove trailing slash" and "remove www. prefix" flags,
|
|
||||||
// while "non-greedy" uses the "add (or keep) the trailing slash" and "add www. prefix".
|
|
||||||
|
|
||||||
// Convenience set of usually safe normalizations (includes FlagsSafe)
|
|
||||||
FlagsUsuallySafeGreedy NormalizationFlags = FlagsSafe | FlagRemoveTrailingSlash | FlagRemoveDotSegments
|
|
||||||
FlagsUsuallySafeNonGreedy NormalizationFlags = FlagsSafe | FlagAddTrailingSlash | FlagRemoveDotSegments
|
|
||||||
|
|
||||||
// Convenience set of unsafe normalizations (includes FlagsUsuallySafe)
|
|
||||||
FlagsUnsafeGreedy NormalizationFlags = FlagsUsuallySafeGreedy | FlagRemoveDirectoryIndex | FlagRemoveFragment | FlagForceHTTP | FlagRemoveDuplicateSlashes | FlagRemoveWWW | FlagSortQuery
|
|
||||||
FlagsUnsafeNonGreedy NormalizationFlags = FlagsUsuallySafeNonGreedy | FlagRemoveDirectoryIndex | FlagRemoveFragment | FlagForceHTTP | FlagRemoveDuplicateSlashes | FlagAddWWW | FlagSortQuery
|
|
||||||
|
|
||||||
// Convenience set of all available flags
|
|
||||||
FlagsAllGreedy = FlagsUnsafeGreedy | FlagDecodeDWORDHost | FlagDecodeOctalHost | FlagDecodeHexHost | FlagRemoveUnnecessaryHostDots | FlagRemoveEmptyPortSeparator
|
|
||||||
FlagsAllNonGreedy = FlagsUnsafeNonGreedy | FlagDecodeDWORDHost | FlagDecodeOctalHost | FlagDecodeHexHost | FlagRemoveUnnecessaryHostDots | FlagRemoveEmptyPortSeparator
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
defaultHttpPort = ":80"
|
|
||||||
defaultHttpsPort = ":443"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Regular expressions used by the normalizations
|
|
||||||
var rxPort = regexp.MustCompile(`(:\d+)/?$`)
|
|
||||||
var rxDirIndex = regexp.MustCompile(`(^|/)((?:default|index)\.\w{1,4})$`)
|
|
||||||
var rxDupSlashes = regexp.MustCompile(`/{2,}`)
|
|
||||||
var rxDWORDHost = regexp.MustCompile(`^(\d+)((?:\.+)?(?:\:\d*)?)$`)
|
|
||||||
var rxOctalHost = regexp.MustCompile(`^(0\d*)\.(0\d*)\.(0\d*)\.(0\d*)((?:\.+)?(?:\:\d*)?)$`)
|
|
||||||
var rxHexHost = regexp.MustCompile(`^0x([0-9A-Fa-f]+)((?:\.+)?(?:\:\d*)?)$`)
|
|
||||||
var rxHostDots = regexp.MustCompile(`^(.+?)(:\d+)?$`)
|
|
||||||
var rxEmptyPort = regexp.MustCompile(`:+$`)
|
|
||||||
|
|
||||||
// Map of flags to implementation function.
|
|
||||||
// FlagDecodeUnnecessaryEscapes has no action, since it is done automatically
|
|
||||||
// by parsing the string as an URL. Same for FlagUppercaseEscapes and FlagRemoveEmptyQuerySeparator.
|
|
||||||
|
|
||||||
// Since maps have undefined traversing order, make a slice of ordered keys
|
|
||||||
var flagsOrder = []NormalizationFlags{
|
|
||||||
FlagLowercaseScheme,
|
|
||||||
FlagLowercaseHost,
|
|
||||||
FlagRemoveDefaultPort,
|
|
||||||
FlagRemoveDirectoryIndex,
|
|
||||||
FlagRemoveDotSegments,
|
|
||||||
FlagRemoveFragment,
|
|
||||||
FlagForceHTTP, // Must be after remove default port (because https=443/http=80)
|
|
||||||
FlagRemoveDuplicateSlashes,
|
|
||||||
FlagRemoveWWW,
|
|
||||||
FlagAddWWW,
|
|
||||||
FlagSortQuery,
|
|
||||||
FlagDecodeDWORDHost,
|
|
||||||
FlagDecodeOctalHost,
|
|
||||||
FlagDecodeHexHost,
|
|
||||||
FlagRemoveUnnecessaryHostDots,
|
|
||||||
FlagRemoveEmptyPortSeparator,
|
|
||||||
FlagRemoveTrailingSlash, // These two (add/remove trailing slash) must be last
|
|
||||||
FlagAddTrailingSlash,
|
|
||||||
}
|
|
||||||
|
|
||||||
// ... and then the map, where order is unimportant
|
|
||||||
var flags = map[NormalizationFlags]func(*url.URL){
|
|
||||||
FlagLowercaseScheme: lowercaseScheme,
|
|
||||||
FlagLowercaseHost: lowercaseHost,
|
|
||||||
FlagRemoveDefaultPort: removeDefaultPort,
|
|
||||||
FlagRemoveDirectoryIndex: removeDirectoryIndex,
|
|
||||||
FlagRemoveDotSegments: removeDotSegments,
|
|
||||||
FlagRemoveFragment: removeFragment,
|
|
||||||
FlagForceHTTP: forceHTTP,
|
|
||||||
FlagRemoveDuplicateSlashes: removeDuplicateSlashes,
|
|
||||||
FlagRemoveWWW: removeWWW,
|
|
||||||
FlagAddWWW: addWWW,
|
|
||||||
FlagSortQuery: sortQuery,
|
|
||||||
FlagDecodeDWORDHost: decodeDWORDHost,
|
|
||||||
FlagDecodeOctalHost: decodeOctalHost,
|
|
||||||
FlagDecodeHexHost: decodeHexHost,
|
|
||||||
FlagRemoveUnnecessaryHostDots: removeUnncessaryHostDots,
|
|
||||||
FlagRemoveEmptyPortSeparator: removeEmptyPortSeparator,
|
|
||||||
FlagRemoveTrailingSlash: removeTrailingSlash,
|
|
||||||
FlagAddTrailingSlash: addTrailingSlash,
|
|
||||||
}
|
|
||||||
|
|
||||||
// MustNormalizeURLString returns the normalized string, and panics if an error occurs.
|
|
||||||
// It takes an URL string as input, as well as the normalization flags.
|
|
||||||
func MustNormalizeURLString(u string, f NormalizationFlags) string {
|
|
||||||
result, e := NormalizeURLString(u, f)
|
|
||||||
if e != nil {
|
|
||||||
panic(e)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// NormalizeURLString returns the normalized string, or an error if it can't be parsed into an URL object.
|
|
||||||
// It takes an URL string as input, as well as the normalization flags.
|
|
||||||
func NormalizeURLString(u string, f NormalizationFlags) (string, error) {
|
|
||||||
parsed, err := url.Parse(u)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
if f&FlagLowercaseHost == FlagLowercaseHost {
|
|
||||||
parsed.Host = strings.ToLower(parsed.Host)
|
|
||||||
}
|
|
||||||
|
|
||||||
// The idna package doesn't fully conform to RFC 5895
|
|
||||||
// (https://tools.ietf.org/html/rfc5895), so we do it here.
|
|
||||||
// Taken from Go 1.8 cycle source, courtesy of bradfitz.
|
|
||||||
// TODO: Remove when (if?) idna package conforms to RFC 5895.
|
|
||||||
parsed.Host = width.Fold.String(parsed.Host)
|
|
||||||
parsed.Host = norm.NFC.String(parsed.Host)
|
|
||||||
if parsed.Host, err = idna.ToASCII(parsed.Host); err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
return NormalizeURL(parsed, f), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// NormalizeURL returns the normalized string.
|
|
||||||
// It takes a parsed URL object as input, as well as the normalization flags.
|
|
||||||
func NormalizeURL(u *url.URL, f NormalizationFlags) string {
|
|
||||||
for _, k := range flagsOrder {
|
|
||||||
if f&k == k {
|
|
||||||
flags[k](u)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return urlesc.Escape(u)
|
|
||||||
}
|
|
||||||
|
|
||||||
func lowercaseScheme(u *url.URL) {
|
|
||||||
if len(u.Scheme) > 0 {
|
|
||||||
u.Scheme = strings.ToLower(u.Scheme)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func lowercaseHost(u *url.URL) {
|
|
||||||
if len(u.Host) > 0 {
|
|
||||||
u.Host = strings.ToLower(u.Host)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeDefaultPort(u *url.URL) {
|
|
||||||
if len(u.Host) > 0 {
|
|
||||||
scheme := strings.ToLower(u.Scheme)
|
|
||||||
u.Host = rxPort.ReplaceAllStringFunc(u.Host, func(val string) string {
|
|
||||||
if (scheme == "http" && val == defaultHttpPort) || (scheme == "https" && val == defaultHttpsPort) {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return val
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeTrailingSlash(u *url.URL) {
|
|
||||||
if l := len(u.Path); l > 0 {
|
|
||||||
if strings.HasSuffix(u.Path, "/") {
|
|
||||||
u.Path = u.Path[:l-1]
|
|
||||||
}
|
|
||||||
} else if l = len(u.Host); l > 0 {
|
|
||||||
if strings.HasSuffix(u.Host, "/") {
|
|
||||||
u.Host = u.Host[:l-1]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func addTrailingSlash(u *url.URL) {
|
|
||||||
if l := len(u.Path); l > 0 {
|
|
||||||
if !strings.HasSuffix(u.Path, "/") {
|
|
||||||
u.Path += "/"
|
|
||||||
}
|
|
||||||
} else if l = len(u.Host); l > 0 {
|
|
||||||
if !strings.HasSuffix(u.Host, "/") {
|
|
||||||
u.Host += "/"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeDotSegments(u *url.URL) {
|
|
||||||
if len(u.Path) > 0 {
|
|
||||||
var dotFree []string
|
|
||||||
var lastIsDot bool
|
|
||||||
|
|
||||||
sections := strings.Split(u.Path, "/")
|
|
||||||
for _, s := range sections {
|
|
||||||
if s == ".." {
|
|
||||||
if len(dotFree) > 0 {
|
|
||||||
dotFree = dotFree[:len(dotFree)-1]
|
|
||||||
}
|
|
||||||
} else if s != "." {
|
|
||||||
dotFree = append(dotFree, s)
|
|
||||||
}
|
|
||||||
lastIsDot = (s == "." || s == "..")
|
|
||||||
}
|
|
||||||
// Special case if host does not end with / and new path does not begin with /
|
|
||||||
u.Path = strings.Join(dotFree, "/")
|
|
||||||
if u.Host != "" && !strings.HasSuffix(u.Host, "/") && !strings.HasPrefix(u.Path, "/") {
|
|
||||||
u.Path = "/" + u.Path
|
|
||||||
}
|
|
||||||
// Special case if the last segment was a dot, make sure the path ends with a slash
|
|
||||||
if lastIsDot && !strings.HasSuffix(u.Path, "/") {
|
|
||||||
u.Path += "/"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeDirectoryIndex(u *url.URL) {
|
|
||||||
if len(u.Path) > 0 {
|
|
||||||
u.Path = rxDirIndex.ReplaceAllString(u.Path, "$1")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeFragment(u *url.URL) {
|
|
||||||
u.Fragment = ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func forceHTTP(u *url.URL) {
|
|
||||||
if strings.ToLower(u.Scheme) == "https" {
|
|
||||||
u.Scheme = "http"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeDuplicateSlashes(u *url.URL) {
|
|
||||||
if len(u.Path) > 0 {
|
|
||||||
u.Path = rxDupSlashes.ReplaceAllString(u.Path, "/")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeWWW(u *url.URL) {
|
|
||||||
if len(u.Host) > 0 && strings.HasPrefix(strings.ToLower(u.Host), "www.") {
|
|
||||||
u.Host = u.Host[4:]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func addWWW(u *url.URL) {
|
|
||||||
if len(u.Host) > 0 && !strings.HasPrefix(strings.ToLower(u.Host), "www.") {
|
|
||||||
u.Host = "www." + u.Host
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func sortQuery(u *url.URL) {
|
|
||||||
q := u.Query()
|
|
||||||
|
|
||||||
if len(q) > 0 {
|
|
||||||
arKeys := make([]string, len(q))
|
|
||||||
i := 0
|
|
||||||
for k := range q {
|
|
||||||
arKeys[i] = k
|
|
||||||
i++
|
|
||||||
}
|
|
||||||
sort.Strings(arKeys)
|
|
||||||
buf := new(bytes.Buffer)
|
|
||||||
for _, k := range arKeys {
|
|
||||||
sort.Strings(q[k])
|
|
||||||
for _, v := range q[k] {
|
|
||||||
if buf.Len() > 0 {
|
|
||||||
buf.WriteRune('&')
|
|
||||||
}
|
|
||||||
buf.WriteString(fmt.Sprintf("%s=%s", k, urlesc.QueryEscape(v)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Rebuild the raw query string
|
|
||||||
u.RawQuery = buf.String()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func decodeDWORDHost(u *url.URL) {
|
|
||||||
if len(u.Host) > 0 {
|
|
||||||
if matches := rxDWORDHost.FindStringSubmatch(u.Host); len(matches) > 2 {
|
|
||||||
var parts [4]int64
|
|
||||||
|
|
||||||
dword, _ := strconv.ParseInt(matches[1], 10, 0)
|
|
||||||
for i, shift := range []uint{24, 16, 8, 0} {
|
|
||||||
parts[i] = dword >> shift & 0xFF
|
|
||||||
}
|
|
||||||
u.Host = fmt.Sprintf("%d.%d.%d.%d%s", parts[0], parts[1], parts[2], parts[3], matches[2])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func decodeOctalHost(u *url.URL) {
|
|
||||||
if len(u.Host) > 0 {
|
|
||||||
if matches := rxOctalHost.FindStringSubmatch(u.Host); len(matches) > 5 {
|
|
||||||
var parts [4]int64
|
|
||||||
|
|
||||||
for i := 1; i <= 4; i++ {
|
|
||||||
parts[i-1], _ = strconv.ParseInt(matches[i], 8, 0)
|
|
||||||
}
|
|
||||||
u.Host = fmt.Sprintf("%d.%d.%d.%d%s", parts[0], parts[1], parts[2], parts[3], matches[5])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func decodeHexHost(u *url.URL) {
|
|
||||||
if len(u.Host) > 0 {
|
|
||||||
if matches := rxHexHost.FindStringSubmatch(u.Host); len(matches) > 2 {
|
|
||||||
// Conversion is safe because of regex validation
|
|
||||||
parsed, _ := strconv.ParseInt(matches[1], 16, 0)
|
|
||||||
// Set host as DWORD (base 10) encoded host
|
|
||||||
u.Host = fmt.Sprintf("%d%s", parsed, matches[2])
|
|
||||||
// The rest is the same as decoding a DWORD host
|
|
||||||
decodeDWORDHost(u)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeUnncessaryHostDots(u *url.URL) {
|
|
||||||
if len(u.Host) > 0 {
|
|
||||||
if matches := rxHostDots.FindStringSubmatch(u.Host); len(matches) > 1 {
|
|
||||||
// Trim the leading and trailing dots
|
|
||||||
u.Host = strings.Trim(matches[1], ".")
|
|
||||||
if len(matches) > 2 {
|
|
||||||
u.Host += matches[2]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeEmptyPortSeparator(u *url.URL) {
|
|
||||||
if len(u.Host) > 0 {
|
|
||||||
u.Host = rxEmptyPort.ReplaceAllString(u.Host, "")
|
|
||||||
}
|
|
||||||
}
|
|
15 vendor/github.com/PuerkitoBio/urlesc/.travis.yml (generated, vendored)
@ -1,15 +0,0 @@
language: go
|
|
||||||
|
|
||||||
go:
|
|
||||||
- 1.4.x
|
|
||||||
- 1.5.x
|
|
||||||
- 1.6.x
|
|
||||||
- 1.7.x
|
|
||||||
- 1.8.x
|
|
||||||
- tip
|
|
||||||
|
|
||||||
install:
|
|
||||||
- go build .
|
|
||||||
|
|
||||||
script:
|
|
||||||
- go test -v
|
|
27 vendor/github.com/PuerkitoBio/urlesc/LICENSE (generated, vendored)
@ -1,27 +0,0 @@
Copyright (c) 2012 The Go Authors. All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are
|
|
||||||
met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer.
|
|
||||||
* Redistributions in binary form must reproduce the above
|
|
||||||
copyright notice, this list of conditions and the following disclaimer
|
|
||||||
in the documentation and/or other materials provided with the
|
|
||||||
distribution.
|
|
||||||
* Neither the name of Google Inc. nor the names of its
|
|
||||||
contributors may be used to endorse or promote products derived from
|
|
||||||
this software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
16 vendor/github.com/PuerkitoBio/urlesc/README.md (generated, vendored)
@ -1,16 +0,0 @@
urlesc [](https://travis-ci.org/PuerkitoBio/urlesc) [](http://godoc.org/github.com/PuerkitoBio/urlesc)
======

Package urlesc implements query escaping as per RFC 3986.

It contains some parts of the net/url package, modified so as to allow some reserved characters incorrectly escaped by net/url (see [issue 5684](https://github.com/golang/go/issues/5684)).

## Install

    go get github.com/PuerkitoBio/urlesc
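
A minimal usage sketch, escaping a query component and re-assembling a parsed URL with `Escape` (the input values are illustrative):

```go
package main

import (
	"fmt"
	"log"
	"net/url"

	"github.com/PuerkitoBio/urlesc"
)

func main() {
	// Escape a single query component per RFC 3986.
	fmt.Println(urlesc.QueryEscape("a b&c")) // a+b%26c

	// Reassemble a parsed URL using RFC 3986 escaping rules.
	u, err := url.Parse("http://example.com/a,b/c?q=1")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(urlesc.Escape(u))
}
```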

## License

Go license (BSD-3-Clause)

180 vendor/github.com/PuerkitoBio/urlesc/urlesc.go (generated, vendored)
@ -1,180 +0,0 @@
// Copyright 2009 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// Package urlesc implements query escaping as per RFC 3986.
|
|
||||||
// It contains some parts of the net/url package, modified so as to allow
|
|
||||||
// some reserved characters incorrectly escaped by net/url.
|
|
||||||
// See https://github.com/golang/go/issues/5684
|
|
||||||
package urlesc
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"net/url"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
type encoding int
|
|
||||||
|
|
||||||
const (
|
|
||||||
encodePath encoding = 1 + iota
|
|
||||||
encodeUserPassword
|
|
||||||
encodeQueryComponent
|
|
||||||
encodeFragment
|
|
||||||
)
|
|
||||||
|
|
||||||
// Return true if the specified character should be escaped when
|
|
||||||
// appearing in a URL string, according to RFC 3986.
|
|
||||||
func shouldEscape(c byte, mode encoding) bool {
|
|
||||||
// §2.3 Unreserved characters (alphanum)
|
|
||||||
if 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
switch c {
|
|
||||||
case '-', '.', '_', '~': // §2.3 Unreserved characters (mark)
|
|
||||||
return false
|
|
||||||
|
|
||||||
// §2.2 Reserved characters (reserved)
|
|
||||||
case ':', '/', '?', '#', '[', ']', '@', // gen-delims
|
|
||||||
'!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=': // sub-delims
|
|
||||||
// Different sections of the URL allow a few of
|
|
||||||
// the reserved characters to appear unescaped.
|
|
||||||
switch mode {
|
|
||||||
case encodePath: // §3.3
|
|
||||||
// The RFC allows sub-delims and : @.
|
|
||||||
// '/', '[' and ']' can be used to assign meaning to individual path
|
|
||||||
// segments. This package only manipulates the path as a whole,
|
|
||||||
// so we allow those as well. That leaves only ? and # to escape.
|
|
||||||
return c == '?' || c == '#'
|
|
||||||
|
|
||||||
case encodeUserPassword: // §3.2.1
|
|
||||||
// The RFC allows : and sub-delims in
|
|
||||||
// userinfo. The parsing of userinfo treats ':' as special so we must escape
|
|
||||||
// all the gen-delims.
|
|
||||||
return c == ':' || c == '/' || c == '?' || c == '#' || c == '[' || c == ']' || c == '@'
|
|
||||||
|
|
||||||
case encodeQueryComponent: // §3.4
|
|
||||||
// The RFC allows / and ?.
|
|
||||||
return c != '/' && c != '?'
|
|
||||||
|
|
||||||
case encodeFragment: // §4.1
|
|
||||||
// The RFC text is silent but the grammar allows
|
|
||||||
// everything, so escape nothing but #
|
|
||||||
return c == '#'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Everything else must be escaped.
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// QueryEscape escapes the string so it can be safely placed
|
|
||||||
// inside a URL query.
|
|
||||||
func QueryEscape(s string) string {
|
|
||||||
return escape(s, encodeQueryComponent)
|
|
||||||
}
|
|
||||||
|
|
||||||
func escape(s string, mode encoding) string {
|
|
||||||
spaceCount, hexCount := 0, 0
|
|
||||||
for i := 0; i < len(s); i++ {
|
|
||||||
c := s[i]
|
|
||||||
if shouldEscape(c, mode) {
|
|
||||||
if c == ' ' && mode == encodeQueryComponent {
|
|
||||||
spaceCount++
|
|
||||||
} else {
|
|
||||||
hexCount++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if spaceCount == 0 && hexCount == 0 {
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
|
|
||||||
t := make([]byte, len(s)+2*hexCount)
|
|
||||||
j := 0
|
|
||||||
for i := 0; i < len(s); i++ {
|
|
||||||
switch c := s[i]; {
|
|
||||||
case c == ' ' && mode == encodeQueryComponent:
|
|
||||||
t[j] = '+'
|
|
||||||
j++
|
|
||||||
case shouldEscape(c, mode):
|
|
||||||
t[j] = '%'
|
|
||||||
t[j+1] = "0123456789ABCDEF"[c>>4]
|
|
||||||
t[j+2] = "0123456789ABCDEF"[c&15]
|
|
||||||
j += 3
|
|
||||||
default:
|
|
||||||
t[j] = s[i]
|
|
||||||
j++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return string(t)
|
|
||||||
}
|
|
||||||
|
|
||||||
var uiReplacer = strings.NewReplacer(
|
|
||||||
"%21", "!",
|
|
||||||
"%27", "'",
|
|
||||||
"%28", "(",
|
|
||||||
"%29", ")",
|
|
||||||
"%2A", "*",
|
|
||||||
)
|
|
||||||
|
|
||||||
// unescapeUserinfo unescapes some characters that need not be escaped as per RFC 3986.
|
|
||||||
func unescapeUserinfo(s string) string {
|
|
||||||
return uiReplacer.Replace(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Escape reassembles the URL into a valid URL string.
|
|
||||||
// The general form of the result is one of:
|
|
||||||
//
|
|
||||||
// scheme:opaque
|
|
||||||
// scheme://userinfo@host/path?query#fragment
|
|
||||||
//
|
|
||||||
// If u.Opaque is non-empty, String uses the first form;
|
|
||||||
// otherwise it uses the second form.
|
|
||||||
//
|
|
||||||
// In the second form, the following rules apply:
|
|
||||||
// - if u.Scheme is empty, scheme: is omitted.
|
|
||||||
// - if u.User is nil, userinfo@ is omitted.
|
|
||||||
// - if u.Host is empty, host/ is omitted.
|
|
||||||
// - if u.Scheme and u.Host are empty and u.User is nil,
|
|
||||||
// the entire scheme://userinfo@host/ is omitted.
|
|
||||||
// - if u.Host is non-empty and u.Path begins with a /,
|
|
||||||
// the form host/path does not add its own /.
|
|
||||||
// - if u.RawQuery is empty, ?query is omitted.
|
|
||||||
// - if u.Fragment is empty, #fragment is omitted.
|
|
||||||
func Escape(u *url.URL) string {
|
|
||||||
var buf bytes.Buffer
|
|
||||||
if u.Scheme != "" {
|
|
||||||
buf.WriteString(u.Scheme)
|
|
||||||
buf.WriteByte(':')
|
|
||||||
}
|
|
||||||
if u.Opaque != "" {
|
|
||||||
buf.WriteString(u.Opaque)
|
|
||||||
} else {
|
|
||||||
if u.Scheme != "" || u.Host != "" || u.User != nil {
|
|
||||||
buf.WriteString("//")
|
|
||||||
if ui := u.User; ui != nil {
|
|
||||||
buf.WriteString(unescapeUserinfo(ui.String()))
|
|
||||||
buf.WriteByte('@')
|
|
||||||
}
|
|
||||||
if h := u.Host; h != "" {
|
|
||||||
buf.WriteString(h)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if u.Path != "" && u.Path[0] != '/' && u.Host != "" {
|
|
||||||
buf.WriteByte('/')
|
|
||||||
}
|
|
||||||
buf.WriteString(escape(u.Path, encodePath))
|
|
||||||
}
|
|
||||||
if u.RawQuery != "" {
|
|
||||||
buf.WriteByte('?')
|
|
||||||
buf.WriteString(u.RawQuery)
|
|
||||||
}
|
|
||||||
if u.Fragment != "" {
|
|
||||||
buf.WriteByte('#')
|
|
||||||
buf.WriteString(escape(u.Fragment, encodeFragment))
|
|
||||||
}
|
|
||||||
return buf.String()
|
|
||||||
}
|
|
27 vendor/github.com/alecthomas/template/LICENSE (generated, vendored)
@ -1,27 +0,0 @@
Copyright (c) 2012 The Go Authors. All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are
|
|
||||||
met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer.
|
|
||||||
* Redistributions in binary form must reproduce the above
|
|
||||||
copyright notice, this list of conditions and the following disclaimer
|
|
||||||
in the documentation and/or other materials provided with the
|
|
||||||
distribution.
|
|
||||||
* Neither the name of Google Inc. nor the names of its
|
|
||||||
contributors may be used to endorse or promote products derived from
|
|
||||||
this software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
25 vendor/github.com/alecthomas/template/README.md (generated, vendored)
@ -1,25 +0,0 @@
# Go's `text/template` package with newline elision

This is a fork of Go 1.4's [text/template](http://golang.org/pkg/text/template/) package with one addition: a backslash immediately after a closing delimiter will delete all subsequent newlines until a non-newline.

e.g.

```
{{if true}}\
hello
{{end}}\
```

Will result in:

```
hello\n
```

Rather than:

```
\n
hello\n
\n
```
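
Since this is a fork of `text/template`, the usual `New`/`Parse`/`Execute` calls apply; a minimal sketch exercising the elision with the template shown above:

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/template"
)

func main() {
	// The backslash after {{if true}} and {{end}} swallows the newlines
	// that would otherwise surround "hello".
	const src = "{{if true}}\\\nhello\n{{end}}\\\n"

	tmpl, err := template.New("demo").Parse(src)
	if err != nil {
		log.Fatal(err)
	}
	if err := tmpl.Execute(os.Stdout, nil); err != nil {
		log.Fatal(err)
	}
	// Expected output: "hello\n"
}
```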
406 vendor/github.com/alecthomas/template/doc.go (generated, vendored)
@ -1,406 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
/*
|
|
||||||
Package template implements data-driven templates for generating textual output.
|
|
||||||
|
|
||||||
To generate HTML output, see package html/template, which has the same interface
|
|
||||||
as this package but automatically secures HTML output against certain attacks.
|
|
||||||
|
|
||||||
Templates are executed by applying them to a data structure. Annotations in the
|
|
||||||
template refer to elements of the data structure (typically a field of a struct
|
|
||||||
or a key in a map) to control execution and derive values to be displayed.
|
|
||||||
Execution of the template walks the structure and sets the cursor, represented
|
|
||||||
by a period '.' and called "dot", to the value at the current location in the
|
|
||||||
structure as execution proceeds.
|
|
||||||
|
|
||||||
The input text for a template is UTF-8-encoded text in any format.
|
|
||||||
"Actions"--data evaluations or control structures--are delimited by
|
|
||||||
"{{" and "}}"; all text outside actions is copied to the output unchanged.
|
|
||||||
Actions may not span newlines, although comments can.
|
|
||||||
|
|
||||||
Once parsed, a template may be executed safely in parallel.
|
|
||||||
|
|
||||||
Here is a trivial example that prints "17 items are made of wool".
|
|
||||||
|
|
||||||
type Inventory struct {
|
|
||||||
Material string
|
|
||||||
Count uint
|
|
||||||
}
|
|
||||||
sweaters := Inventory{"wool", 17}
|
|
||||||
tmpl, err := template.New("test").Parse("{{.Count}} items are made of {{.Material}}")
|
|
||||||
if err != nil { panic(err) }
|
|
||||||
err = tmpl.Execute(os.Stdout, sweaters)
|
|
||||||
if err != nil { panic(err) }
|
|
||||||
|
|
||||||
More intricate examples appear below.
|
|
||||||
|
|
||||||
Actions
|
|
||||||
|
|
||||||
Here is the list of actions. "Arguments" and "pipelines" are evaluations of
|
|
||||||
data, defined in detail below.
|
|
||||||
|
|
||||||
*/
|
|
||||||
// {{/* a comment */}}
|
|
||||||
// A comment; discarded. May contain newlines.
|
|
||||||
// Comments do not nest and must start and end at the
|
|
||||||
// delimiters, as shown here.
|
|
||||||
/*
|
|
||||||
|
|
||||||
{{pipeline}}
|
|
||||||
The default textual representation of the value of the pipeline
|
|
||||||
is copied to the output.
|
|
||||||
|
|
||||||
{{if pipeline}} T1 {{end}}
|
|
||||||
If the value of the pipeline is empty, no output is generated;
|
|
||||||
otherwise, T1 is executed. The empty values are false, 0, any
|
|
||||||
nil pointer or interface value, and any array, slice, map, or
|
|
||||||
string of length zero.
|
|
||||||
Dot is unaffected.
|
|
||||||
|
|
||||||
{{if pipeline}} T1 {{else}} T0 {{end}}
|
|
||||||
If the value of the pipeline is empty, T0 is executed;
|
|
||||||
otherwise, T1 is executed. Dot is unaffected.
|
|
||||||
|
|
||||||
{{if pipeline}} T1 {{else if pipeline}} T0 {{end}}
|
|
||||||
To simplify the appearance of if-else chains, the else action
|
|
||||||
of an if may include another if directly; the effect is exactly
|
|
||||||
the same as writing
|
|
||||||
{{if pipeline}} T1 {{else}}{{if pipeline}} T0 {{end}}{{end}}
|
|
||||||
|
|
||||||
{{range pipeline}} T1 {{end}}
|
|
||||||
The value of the pipeline must be an array, slice, map, or channel.
|
|
||||||
If the value of the pipeline has length zero, nothing is output;
|
|
||||||
otherwise, dot is set to the successive elements of the array,
|
|
||||||
slice, or map and T1 is executed. If the value is a map and the
|
|
||||||
keys are of basic type with a defined order ("comparable"), the
|
|
||||||
elements will be visited in sorted key order.
|
|
||||||
|
|
||||||
{{range pipeline}} T1 {{else}} T0 {{end}}
|
|
||||||
The value of the pipeline must be an array, slice, map, or channel.
|
|
||||||
If the value of the pipeline has length zero, dot is unaffected and
|
|
||||||
T0 is executed; otherwise, dot is set to the successive elements
|
|
||||||
of the array, slice, or map and T1 is executed.
|
|
||||||
|
|
||||||
{{template "name"}}
|
|
||||||
The template with the specified name is executed with nil data.
|
|
||||||
|
|
||||||
{{template "name" pipeline}}
|
|
||||||
The template with the specified name is executed with dot set
|
|
||||||
to the value of the pipeline.
|
|
||||||
|
|
||||||
{{with pipeline}} T1 {{end}}
|
|
||||||
If the value of the pipeline is empty, no output is generated;
|
|
||||||
otherwise, dot is set to the value of the pipeline and T1 is
|
|
||||||
executed.
|
|
||||||
|
|
||||||
{{with pipeline}} T1 {{else}} T0 {{end}}
|
|
||||||
If the value of the pipeline is empty, dot is unaffected and T0
|
|
||||||
is executed; otherwise, dot is set to the value of the pipeline
|
|
||||||
and T1 is executed.
|
|
||||||
|
|
||||||
Arguments
|
|
||||||
|
|
||||||
An argument is a simple value, denoted by one of the following.
|
|
||||||
|
|
||||||
- A boolean, string, character, integer, floating-point, imaginary
|
|
||||||
or complex constant in Go syntax. These behave like Go's untyped
|
|
||||||
constants, although raw strings may not span newlines.
|
|
||||||
- The keyword nil, representing an untyped Go nil.
|
|
||||||
- The character '.' (period):
|
|
||||||
.
|
|
||||||
The result is the value of dot.
|
|
||||||
- A variable name, which is a (possibly empty) alphanumeric string
|
|
||||||
preceded by a dollar sign, such as
|
|
||||||
$piOver2
|
|
||||||
or
|
|
||||||
$
|
|
||||||
The result is the value of the variable.
|
|
||||||
Variables are described below.
|
|
||||||
- The name of a field of the data, which must be a struct, preceded
|
|
||||||
by a period, such as
|
|
||||||
.Field
|
|
||||||
The result is the value of the field. Field invocations may be
|
|
||||||
chained:
|
|
||||||
.Field1.Field2
|
|
||||||
Fields can also be evaluated on variables, including chaining:
|
|
||||||
$x.Field1.Field2
|
|
||||||
- The name of a key of the data, which must be a map, preceded
|
|
||||||
by a period, such as
|
|
||||||
.Key
|
|
||||||
The result is the map element value indexed by the key.
|
|
||||||
Key invocations may be chained and combined with fields to any
|
|
||||||
depth:
|
|
||||||
.Field1.Key1.Field2.Key2
|
|
||||||
Although the key must be an alphanumeric identifier, unlike with
|
|
||||||
field names they do not need to start with an upper case letter.
|
|
||||||
Keys can also be evaluated on variables, including chaining:
|
|
||||||
$x.key1.key2
|
|
||||||
- The name of a niladic method of the data, preceded by a period,
|
|
||||||
such as
|
|
||||||
.Method
|
|
||||||
The result is the value of invoking the method with dot as the
|
|
||||||
receiver, dot.Method(). Such a method must have one return value (of
|
|
||||||
any type) or two return values, the second of which is an error.
|
|
||||||
If it has two and the returned error is non-nil, execution terminates
|
|
||||||
and an error is returned to the caller as the value of Execute.
|
|
||||||
Method invocations may be chained and combined with fields and keys
|
|
||||||
to any depth:
|
|
||||||
.Field1.Key1.Method1.Field2.Key2.Method2
|
|
||||||
Methods can also be evaluated on variables, including chaining:
|
|
||||||
$x.Method1.Field
|
|
||||||
- The name of a niladic function, such as
|
|
||||||
fun
|
|
||||||
The result is the value of invoking the function, fun(). The return
|
|
||||||
types and values behave as in methods. Functions and function
|
|
||||||
names are described below.
|
|
||||||
- A parenthesized instance of one the above, for grouping. The result
|
|
||||||
may be accessed by a field or map key invocation.
|
|
||||||
print (.F1 arg1) (.F2 arg2)
|
|
||||||
(.StructValuedMethod "arg").Field
|
|
||||||
|
|
||||||
Arguments may evaluate to any type; if they are pointers the implementation
|
|
||||||
automatically indirects to the base type when required.
|
|
||||||
If an evaluation yields a function value, such as a function-valued
|
|
||||||
field of a struct, the function is not invoked automatically, but it
|
|
||||||
can be used as a truth value for an if action and the like. To invoke
|
|
||||||
it, use the call function, defined below.
|
|
||||||
|
|
||||||
A pipeline is a possibly chained sequence of "commands". A command is a simple
|
|
||||||
value (argument) or a function or method call, possibly with multiple arguments:
|
|
||||||
|
|
||||||
Argument
|
|
||||||
The result is the value of evaluating the argument.
|
|
||||||
.Method [Argument...]
|
|
||||||
The method can be alone or the last element of a chain but,
|
|
||||||
unlike methods in the middle of a chain, it can take arguments.
|
|
||||||
The result is the value of calling the method with the
|
|
||||||
arguments:
|
|
||||||
dot.Method(Argument1, etc.)
|
|
||||||
functionName [Argument...]
|
|
||||||
The result is the value of calling the function associated
|
|
||||||
with the name:
|
|
||||||
function(Argument1, etc.)
|
|
||||||
Functions and function names are described below.
|
|
||||||
|
|
||||||
Pipelines
|
|
||||||
|
|
||||||
A pipeline may be "chained" by separating a sequence of commands with pipeline
|
|
||||||
characters '|'. In a chained pipeline, the result of the each command is
|
|
||||||
passed as the last argument of the following command. The output of the final
|
|
||||||
command in the pipeline is the value of the pipeline.
|
|
||||||
|
|
||||||
The output of a command will be either one value or two values, the second of
|
|
||||||
which has type error. If that second value is present and evaluates to
|
|
||||||
non-nil, execution terminates and the error is returned to the caller of
|
|
||||||
Execute.
|
|
||||||
|
|
||||||
Variables
|
|
||||||
|
|
||||||
A pipeline inside an action may initialize a variable to capture the result.
|
|
||||||
The initialization has syntax
|
|
||||||
|
|
||||||
$variable := pipeline
|
|
||||||
|
|
||||||
where $variable is the name of the variable. An action that declares a
|
|
||||||
variable produces no output.
|
|
||||||
|
|
||||||
If a "range" action initializes a variable, the variable is set to the
|
|
||||||
successive elements of the iteration. Also, a "range" may declare two
|
|
||||||
variables, separated by a comma:
|
|
||||||
|
|
||||||
range $index, $element := pipeline
|
|
||||||
|
|
||||||
in which case $index and $element are set to the successive values of the
|
|
||||||
array/slice index or map key and element, respectively. Note that if there is
|
|
||||||
only one variable, it is assigned the element; this is opposite to the
|
|
||||||
convention in Go range clauses.
|
|
||||||
|
|
||||||
A variable's scope extends to the "end" action of the control structure ("if",
|
|
||||||
"with", or "range") in which it is declared, or to the end of the template if
|
|
||||||
there is no such control structure. A template invocation does not inherit
|
|
||||||
variables from the point of its invocation.
|
|
||||||
|
|
||||||
When execution begins, $ is set to the data argument passed to Execute, that is,
|
|
||||||
to the starting value of dot.
|
|
||||||
|
|
||||||
Examples
|
|
||||||
|
|
||||||
Here are some example one-line templates demonstrating pipelines and variables.
|
|
||||||
All produce the quoted word "output":
|
|
||||||
|
|
||||||
{{"\"output\""}}
|
|
||||||
A string constant.
|
|
||||||
{{`"output"`}}
|
|
||||||
A raw string constant.
|
|
||||||
{{printf "%q" "output"}}
|
|
||||||
A function call.
|
|
||||||
{{"output" | printf "%q"}}
|
|
||||||
A function call whose final argument comes from the previous
|
|
||||||
command.
|
|
||||||
{{printf "%q" (print "out" "put")}}
|
|
||||||
A parenthesized argument.
|
|
||||||
{{"put" | printf "%s%s" "out" | printf "%q"}}
|
|
||||||
A more elaborate call.
|
|
||||||
{{"output" | printf "%s" | printf "%q"}}
|
|
||||||
A longer chain.
|
|
||||||
{{with "output"}}{{printf "%q" .}}{{end}}
|
|
||||||
A with action using dot.
|
|
||||||
{{with $x := "output" | printf "%q"}}{{$x}}{{end}}
|
|
||||||
A with action that creates and uses a variable.
|
|
||||||
{{with $x := "output"}}{{printf "%q" $x}}{{end}}
|
|
||||||
A with action that uses the variable in another action.
|
|
||||||
{{with $x := "output"}}{{$x | printf "%q"}}{{end}}
|
|
||||||
The same, but pipelined.
|
|
||||||
|
|
||||||
Functions
|
|
||||||
|
|
||||||
During execution functions are found in two function maps: first in the
|
|
||||||
template, then in the global function map. By default, no functions are defined
|
|
||||||
in the template but the Funcs method can be used to add them.
|
|
||||||
|
|
||||||
Predefined global functions are named as follows.
|
|
||||||
|
|
||||||
and
|
|
||||||
Returns the boolean AND of its arguments by returning the
|
|
||||||
first empty argument or the last argument, that is,
|
|
||||||
"and x y" behaves as "if x then y else x". All the
|
|
||||||
arguments are evaluated.
|
|
||||||
call
|
|
||||||
Returns the result of calling the first argument, which
|
|
||||||
must be a function, with the remaining arguments as parameters.
|
|
||||||
Thus "call .X.Y 1 2" is, in Go notation, dot.X.Y(1, 2) where
|
|
||||||
Y is a func-valued field, map entry, or the like.
|
|
||||||
The first argument must be the result of an evaluation
|
|
||||||
that yields a value of function type (as distinct from
|
|
||||||
a predefined function such as print). The function must
|
|
||||||
return either one or two result values, the second of which
|
|
||||||
is of type error. If the arguments don't match the function
|
|
||||||
or the returned error value is non-nil, execution stops.
|
|
||||||
html
|
|
||||||
Returns the escaped HTML equivalent of the textual
|
|
||||||
representation of its arguments.
|
|
||||||
index
|
|
||||||
Returns the result of indexing its first argument by the
|
|
||||||
following arguments. Thus "index x 1 2 3" is, in Go syntax,
|
|
||||||
x[1][2][3]. Each indexed item must be a map, slice, or array.
|
|
||||||
js
|
|
||||||
Returns the escaped JavaScript equivalent of the textual
|
|
||||||
representation of its arguments.
|
|
||||||
len
|
|
||||||
Returns the integer length of its argument.
|
|
||||||
not
|
|
||||||
Returns the boolean negation of its single argument.
|
|
||||||
or
|
|
||||||
Returns the boolean OR of its arguments by returning the
|
|
||||||
first non-empty argument or the last argument, that is,
|
|
||||||
"or x y" behaves as "if x then x else y". All the
|
|
||||||
arguments are evaluated.
|
|
||||||
print
|
|
||||||
An alias for fmt.Sprint
|
|
||||||
printf
|
|
||||||
An alias for fmt.Sprintf
|
|
||||||
println
|
|
||||||
An alias for fmt.Sprintln
|
|
||||||
urlquery
|
|
||||||
Returns the escaped value of the textual representation of
|
|
||||||
its arguments in a form suitable for embedding in a URL query.
|
|
||||||
|
|
||||||
The boolean functions take any zero value to be false and a non-zero
|
|
||||||
value to be true.
|
|
||||||
|
|
||||||
There is also a set of binary comparison operators defined as
|
|
||||||
functions:
|
|
||||||
|
|
||||||
eq
|
|
||||||
Returns the boolean truth of arg1 == arg2
|
|
||||||
ne
|
|
||||||
Returns the boolean truth of arg1 != arg2
|
|
||||||
lt
|
|
||||||
Returns the boolean truth of arg1 < arg2
|
|
||||||
le
|
|
||||||
Returns the boolean truth of arg1 <= arg2
|
|
||||||
gt
|
|
||||||
Returns the boolean truth of arg1 > arg2
|
|
||||||
ge
|
|
||||||
Returns the boolean truth of arg1 >= arg2
|
|
||||||
|
|
||||||
For simpler multi-way equality tests, eq (only) accepts two or more
|
|
||||||
arguments and compares the second and subsequent to the first,
|
|
||||||
returning in effect
|
|
||||||
|
|
||||||
arg1==arg2 || arg1==arg3 || arg1==arg4 ...
|
|
||||||
|
|
||||||
(Unlike with || in Go, however, eq is a function call and all the
|
|
||||||
arguments will be evaluated.)
|
|
||||||
|
|
||||||
The comparison functions work on basic types only (or named basic
|
|
||||||
types, such as "type Celsius float32"). They implement the Go rules
|
|
||||||
for comparison of values, except that size and exact type are
|
|
||||||
ignored, so any integer value, signed or unsigned, may be compared
|
|
||||||
with any other integer value. (The arithmetic value is compared,
|
|
||||||
not the bit pattern, so all negative integers are less than all
|
|
||||||
unsigned integers.) However, as usual, one may not compare an int
|
|
||||||
with a float32 and so on.
|
|
||||||
|
|
||||||
Associated templates
|
|
||||||
|
|
||||||
Each template is named by a string specified when it is created. Also, each
|
|
||||||
template is associated with zero or more other templates that it may invoke by
|
|
||||||
name; such associations are transitive and form a name space of templates.
|
|
||||||
|
|
||||||
A template may use a template invocation to instantiate another associated
|
|
||||||
template; see the explanation of the "template" action above. The name must be
|
|
||||||
that of a template associated with the template that contains the invocation.
|
|
||||||
|
|
||||||
Nested template definitions
|
|
||||||
|
|
||||||
When parsing a template, another template may be defined and associated with the
|
|
||||||
template being parsed. Template definitions must appear at the top level of the
|
|
||||||
template, much like global variables in a Go program.
|
|
||||||
|
|
||||||
The syntax of such definitions is to surround each template declaration with a
|
|
||||||
"define" and "end" action.
|
|
||||||
|
|
||||||
The define action names the template being created by providing a string
|
|
||||||
constant. Here is a simple example:
|
|
||||||
|
|
||||||
`{{define "T1"}}ONE{{end}}
|
|
||||||
{{define "T2"}}TWO{{end}}
|
|
||||||
{{define "T3"}}{{template "T1"}} {{template "T2"}}{{end}}
|
|
||||||
{{template "T3"}}`
|
|
||||||
|
|
||||||
This defines two templates, T1 and T2, and a third T3 that invokes the other two
|
|
||||||
when it is executed. Finally it invokes T3. If executed this template will
|
|
||||||
produce the text
|
|
||||||
|
|
||||||
ONE TWO
|
|
||||||
|
|
||||||
By construction, a template may reside in only one association. If it's
|
|
||||||
necessary to have a template addressable from multiple associations, the
|
|
||||||
template definition must be parsed multiple times to create distinct *Template
|
|
||||||
values, or must be copied with the Clone or AddParseTree method.
|
|
||||||
|
|
||||||
Parse may be called multiple times to assemble the various associated templates;
|
|
||||||
see the ParseFiles and ParseGlob functions and methods for simple ways to parse
|
|
||||||
related templates stored in files.
|
|
||||||
|
|
||||||
A template may be executed directly or through ExecuteTemplate, which executes
|
|
||||||
an associated template identified by name. To invoke our example above, we
|
|
||||||
might write,
|
|
||||||
|
|
||||||
err := tmpl.Execute(os.Stdout, "no data needed")
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("execution failed: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
or to invoke a particular template explicitly by name,
|
|
||||||
|
|
||||||
err := tmpl.ExecuteTemplate(os.Stdout, "T2", "no data needed")
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("execution failed: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
*/
|
|
||||||
package template
|
|
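The package documentation above describes "range" variable declarations and chained pipelines. A short, hedged sketch exercising both against the now-unvendored module (the data values and template text are illustrative only, not taken from this repository):

```go
package main

import (
	"os"

	"github.com/alecthomas/template"
)

func main() {
	// Hypothetical data, chosen only to demonstrate the syntax.
	data := map[string]int{"sweaters": 17, "socks": 3}

	// A "range" declaring two variables ($k, $v) and a chained pipeline
	// ({{$v | printf "%03d"}}), as documented above.
	const src = "{{range $k, $v := .}}{{$k}}: {{$v | printf \"%03d\"}}\n{{end}}"

	tmpl, err := template.New("inventory").Parse(src)
	if err != nil {
		panic(err)
	}
	// Map keys of basic type are visited in sorted order, so the output is
	// deterministic: "socks: 003\nsweaters: 017\n".
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}
```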
845
vendor/github.com/alecthomas/template/exec.go
generated
vendored
@ -1,845 +0,0 @@
|
|||||||
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package template
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"reflect"
|
|
||||||
"runtime"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/alecthomas/template/parse"
|
|
||||||
)
|
|
||||||
|
|
||||||
// state represents the state of an execution. It's not part of the
|
|
||||||
// template so that multiple executions of the same template
|
|
||||||
// can execute in parallel.
|
|
||||||
type state struct {
|
|
||||||
tmpl *Template
|
|
||||||
wr io.Writer
|
|
||||||
node parse.Node // current node, for errors
|
|
||||||
vars []variable // push-down stack of variable values.
|
|
||||||
}
|
|
||||||
|
|
||||||
// variable holds the dynamic value of a variable such as $, $x etc.
|
|
||||||
type variable struct {
|
|
||||||
name string
|
|
||||||
value reflect.Value
|
|
||||||
}
|
|
||||||
|
|
||||||
// push pushes a new variable on the stack.
|
|
||||||
func (s *state) push(name string, value reflect.Value) {
|
|
||||||
s.vars = append(s.vars, variable{name, value})
|
|
||||||
}
|
|
||||||
|
|
||||||
// mark returns the length of the variable stack.
|
|
||||||
func (s *state) mark() int {
|
|
||||||
return len(s.vars)
|
|
||||||
}
|
|
||||||
|
|
||||||
// pop pops the variable stack up to the mark.
|
|
||||||
func (s *state) pop(mark int) {
|
|
||||||
s.vars = s.vars[0:mark]
|
|
||||||
}
|
|
||||||
|
|
||||||
// setVar overwrites the top-nth variable on the stack. Used by range iterations.
|
|
||||||
func (s *state) setVar(n int, value reflect.Value) {
|
|
||||||
s.vars[len(s.vars)-n].value = value
|
|
||||||
}
|
|
||||||
|
|
||||||
// varValue returns the value of the named variable.
|
|
||||||
func (s *state) varValue(name string) reflect.Value {
|
|
||||||
for i := s.mark() - 1; i >= 0; i-- {
|
|
||||||
if s.vars[i].name == name {
|
|
||||||
return s.vars[i].value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
s.errorf("undefined variable: %s", name)
|
|
||||||
return zero
|
|
||||||
}
|
|
||||||
|
|
||||||
var zero reflect.Value
|
|
||||||
|
|
||||||
// at marks the state to be on node n, for error reporting.
|
|
||||||
func (s *state) at(node parse.Node) {
|
|
||||||
s.node = node
|
|
||||||
}
|
|
||||||
|
|
||||||
// doublePercent returns the string with %'s replaced by %%, if necessary,
|
|
||||||
// so it can be used safely inside a Printf format string.
|
|
||||||
func doublePercent(str string) string {
|
|
||||||
if strings.Contains(str, "%") {
|
|
||||||
str = strings.Replace(str, "%", "%%", -1)
|
|
||||||
}
|
|
||||||
return str
|
|
||||||
}
|
|
||||||
|
|
||||||
// errorf formats the error and terminates processing.
|
|
||||||
func (s *state) errorf(format string, args ...interface{}) {
|
|
||||||
name := doublePercent(s.tmpl.Name())
|
|
||||||
if s.node == nil {
|
|
||||||
format = fmt.Sprintf("template: %s: %s", name, format)
|
|
||||||
} else {
|
|
||||||
location, context := s.tmpl.ErrorContext(s.node)
|
|
||||||
format = fmt.Sprintf("template: %s: executing %q at <%s>: %s", location, name, doublePercent(context), format)
|
|
||||||
}
|
|
||||||
panic(fmt.Errorf(format, args...))
|
|
||||||
}
|
|
||||||
|
|
||||||
// errRecover is the handler that turns panics into returns from the top
|
|
||||||
// level of Parse.
|
|
||||||
func errRecover(errp *error) {
|
|
||||||
e := recover()
|
|
||||||
if e != nil {
|
|
||||||
switch err := e.(type) {
|
|
||||||
case runtime.Error:
|
|
||||||
panic(e)
|
|
||||||
case error:
|
|
||||||
*errp = err
|
|
||||||
default:
|
|
||||||
panic(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ExecuteTemplate applies the template associated with t that has the given name
|
|
||||||
// to the specified data object and writes the output to wr.
|
|
||||||
// If an error occurs executing the template or writing its output,
|
|
||||||
// execution stops, but partial results may already have been written to
|
|
||||||
// the output writer.
|
|
||||||
// A template may be executed safely in parallel.
|
|
||||||
func (t *Template) ExecuteTemplate(wr io.Writer, name string, data interface{}) error {
|
|
||||||
tmpl := t.tmpl[name]
|
|
||||||
if tmpl == nil {
|
|
||||||
return fmt.Errorf("template: no template %q associated with template %q", name, t.name)
|
|
||||||
}
|
|
||||||
return tmpl.Execute(wr, data)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Execute applies a parsed template to the specified data object,
|
|
||||||
// and writes the output to wr.
|
|
||||||
// If an error occurs executing the template or writing its output,
|
|
||||||
// execution stops, but partial results may already have been written to
|
|
||||||
// the output writer.
|
|
||||||
// A template may be executed safely in parallel.
|
|
||||||
func (t *Template) Execute(wr io.Writer, data interface{}) (err error) {
|
|
||||||
defer errRecover(&err)
|
|
||||||
value := reflect.ValueOf(data)
|
|
||||||
state := &state{
|
|
||||||
tmpl: t,
|
|
||||||
wr: wr,
|
|
||||||
vars: []variable{{"$", value}},
|
|
||||||
}
|
|
||||||
t.init()
|
|
||||||
if t.Tree == nil || t.Root == nil {
|
|
||||||
var b bytes.Buffer
|
|
||||||
for name, tmpl := range t.tmpl {
|
|
||||||
if tmpl.Tree == nil || tmpl.Root == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if b.Len() > 0 {
|
|
||||||
b.WriteString(", ")
|
|
||||||
}
|
|
||||||
fmt.Fprintf(&b, "%q", name)
|
|
||||||
}
|
|
||||||
var s string
|
|
||||||
if b.Len() > 0 {
|
|
||||||
s = "; defined templates are: " + b.String()
|
|
||||||
}
|
|
||||||
state.errorf("%q is an incomplete or empty template%s", t.Name(), s)
|
|
||||||
}
|
|
||||||
state.walk(value, t.Root)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Walk functions step through the major pieces of the template structure,
|
|
||||||
// generating output as they go.
|
|
||||||
func (s *state) walk(dot reflect.Value, node parse.Node) {
|
|
||||||
s.at(node)
|
|
||||||
switch node := node.(type) {
|
|
||||||
case *parse.ActionNode:
|
|
||||||
// Do not pop variables so they persist until next end.
|
|
||||||
// Also, if the action declares variables, don't print the result.
|
|
||||||
val := s.evalPipeline(dot, node.Pipe)
|
|
||||||
if len(node.Pipe.Decl) == 0 {
|
|
||||||
s.printValue(node, val)
|
|
||||||
}
|
|
||||||
case *parse.IfNode:
|
|
||||||
s.walkIfOrWith(parse.NodeIf, dot, node.Pipe, node.List, node.ElseList)
|
|
||||||
case *parse.ListNode:
|
|
||||||
for _, node := range node.Nodes {
|
|
||||||
s.walk(dot, node)
|
|
||||||
}
|
|
||||||
case *parse.RangeNode:
|
|
||||||
s.walkRange(dot, node)
|
|
||||||
case *parse.TemplateNode:
|
|
||||||
s.walkTemplate(dot, node)
|
|
||||||
case *parse.TextNode:
|
|
||||||
if _, err := s.wr.Write(node.Text); err != nil {
|
|
||||||
s.errorf("%s", err)
|
|
||||||
}
|
|
||||||
case *parse.WithNode:
|
|
||||||
s.walkIfOrWith(parse.NodeWith, dot, node.Pipe, node.List, node.ElseList)
|
|
||||||
default:
|
|
||||||
s.errorf("unknown node: %s", node)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// walkIfOrWith walks an 'if' or 'with' node. The two control structures
|
|
||||||
// are identical in behavior except that 'with' sets dot.
|
|
||||||
func (s *state) walkIfOrWith(typ parse.NodeType, dot reflect.Value, pipe *parse.PipeNode, list, elseList *parse.ListNode) {
|
|
||||||
defer s.pop(s.mark())
|
|
||||||
val := s.evalPipeline(dot, pipe)
|
|
||||||
truth, ok := isTrue(val)
|
|
||||||
if !ok {
|
|
||||||
s.errorf("if/with can't use %v", val)
|
|
||||||
}
|
|
||||||
if truth {
|
|
||||||
if typ == parse.NodeWith {
|
|
||||||
s.walk(val, list)
|
|
||||||
} else {
|
|
||||||
s.walk(dot, list)
|
|
||||||
}
|
|
||||||
} else if elseList != nil {
|
|
||||||
s.walk(dot, elseList)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// isTrue reports whether the value is 'true', in the sense of not the zero of its type,
|
|
||||||
// and whether the value has a meaningful truth value.
|
|
||||||
func isTrue(val reflect.Value) (truth, ok bool) {
|
|
||||||
if !val.IsValid() {
|
|
||||||
// Something like var x interface{}, never set. It's a form of nil.
|
|
||||||
return false, true
|
|
||||||
}
|
|
||||||
switch val.Kind() {
|
|
||||||
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
|
|
||||||
truth = val.Len() > 0
|
|
||||||
case reflect.Bool:
|
|
||||||
truth = val.Bool()
|
|
||||||
case reflect.Complex64, reflect.Complex128:
|
|
||||||
truth = val.Complex() != 0
|
|
||||||
case reflect.Chan, reflect.Func, reflect.Ptr, reflect.Interface:
|
|
||||||
truth = !val.IsNil()
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
|
||||||
truth = val.Int() != 0
|
|
||||||
case reflect.Float32, reflect.Float64:
|
|
||||||
truth = val.Float() != 0
|
|
||||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
|
||||||
truth = val.Uint() != 0
|
|
||||||
case reflect.Struct:
|
|
||||||
truth = true // Struct values are always true.
|
|
||||||
default:
|
|
||||||
return
|
|
||||||
}
|
|
||||||
return truth, true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) walkRange(dot reflect.Value, r *parse.RangeNode) {
|
|
||||||
s.at(r)
|
|
||||||
defer s.pop(s.mark())
|
|
||||||
val, _ := indirect(s.evalPipeline(dot, r.Pipe))
|
|
||||||
// mark top of stack before any variables in the body are pushed.
|
|
||||||
mark := s.mark()
|
|
||||||
oneIteration := func(index, elem reflect.Value) {
|
|
||||||
// Set top var (lexically the second if there are two) to the element.
|
|
||||||
if len(r.Pipe.Decl) > 0 {
|
|
||||||
s.setVar(1, elem)
|
|
||||||
}
|
|
||||||
// Set next var (lexically the first if there are two) to the index.
|
|
||||||
if len(r.Pipe.Decl) > 1 {
|
|
||||||
s.setVar(2, index)
|
|
||||||
}
|
|
||||||
s.walk(elem, r.List)
|
|
||||||
s.pop(mark)
|
|
||||||
}
|
|
||||||
switch val.Kind() {
|
|
||||||
case reflect.Array, reflect.Slice:
|
|
||||||
if val.Len() == 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
for i := 0; i < val.Len(); i++ {
|
|
||||||
oneIteration(reflect.ValueOf(i), val.Index(i))
|
|
||||||
}
|
|
||||||
return
|
|
||||||
case reflect.Map:
|
|
||||||
if val.Len() == 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
for _, key := range sortKeys(val.MapKeys()) {
|
|
||||||
oneIteration(key, val.MapIndex(key))
|
|
||||||
}
|
|
||||||
return
|
|
||||||
case reflect.Chan:
|
|
||||||
if val.IsNil() {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
i := 0
|
|
||||||
for ; ; i++ {
|
|
||||||
elem, ok := val.Recv()
|
|
||||||
if !ok {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
oneIteration(reflect.ValueOf(i), elem)
|
|
||||||
}
|
|
||||||
if i == 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return
|
|
||||||
case reflect.Invalid:
|
|
||||||
break // An invalid value is likely a nil map, etc. and acts like an empty map.
|
|
||||||
default:
|
|
||||||
s.errorf("range can't iterate over %v", val)
|
|
||||||
}
|
|
||||||
if r.ElseList != nil {
|
|
||||||
s.walk(dot, r.ElseList)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) walkTemplate(dot reflect.Value, t *parse.TemplateNode) {
|
|
||||||
s.at(t)
|
|
||||||
tmpl := s.tmpl.tmpl[t.Name]
|
|
||||||
if tmpl == nil {
|
|
||||||
s.errorf("template %q not defined", t.Name)
|
|
||||||
}
|
|
||||||
// Variables declared by the pipeline persist.
|
|
||||||
dot = s.evalPipeline(dot, t.Pipe)
|
|
||||||
newState := *s
|
|
||||||
newState.tmpl = tmpl
|
|
||||||
// No dynamic scoping: template invocations inherit no variables.
|
|
||||||
newState.vars = []variable{{"$", dot}}
|
|
||||||
newState.walk(dot, tmpl.Root)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Eval functions evaluate pipelines, commands, and their elements and extract
|
|
||||||
// values from the data structure by examining fields, calling methods, and so on.
|
|
||||||
// The printing of those values happens only through walk functions.
|
|
||||||
|
|
||||||
// evalPipeline returns the value acquired by evaluating a pipeline. If the
|
|
||||||
// pipeline has a variable declaration, the variable will be pushed on the
|
|
||||||
// stack. Callers should therefore pop the stack after they are finished
|
|
||||||
// executing commands depending on the pipeline value.
|
|
||||||
func (s *state) evalPipeline(dot reflect.Value, pipe *parse.PipeNode) (value reflect.Value) {
|
|
||||||
if pipe == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
s.at(pipe)
|
|
||||||
for _, cmd := range pipe.Cmds {
|
|
||||||
value = s.evalCommand(dot, cmd, value) // previous value is this one's final arg.
|
|
||||||
// If the object has type interface{}, dig down one level to the thing inside.
|
|
||||||
if value.Kind() == reflect.Interface && value.Type().NumMethod() == 0 {
|
|
||||||
value = reflect.ValueOf(value.Interface()) // lovely!
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, variable := range pipe.Decl {
|
|
||||||
s.push(variable.Ident[0], value)
|
|
||||||
}
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) notAFunction(args []parse.Node, final reflect.Value) {
|
|
||||||
if len(args) > 1 || final.IsValid() {
|
|
||||||
s.errorf("can't give argument to non-function %s", args[0])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalCommand(dot reflect.Value, cmd *parse.CommandNode, final reflect.Value) reflect.Value {
|
|
||||||
firstWord := cmd.Args[0]
|
|
||||||
switch n := firstWord.(type) {
|
|
||||||
case *parse.FieldNode:
|
|
||||||
return s.evalFieldNode(dot, n, cmd.Args, final)
|
|
||||||
case *parse.ChainNode:
|
|
||||||
return s.evalChainNode(dot, n, cmd.Args, final)
|
|
||||||
case *parse.IdentifierNode:
|
|
||||||
// Must be a function.
|
|
||||||
return s.evalFunction(dot, n, cmd, cmd.Args, final)
|
|
||||||
case *parse.PipeNode:
|
|
||||||
// Parenthesized pipeline. The arguments are all inside the pipeline; final is ignored.
|
|
||||||
return s.evalPipeline(dot, n)
|
|
||||||
case *parse.VariableNode:
|
|
||||||
return s.evalVariableNode(dot, n, cmd.Args, final)
|
|
||||||
}
|
|
||||||
s.at(firstWord)
|
|
||||||
s.notAFunction(cmd.Args, final)
|
|
||||||
switch word := firstWord.(type) {
|
|
||||||
case *parse.BoolNode:
|
|
||||||
return reflect.ValueOf(word.True)
|
|
||||||
case *parse.DotNode:
|
|
||||||
return dot
|
|
||||||
case *parse.NilNode:
|
|
||||||
s.errorf("nil is not a command")
|
|
||||||
case *parse.NumberNode:
|
|
||||||
return s.idealConstant(word)
|
|
||||||
case *parse.StringNode:
|
|
||||||
return reflect.ValueOf(word.Text)
|
|
||||||
}
|
|
||||||
s.errorf("can't evaluate command %q", firstWord)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
// idealConstant is called to return the value of a number in a context where
|
|
||||||
// we don't know the type. In that case, the syntax of the number tells us
|
|
||||||
// its type, and we use Go rules to resolve. Note there is no such thing as
|
|
||||||
// a uint ideal constant in this situation - the value must be of int type.
|
|
||||||
func (s *state) idealConstant(constant *parse.NumberNode) reflect.Value {
|
|
||||||
// These are ideal constants but we don't know the type
|
|
||||||
// and we have no context. (If it was a method argument,
|
|
||||||
// we'd know what we need.) The syntax guides us to some extent.
|
|
||||||
s.at(constant)
|
|
||||||
switch {
|
|
||||||
case constant.IsComplex:
|
|
||||||
return reflect.ValueOf(constant.Complex128) // incontrovertible.
|
|
||||||
case constant.IsFloat && !isHexConstant(constant.Text) && strings.IndexAny(constant.Text, ".eE") >= 0:
|
|
||||||
return reflect.ValueOf(constant.Float64)
|
|
||||||
case constant.IsInt:
|
|
||||||
n := int(constant.Int64)
|
|
||||||
if int64(n) != constant.Int64 {
|
|
||||||
s.errorf("%s overflows int", constant.Text)
|
|
||||||
}
|
|
||||||
return reflect.ValueOf(n)
|
|
||||||
case constant.IsUint:
|
|
||||||
s.errorf("%s overflows int", constant.Text)
|
|
||||||
}
|
|
||||||
return zero
|
|
||||||
}
|
|
||||||
|
|
||||||
func isHexConstant(s string) bool {
|
|
||||||
return len(s) > 2 && s[0] == '0' && (s[1] == 'x' || s[1] == 'X')
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalFieldNode(dot reflect.Value, field *parse.FieldNode, args []parse.Node, final reflect.Value) reflect.Value {
|
|
||||||
s.at(field)
|
|
||||||
return s.evalFieldChain(dot, dot, field, field.Ident, args, final)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalChainNode(dot reflect.Value, chain *parse.ChainNode, args []parse.Node, final reflect.Value) reflect.Value {
|
|
||||||
s.at(chain)
|
|
||||||
// (pipe).Field1.Field2 has pipe as .Node, fields as .Field. Eval the pipeline, then the fields.
|
|
||||||
pipe := s.evalArg(dot, nil, chain.Node)
|
|
||||||
if len(chain.Field) == 0 {
|
|
||||||
s.errorf("internal error: no fields in evalChainNode")
|
|
||||||
}
|
|
||||||
return s.evalFieldChain(dot, pipe, chain, chain.Field, args, final)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalVariableNode(dot reflect.Value, variable *parse.VariableNode, args []parse.Node, final reflect.Value) reflect.Value {
|
|
||||||
// $x.Field has $x as the first ident, Field as the second. Eval the var, then the fields.
|
|
||||||
s.at(variable)
|
|
||||||
value := s.varValue(variable.Ident[0])
|
|
||||||
if len(variable.Ident) == 1 {
|
|
||||||
s.notAFunction(args, final)
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
return s.evalFieldChain(dot, value, variable, variable.Ident[1:], args, final)
|
|
||||||
}
|
|
||||||
|
|
||||||
// evalFieldChain evaluates .X.Y.Z possibly followed by arguments.
|
|
||||||
// dot is the environment in which to evaluate arguments, while
|
|
||||||
// receiver is the value being walked along the chain.
|
|
||||||
func (s *state) evalFieldChain(dot, receiver reflect.Value, node parse.Node, ident []string, args []parse.Node, final reflect.Value) reflect.Value {
|
|
||||||
n := len(ident)
|
|
||||||
for i := 0; i < n-1; i++ {
|
|
||||||
receiver = s.evalField(dot, ident[i], node, nil, zero, receiver)
|
|
||||||
}
|
|
||||||
// Now if it's a method, it gets the arguments.
|
|
||||||
return s.evalField(dot, ident[n-1], node, args, final, receiver)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalFunction(dot reflect.Value, node *parse.IdentifierNode, cmd parse.Node, args []parse.Node, final reflect.Value) reflect.Value {
|
|
||||||
s.at(node)
|
|
||||||
name := node.Ident
|
|
||||||
function, ok := findFunction(name, s.tmpl)
|
|
||||||
if !ok {
|
|
||||||
s.errorf("%q is not a defined function", name)
|
|
||||||
}
|
|
||||||
return s.evalCall(dot, function, cmd, name, args, final)
|
|
||||||
}
|
|
||||||
|
|
||||||
// evalField evaluates an expression like (.Field) or (.Field arg1 arg2).
|
|
||||||
// The 'final' argument represents the return value from the preceding
|
|
||||||
// value of the pipeline, if any.
|
|
||||||
func (s *state) evalField(dot reflect.Value, fieldName string, node parse.Node, args []parse.Node, final, receiver reflect.Value) reflect.Value {
|
|
||||||
if !receiver.IsValid() {
|
|
||||||
return zero
|
|
||||||
}
|
|
||||||
typ := receiver.Type()
|
|
||||||
receiver, _ = indirect(receiver)
|
|
||||||
// Unless it's an interface, need to get to a value of type *T to guarantee
|
|
||||||
// we see all methods of T and *T.
|
|
||||||
ptr := receiver
|
|
||||||
if ptr.Kind() != reflect.Interface && ptr.CanAddr() {
|
|
||||||
ptr = ptr.Addr()
|
|
||||||
}
|
|
||||||
if method := ptr.MethodByName(fieldName); method.IsValid() {
|
|
||||||
return s.evalCall(dot, method, node, fieldName, args, final)
|
|
||||||
}
|
|
||||||
hasArgs := len(args) > 1 || final.IsValid()
|
|
||||||
// It's not a method; must be a field of a struct or an element of a map. The receiver must not be nil.
|
|
||||||
receiver, isNil := indirect(receiver)
|
|
||||||
if isNil {
|
|
||||||
s.errorf("nil pointer evaluating %s.%s", typ, fieldName)
|
|
||||||
}
|
|
||||||
switch receiver.Kind() {
|
|
||||||
case reflect.Struct:
|
|
||||||
tField, ok := receiver.Type().FieldByName(fieldName)
|
|
||||||
if ok {
|
|
||||||
field := receiver.FieldByIndex(tField.Index)
|
|
||||||
if tField.PkgPath != "" { // field is unexported
|
|
||||||
s.errorf("%s is an unexported field of struct type %s", fieldName, typ)
|
|
||||||
}
|
|
||||||
// If it's a function, we must call it.
|
|
||||||
if hasArgs {
|
|
||||||
s.errorf("%s has arguments but cannot be invoked as function", fieldName)
|
|
||||||
}
|
|
||||||
return field
|
|
||||||
}
|
|
||||||
s.errorf("%s is not a field of struct type %s", fieldName, typ)
|
|
||||||
case reflect.Map:
|
|
||||||
// If it's a map, attempt to use the field name as a key.
|
|
||||||
nameVal := reflect.ValueOf(fieldName)
|
|
||||||
if nameVal.Type().AssignableTo(receiver.Type().Key()) {
|
|
||||||
if hasArgs {
|
|
||||||
s.errorf("%s is not a method but has arguments", fieldName)
|
|
||||||
}
|
|
||||||
return receiver.MapIndex(nameVal)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
s.errorf("can't evaluate field %s in type %s", fieldName, typ)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
errorType = reflect.TypeOf((*error)(nil)).Elem()
|
|
||||||
fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem()
|
|
||||||
)
|
|
||||||
|
|
||||||
// evalCall executes a function or method call. If it's a method, fun already has the receiver bound, so
|
|
||||||
// it looks just like a function call. The arg list, if non-nil, includes (in the manner of the shell), arg[0]
|
|
||||||
// as the function itself.
|
|
||||||
func (s *state) evalCall(dot, fun reflect.Value, node parse.Node, name string, args []parse.Node, final reflect.Value) reflect.Value {
|
|
||||||
if args != nil {
|
|
||||||
args = args[1:] // Zeroth arg is function name/node; not passed to function.
|
|
||||||
}
|
|
||||||
typ := fun.Type()
|
|
||||||
numIn := len(args)
|
|
||||||
if final.IsValid() {
|
|
||||||
numIn++
|
|
||||||
}
|
|
||||||
numFixed := len(args)
|
|
||||||
if typ.IsVariadic() {
|
|
||||||
numFixed = typ.NumIn() - 1 // last arg is the variadic one.
|
|
||||||
if numIn < numFixed {
|
|
||||||
s.errorf("wrong number of args for %s: want at least %d got %d", name, typ.NumIn()-1, len(args))
|
|
||||||
}
|
|
||||||
} else if numIn < typ.NumIn()-1 || !typ.IsVariadic() && numIn != typ.NumIn() {
|
|
||||||
s.errorf("wrong number of args for %s: want %d got %d", name, typ.NumIn(), len(args))
|
|
||||||
}
|
|
||||||
if !goodFunc(typ) {
|
|
||||||
// TODO: This could still be a confusing error; maybe goodFunc should provide info.
|
|
||||||
s.errorf("can't call method/function %q with %d results", name, typ.NumOut())
|
|
||||||
}
|
|
||||||
// Build the arg list.
|
|
||||||
argv := make([]reflect.Value, numIn)
|
|
||||||
// Args must be evaluated. Fixed args first.
|
|
||||||
i := 0
|
|
||||||
for ; i < numFixed && i < len(args); i++ {
|
|
||||||
argv[i] = s.evalArg(dot, typ.In(i), args[i])
|
|
||||||
}
|
|
||||||
// Now the ... args.
|
|
||||||
if typ.IsVariadic() {
|
|
||||||
argType := typ.In(typ.NumIn() - 1).Elem() // Argument is a slice.
|
|
||||||
for ; i < len(args); i++ {
|
|
||||||
argv[i] = s.evalArg(dot, argType, args[i])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Add final value if necessary.
|
|
||||||
if final.IsValid() {
|
|
||||||
t := typ.In(typ.NumIn() - 1)
|
|
||||||
if typ.IsVariadic() {
|
|
||||||
t = t.Elem()
|
|
||||||
}
|
|
||||||
argv[i] = s.validateType(final, t)
|
|
||||||
}
|
|
||||||
result := fun.Call(argv)
|
|
||||||
// If we have an error that is not nil, stop execution and return that error to the caller.
|
|
||||||
if len(result) == 2 && !result[1].IsNil() {
|
|
||||||
s.at(node)
|
|
||||||
s.errorf("error calling %s: %s", name, result[1].Interface().(error))
|
|
||||||
}
|
|
||||||
return result[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero.
|
|
||||||
func canBeNil(typ reflect.Type) bool {
|
|
||||||
switch typ.Kind() {
|
|
||||||
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// validateType guarantees that the value is valid and assignable to the type.
|
|
||||||
func (s *state) validateType(value reflect.Value, typ reflect.Type) reflect.Value {
|
|
||||||
if !value.IsValid() {
|
|
||||||
if typ == nil || canBeNil(typ) {
|
|
||||||
// An untyped nil interface{}. Accept as a proper nil value.
|
|
||||||
return reflect.Zero(typ)
|
|
||||||
}
|
|
||||||
s.errorf("invalid value; expected %s", typ)
|
|
||||||
}
|
|
||||||
if typ != nil && !value.Type().AssignableTo(typ) {
|
|
||||||
if value.Kind() == reflect.Interface && !value.IsNil() {
|
|
||||||
value = value.Elem()
|
|
||||||
if value.Type().AssignableTo(typ) {
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
// fallthrough
|
|
||||||
}
|
|
||||||
// Does one dereference or indirection work? We could do more, as we
|
|
||||||
// do with method receivers, but that gets messy and method receivers
|
|
||||||
// are much more constrained, so it makes more sense there than here.
|
|
||||||
// Besides, one is almost always all you need.
|
|
||||||
switch {
|
|
||||||
case value.Kind() == reflect.Ptr && value.Type().Elem().AssignableTo(typ):
|
|
||||||
value = value.Elem()
|
|
||||||
if !value.IsValid() {
|
|
||||||
s.errorf("dereference of nil pointer of type %s", typ)
|
|
||||||
}
|
|
||||||
case reflect.PtrTo(value.Type()).AssignableTo(typ) && value.CanAddr():
|
|
||||||
value = value.Addr()
|
|
||||||
default:
|
|
||||||
s.errorf("wrong type for value; expected %s; got %s", typ, value.Type())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalArg(dot reflect.Value, typ reflect.Type, n parse.Node) reflect.Value {
|
|
||||||
s.at(n)
|
|
||||||
switch arg := n.(type) {
|
|
||||||
case *parse.DotNode:
|
|
||||||
return s.validateType(dot, typ)
|
|
||||||
case *parse.NilNode:
|
|
||||||
if canBeNil(typ) {
|
|
||||||
return reflect.Zero(typ)
|
|
||||||
}
|
|
||||||
s.errorf("cannot assign nil to %s", typ)
|
|
||||||
case *parse.FieldNode:
|
|
||||||
return s.validateType(s.evalFieldNode(dot, arg, []parse.Node{n}, zero), typ)
|
|
||||||
case *parse.VariableNode:
|
|
||||||
return s.validateType(s.evalVariableNode(dot, arg, nil, zero), typ)
|
|
||||||
case *parse.PipeNode:
|
|
||||||
return s.validateType(s.evalPipeline(dot, arg), typ)
|
|
||||||
case *parse.IdentifierNode:
|
|
||||||
return s.evalFunction(dot, arg, arg, nil, zero)
|
|
||||||
case *parse.ChainNode:
|
|
||||||
return s.validateType(s.evalChainNode(dot, arg, nil, zero), typ)
|
|
||||||
}
|
|
||||||
switch typ.Kind() {
|
|
||||||
case reflect.Bool:
|
|
||||||
return s.evalBool(typ, n)
|
|
||||||
case reflect.Complex64, reflect.Complex128:
|
|
||||||
return s.evalComplex(typ, n)
|
|
||||||
case reflect.Float32, reflect.Float64:
|
|
||||||
return s.evalFloat(typ, n)
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
|
||||||
return s.evalInteger(typ, n)
|
|
||||||
case reflect.Interface:
|
|
||||||
if typ.NumMethod() == 0 {
|
|
||||||
return s.evalEmptyInterface(dot, n)
|
|
||||||
}
|
|
||||||
case reflect.String:
|
|
||||||
return s.evalString(typ, n)
|
|
||||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
|
||||||
return s.evalUnsignedInteger(typ, n)
|
|
||||||
}
|
|
||||||
s.errorf("can't handle %s for arg of type %s", n, typ)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalBool(typ reflect.Type, n parse.Node) reflect.Value {
|
|
||||||
s.at(n)
|
|
||||||
if n, ok := n.(*parse.BoolNode); ok {
|
|
||||||
value := reflect.New(typ).Elem()
|
|
||||||
value.SetBool(n.True)
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
s.errorf("expected bool; found %s", n)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalString(typ reflect.Type, n parse.Node) reflect.Value {
|
|
||||||
s.at(n)
|
|
||||||
if n, ok := n.(*parse.StringNode); ok {
|
|
||||||
value := reflect.New(typ).Elem()
|
|
||||||
value.SetString(n.Text)
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
s.errorf("expected string; found %s", n)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalInteger(typ reflect.Type, n parse.Node) reflect.Value {
|
|
||||||
s.at(n)
|
|
||||||
if n, ok := n.(*parse.NumberNode); ok && n.IsInt {
|
|
||||||
value := reflect.New(typ).Elem()
|
|
||||||
value.SetInt(n.Int64)
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
s.errorf("expected integer; found %s", n)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalUnsignedInteger(typ reflect.Type, n parse.Node) reflect.Value {
|
|
||||||
s.at(n)
|
|
||||||
if n, ok := n.(*parse.NumberNode); ok && n.IsUint {
|
|
||||||
value := reflect.New(typ).Elem()
|
|
||||||
value.SetUint(n.Uint64)
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
s.errorf("expected unsigned integer; found %s", n)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalFloat(typ reflect.Type, n parse.Node) reflect.Value {
|
|
||||||
s.at(n)
|
|
||||||
if n, ok := n.(*parse.NumberNode); ok && n.IsFloat {
|
|
||||||
value := reflect.New(typ).Elem()
|
|
||||||
value.SetFloat(n.Float64)
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
s.errorf("expected float; found %s", n)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalComplex(typ reflect.Type, n parse.Node) reflect.Value {
|
|
||||||
if n, ok := n.(*parse.NumberNode); ok && n.IsComplex {
|
|
||||||
value := reflect.New(typ).Elem()
|
|
||||||
value.SetComplex(n.Complex128)
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
s.errorf("expected complex; found %s", n)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *state) evalEmptyInterface(dot reflect.Value, n parse.Node) reflect.Value {
|
|
||||||
s.at(n)
|
|
||||||
switch n := n.(type) {
|
|
||||||
case *parse.BoolNode:
|
|
||||||
return reflect.ValueOf(n.True)
|
|
||||||
case *parse.DotNode:
|
|
||||||
return dot
|
|
||||||
case *parse.FieldNode:
|
|
||||||
return s.evalFieldNode(dot, n, nil, zero)
|
|
||||||
case *parse.IdentifierNode:
|
|
||||||
return s.evalFunction(dot, n, n, nil, zero)
|
|
||||||
case *parse.NilNode:
|
|
||||||
// NilNode is handled in evalArg, the only place that calls here.
|
|
||||||
s.errorf("evalEmptyInterface: nil (can't happen)")
|
|
||||||
case *parse.NumberNode:
|
|
||||||
return s.idealConstant(n)
|
|
||||||
case *parse.StringNode:
|
|
||||||
return reflect.ValueOf(n.Text)
|
|
||||||
case *parse.VariableNode:
|
|
||||||
return s.evalVariableNode(dot, n, nil, zero)
|
|
||||||
case *parse.PipeNode:
|
|
||||||
return s.evalPipeline(dot, n)
|
|
||||||
}
|
|
||||||
s.errorf("can't handle assignment of %s to empty interface argument", n)
|
|
||||||
panic("not reached")
|
|
||||||
}
|
|
||||||
|
|
||||||
// indirect returns the item at the end of indirection, and a bool to indicate if it's nil.
|
|
||||||
// We indirect through pointers and empty interfaces (only) because
|
|
||||||
// non-empty interfaces have methods we might need.
|
|
||||||
func indirect(v reflect.Value) (rv reflect.Value, isNil bool) {
|
|
||||||
for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() {
|
|
||||||
if v.IsNil() {
|
|
||||||
return v, true
|
|
||||||
}
|
|
||||||
if v.Kind() == reflect.Interface && v.NumMethod() > 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return v, false
|
|
||||||
}
|
|
||||||
|
|
||||||
// printValue writes the textual representation of the value to the output of
|
|
||||||
// the template.
|
|
||||||
func (s *state) printValue(n parse.Node, v reflect.Value) {
|
|
||||||
s.at(n)
|
|
||||||
iface, ok := printableValue(v)
|
|
||||||
if !ok {
|
|
||||||
s.errorf("can't print %s of type %s", n, v.Type())
|
|
||||||
}
|
|
||||||
fmt.Fprint(s.wr, iface)
|
|
||||||
}
|
|
||||||
|
|
||||||
// printableValue returns the, possibly indirected, interface value inside v that
|
|
||||||
// is best for a call to formatted printer.
|
|
||||||
func printableValue(v reflect.Value) (interface{}, bool) {
|
|
||||||
if v.Kind() == reflect.Ptr {
|
|
||||||
v, _ = indirect(v) // fmt.Fprint handles nil.
|
|
||||||
}
|
|
||||||
if !v.IsValid() {
|
|
||||||
return "<no value>", true
|
|
||||||
}
|
|
||||||
|
|
||||||
if !v.Type().Implements(errorType) && !v.Type().Implements(fmtStringerType) {
|
|
||||||
if v.CanAddr() && (reflect.PtrTo(v.Type()).Implements(errorType) || reflect.PtrTo(v.Type()).Implements(fmtStringerType)) {
|
|
||||||
v = v.Addr()
|
|
||||||
} else {
|
|
||||||
switch v.Kind() {
|
|
||||||
case reflect.Chan, reflect.Func:
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return v.Interface(), true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Types to help sort the keys in a map for reproducible output.
|
|
||||||
|
|
||||||
type rvs []reflect.Value
|
|
||||||
|
|
||||||
func (x rvs) Len() int { return len(x) }
|
|
||||||
func (x rvs) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
|
||||||
|
|
||||||
type rvInts struct{ rvs }
|
|
||||||
|
|
||||||
func (x rvInts) Less(i, j int) bool { return x.rvs[i].Int() < x.rvs[j].Int() }
|
|
||||||
|
|
||||||
type rvUints struct{ rvs }
|
|
||||||
|
|
||||||
func (x rvUints) Less(i, j int) bool { return x.rvs[i].Uint() < x.rvs[j].Uint() }
|
|
||||||
|
|
||||||
type rvFloats struct{ rvs }
|
|
||||||
|
|
||||||
func (x rvFloats) Less(i, j int) bool { return x.rvs[i].Float() < x.rvs[j].Float() }
|
|
||||||
|
|
||||||
type rvStrings struct{ rvs }
|
|
||||||
|
|
||||||
func (x rvStrings) Less(i, j int) bool { return x.rvs[i].String() < x.rvs[j].String() }
|
|
||||||
|
|
||||||
// sortKeys sorts (if it can) the slice of reflect.Values, which is a slice of map keys.
|
|
||||||
func sortKeys(v []reflect.Value) []reflect.Value {
|
|
||||||
if len(v) <= 1 {
|
|
||||||
return v
|
|
||||||
}
|
|
||||||
switch v[0].Kind() {
|
|
||||||
case reflect.Float32, reflect.Float64:
|
|
||||||
sort.Sort(rvFloats{v})
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
|
||||||
sort.Sort(rvInts{v})
|
|
||||||
case reflect.String:
|
|
||||||
sort.Sort(rvStrings{v})
|
|
||||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
|
||||||
sort.Sort(rvUints{v})
|
|
||||||
}
|
|
||||||
return v
|
|
||||||
}
|
|
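evalCall above is where a non-nil second return value from a template function aborts execution and surfaces as the error returned by Execute. A small sketch of that behaviour through the public Funcs/FuncMap API; the double helper and its inputs are hypothetical and only illustrate the one-value-plus-error signature that goodFunc accepts:

```go
package main

import (
	"fmt"
	"os"

	"github.com/alecthomas/template"
)

// double has the signature shape goodFunc accepts: one value plus an error.
// A non-nil error stops template execution, as evalCall above shows.
func double(n int) (int, error) {
	if n < 0 {
		return 0, fmt.Errorf("negative input %d", n)
	}
	return 2 * n, nil
}

func main() {
	tmpl, err := template.New("calc").
		Funcs(template.FuncMap{"double": double}).
		Parse("{{double .}}\n")
	if err != nil {
		panic(err)
	}

	// Succeeds and prints "34\n".
	if err := tmpl.Execute(os.Stdout, 17); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
	// Fails: double's error is wrapped into the error returned by Execute
	// ("... error calling double: negative input -1").
	if err := tmpl.Execute(os.Stdout, -1); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```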
598
vendor/github.com/alecthomas/template/funcs.go
generated
vendored
@ -1,598 +0,0 @@
|
|||||||
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package template
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/url"
|
|
||||||
"reflect"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
// FuncMap is the type of the map defining the mapping from names to functions.
|
|
||||||
// Each function must have either a single return value, or two return values of
|
|
||||||
// which the second has type error. In that case, if the second (error)
|
|
||||||
// return value evaluates to non-nil during execution, execution terminates and
|
|
||||||
// Execute returns that error.
|
|
||||||
type FuncMap map[string]interface{}
|
|
||||||
|
|
||||||
var builtins = FuncMap{
|
|
||||||
"and": and,
|
|
||||||
"call": call,
|
|
||||||
"html": HTMLEscaper,
|
|
||||||
"index": index,
|
|
||||||
"js": JSEscaper,
|
|
||||||
"len": length,
|
|
||||||
"not": not,
|
|
||||||
"or": or,
|
|
||||||
"print": fmt.Sprint,
|
|
||||||
"printf": fmt.Sprintf,
|
|
||||||
"println": fmt.Sprintln,
|
|
||||||
"urlquery": URLQueryEscaper,
|
|
||||||
|
|
||||||
// Comparisons
|
|
||||||
"eq": eq, // ==
|
|
||||||
"ge": ge, // >=
|
|
||||||
"gt": gt, // >
|
|
||||||
"le": le, // <=
|
|
||||||
"lt": lt, // <
|
|
||||||
"ne": ne, // !=
|
|
||||||
}
|
|
||||||
|
|
||||||
var builtinFuncs = createValueFuncs(builtins)
|
|
||||||
|
|
||||||
// createValueFuncs turns a FuncMap into a map[string]reflect.Value
|
|
||||||
func createValueFuncs(funcMap FuncMap) map[string]reflect.Value {
|
|
||||||
m := make(map[string]reflect.Value)
|
|
||||||
addValueFuncs(m, funcMap)
|
|
||||||
return m
|
|
||||||
}
|
|
||||||
|
|
||||||
// addValueFuncs adds to values the functions in funcs, converting them to reflect.Values.
|
|
||||||
func addValueFuncs(out map[string]reflect.Value, in FuncMap) {
|
|
||||||
for name, fn := range in {
|
|
||||||
v := reflect.ValueOf(fn)
|
|
||||||
if v.Kind() != reflect.Func {
|
|
||||||
panic("value for " + name + " not a function")
|
|
||||||
}
|
|
||||||
if !goodFunc(v.Type()) {
|
|
||||||
panic(fmt.Errorf("can't install method/function %q with %d results", name, v.Type().NumOut()))
|
|
||||||
}
|
|
||||||
out[name] = v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// addFuncs adds to values the functions in funcs. It does no checking of the input -
|
|
||||||
// call addValueFuncs first.
|
|
||||||
func addFuncs(out, in FuncMap) {
|
|
||||||
for name, fn := range in {
|
|
||||||
out[name] = fn
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// goodFunc checks that the function or method has the right result signature.
|
|
||||||
func goodFunc(typ reflect.Type) bool {
|
|
||||||
// We allow functions with 1 result or 2 results where the second is an error.
|
|
||||||
switch {
|
|
||||||
case typ.NumOut() == 1:
|
|
||||||
return true
|
|
||||||
case typ.NumOut() == 2 && typ.Out(1) == errorType:
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// findFunction looks for a function in the template, and global map.
|
|
||||||
func findFunction(name string, tmpl *Template) (reflect.Value, bool) {
|
|
||||||
if tmpl != nil && tmpl.common != nil {
|
|
||||||
if fn := tmpl.execFuncs[name]; fn.IsValid() {
|
|
||||||
return fn, true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if fn := builtinFuncs[name]; fn.IsValid() {
|
|
||||||
return fn, true
|
|
||||||
}
|
|
||||||
return reflect.Value{}, false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Indexing.
|
|
||||||
|
|
||||||
// index returns the result of indexing its first argument by the following
|
|
||||||
// arguments. Thus "index x 1 2 3" is, in Go syntax, x[1][2][3]. Each
|
|
||||||
// indexed item must be a map, slice, or array.
|
|
||||||
func index(item interface{}, indices ...interface{}) (interface{}, error) {
|
|
||||||
v := reflect.ValueOf(item)
|
|
||||||
for _, i := range indices {
|
|
||||||
index := reflect.ValueOf(i)
|
|
||||||
var isNil bool
|
|
||||||
if v, isNil = indirect(v); isNil {
|
|
||||||
return nil, fmt.Errorf("index of nil pointer")
|
|
||||||
}
|
|
||||||
switch v.Kind() {
|
|
||||||
case reflect.Array, reflect.Slice, reflect.String:
|
|
||||||
var x int64
|
|
||||||
switch index.Kind() {
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
|
||||||
x = index.Int()
|
|
||||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
|
||||||
x = int64(index.Uint())
|
|
||||||
default:
|
|
||||||
return nil, fmt.Errorf("cannot index slice/array with type %s", index.Type())
|
|
||||||
}
|
|
||||||
if x < 0 || x >= int64(v.Len()) {
|
|
||||||
return nil, fmt.Errorf("index out of range: %d", x)
|
|
||||||
}
|
|
||||||
v = v.Index(int(x))
|
|
||||||
case reflect.Map:
|
|
||||||
if !index.IsValid() {
|
|
||||||
index = reflect.Zero(v.Type().Key())
|
|
||||||
}
|
|
||||||
if !index.Type().AssignableTo(v.Type().Key()) {
|
|
||||||
return nil, fmt.Errorf("%s is not index type for %s", index.Type(), v.Type())
|
|
||||||
}
|
|
||||||
if x := v.MapIndex(index); x.IsValid() {
|
|
||||||
v = x
|
|
||||||
} else {
|
|
||||||
v = reflect.Zero(v.Type().Elem())
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return nil, fmt.Errorf("can't index item of type %s", v.Type())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return v.Interface(), nil
|
|
||||||
}
|
|
||||||
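A short usage sketch of the index builtin documented above, again through the standard text/template front end (the data keys are made up for illustration): "index x 1 0" behaves like x[1][0], and maps are indexed by key with a missing key yielding the zero value.

package main

import (
	"os"
	"text/template"
)

func main() {
	data := map[string]interface{}{
		"matrix": [][]int{{1, 2}, {3, 4}},
		"ports":  map[string]int{"http": 80, "https": 443},
	}
	// {{index .matrix 1 0}} is data["matrix"][1][0]; len works on the same kinds.
	t := template.Must(template.New("idx").Parse(
		`{{index .matrix 1 0}} {{index .ports "https"}} {{len .ports}}`))
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
	// Output: 3 443 2
}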
|
|
||||||
// Length
|
|
||||||
|
|
||||||
// length returns the length of the item, with an error if it has no defined length.
|
|
||||||
func length(item interface{}) (int, error) {
|
|
||||||
v, isNil := indirect(reflect.ValueOf(item))
|
|
||||||
if isNil {
|
|
||||||
return 0, fmt.Errorf("len of nil pointer")
|
|
||||||
}
|
|
||||||
switch v.Kind() {
|
|
||||||
case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
|
|
||||||
return v.Len(), nil
|
|
||||||
}
|
|
||||||
return 0, fmt.Errorf("len of type %s", v.Type())
|
|
||||||
}
|
|
||||||
|
|
||||||
// Function invocation
|
|
||||||
|
|
||||||
// call returns the result of evaluating the first argument as a function.
|
|
||||||
// The function must return 1 result, or 2 results, the second of which is an error.
|
|
||||||
func call(fn interface{}, args ...interface{}) (interface{}, error) {
|
|
||||||
v := reflect.ValueOf(fn)
|
|
||||||
typ := v.Type()
|
|
||||||
if typ.Kind() != reflect.Func {
|
|
||||||
return nil, fmt.Errorf("non-function of type %s", typ)
|
|
||||||
}
|
|
||||||
if !goodFunc(typ) {
|
|
||||||
return nil, fmt.Errorf("function called with %d args; should be 1 or 2", typ.NumOut())
|
|
||||||
}
|
|
||||||
numIn := typ.NumIn()
|
|
||||||
var dddType reflect.Type
|
|
||||||
if typ.IsVariadic() {
|
|
||||||
if len(args) < numIn-1 {
|
|
||||||
return nil, fmt.Errorf("wrong number of args: got %d want at least %d", len(args), numIn-1)
|
|
||||||
}
|
|
||||||
dddType = typ.In(numIn - 1).Elem()
|
|
||||||
} else {
|
|
||||||
if len(args) != numIn {
|
|
||||||
return nil, fmt.Errorf("wrong number of args: got %d want %d", len(args), numIn)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
argv := make([]reflect.Value, len(args))
|
|
||||||
for i, arg := range args {
|
|
||||||
value := reflect.ValueOf(arg)
|
|
||||||
// Compute the expected type. Clumsy because of variadics.
|
|
||||||
var argType reflect.Type
|
|
||||||
if !typ.IsVariadic() || i < numIn-1 {
|
|
||||||
argType = typ.In(i)
|
|
||||||
} else {
|
|
||||||
argType = dddType
|
|
||||||
}
|
|
||||||
if !value.IsValid() && canBeNil(argType) {
|
|
||||||
value = reflect.Zero(argType)
|
|
||||||
}
|
|
||||||
if !value.Type().AssignableTo(argType) {
|
|
||||||
return nil, fmt.Errorf("arg %d has type %s; should be %s", i, value.Type(), argType)
|
|
||||||
}
|
|
||||||
argv[i] = value
|
|
||||||
}
|
|
||||||
result := v.Call(argv)
|
|
||||||
if len(result) == 2 && !result[1].IsNil() {
|
|
||||||
return result[0].Interface(), result[1].Interface().(error)
|
|
||||||
}
|
|
||||||
return result[0].Interface(), nil
|
|
||||||
}
|
|
||||||
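A sketch of the call builtin: it invokes a function value reachable from the data with the remaining arguments, under the same one-result / result-plus-error rule as FuncMap entries. Standard text/template is shown; the fork behaves the same, and the data shape is invented for the example.

package main

import (
	"os"
	"strings"
	"text/template"
)

func main() {
	data := struct {
		Join func([]string, string) string
		Tags []string
	}{
		Join: strings.Join,
		Tags: []string{"go", "templates"},
	}
	// {{call .Join .Tags ", "}} invokes data.Join(data.Tags, ", ").
	t := template.Must(template.New("call").Parse(`{{call .Join .Tags ", "}}`))
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
	// Output: go, templates
}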
|
|
||||||
// Boolean logic.
|
|
||||||
|
|
||||||
func truth(a interface{}) bool {
|
|
||||||
t, _ := isTrue(reflect.ValueOf(a))
|
|
||||||
return t
|
|
||||||
}
|
|
||||||
|
|
||||||
// and computes the Boolean AND of its arguments, returning
|
|
||||||
// the first false argument it encounters, or the last argument.
|
|
||||||
func and(arg0 interface{}, args ...interface{}) interface{} {
|
|
||||||
if !truth(arg0) {
|
|
||||||
return arg0
|
|
||||||
}
|
|
||||||
for i := range args {
|
|
||||||
arg0 = args[i]
|
|
||||||
if !truth(arg0) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return arg0
|
|
||||||
}
|
|
||||||
|
|
||||||
// or computes the Boolean OR of its arguments, returning
|
|
||||||
// the first true argument it encounters, or the last argument.
|
|
||||||
func or(arg0 interface{}, args ...interface{}) interface{} {
|
|
||||||
if truth(arg0) {
|
|
||||||
return arg0
|
|
||||||
}
|
|
||||||
for i := range args {
|
|
||||||
arg0 = args[i]
|
|
||||||
if truth(arg0) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return arg0
|
|
||||||
}
|
|
||||||
|
|
||||||
// not returns the Boolean negation of its argument.
|
|
||||||
func not(arg interface{}) (truth bool) {
|
|
||||||
truth, _ = isTrue(reflect.ValueOf(arg))
|
|
||||||
return !truth
|
|
||||||
}
|
|
||||||
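As the comments above note, and/or return one of their operands rather than a bare bool, so they act like Go's && and || on template truth values, while not always returns a bool. A small sketch (standard text/template, invented data):

package main

import (
	"os"
	"text/template"
)

func main() {
	// and returns the first false argument (or the last one);
	// or returns the first true argument (or the last one).
	t := template.Must(template.New("bool").Parse(
		`{{and .Name .Count}} {{or .Nick .Name}} {{not .Count}}`))
	data := map[string]interface{}{"Name": "vikunja", "Count": 0, "Nick": ""}
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
	// Output: 0 vikunja true
}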
|
|
||||||
// Comparison.
|
|
||||||
|
|
||||||
// TODO: Perhaps allow comparison between signed and unsigned integers.
|
|
||||||
|
|
||||||
var (
|
|
||||||
errBadComparisonType = errors.New("invalid type for comparison")
|
|
||||||
errBadComparison = errors.New("incompatible types for comparison")
|
|
||||||
errNoComparison = errors.New("missing argument for comparison")
|
|
||||||
)
|
|
||||||
|
|
||||||
type kind int
|
|
||||||
|
|
||||||
const (
|
|
||||||
invalidKind kind = iota
|
|
||||||
boolKind
|
|
||||||
complexKind
|
|
||||||
intKind
|
|
||||||
floatKind
|
|
||||||
integerKind
|
|
||||||
stringKind
|
|
||||||
uintKind
|
|
||||||
)
|
|
||||||
|
|
||||||
func basicKind(v reflect.Value) (kind, error) {
|
|
||||||
switch v.Kind() {
|
|
||||||
case reflect.Bool:
|
|
||||||
return boolKind, nil
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
|
||||||
return intKind, nil
|
|
||||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
|
||||||
return uintKind, nil
|
|
||||||
case reflect.Float32, reflect.Float64:
|
|
||||||
return floatKind, nil
|
|
||||||
case reflect.Complex64, reflect.Complex128:
|
|
||||||
return complexKind, nil
|
|
||||||
case reflect.String:
|
|
||||||
return stringKind, nil
|
|
||||||
}
|
|
||||||
return invalidKind, errBadComparisonType
|
|
||||||
}
|
|
||||||
|
|
||||||
// eq evaluates the comparison a == b || a == c || ...
|
|
||||||
func eq(arg1 interface{}, arg2 ...interface{}) (bool, error) {
|
|
||||||
v1 := reflect.ValueOf(arg1)
|
|
||||||
k1, err := basicKind(v1)
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
if len(arg2) == 0 {
|
|
||||||
return false, errNoComparison
|
|
||||||
}
|
|
||||||
for _, arg := range arg2 {
|
|
||||||
v2 := reflect.ValueOf(arg)
|
|
||||||
k2, err := basicKind(v2)
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
truth := false
|
|
||||||
if k1 != k2 {
|
|
||||||
// Special case: Can compare integer values regardless of type's sign.
|
|
||||||
switch {
|
|
||||||
case k1 == intKind && k2 == uintKind:
|
|
||||||
truth = v1.Int() >= 0 && uint64(v1.Int()) == v2.Uint()
|
|
||||||
case k1 == uintKind && k2 == intKind:
|
|
||||||
truth = v2.Int() >= 0 && v1.Uint() == uint64(v2.Int())
|
|
||||||
default:
|
|
||||||
return false, errBadComparison
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
switch k1 {
|
|
||||||
case boolKind:
|
|
||||||
truth = v1.Bool() == v2.Bool()
|
|
||||||
case complexKind:
|
|
||||||
truth = v1.Complex() == v2.Complex()
|
|
||||||
case floatKind:
|
|
||||||
truth = v1.Float() == v2.Float()
|
|
||||||
case intKind:
|
|
||||||
truth = v1.Int() == v2.Int()
|
|
||||||
case stringKind:
|
|
||||||
truth = v1.String() == v2.String()
|
|
||||||
case uintKind:
|
|
||||||
truth = v1.Uint() == v2.Uint()
|
|
||||||
default:
|
|
||||||
panic("invalid kind")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if truth {
|
|
||||||
return true, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ne evaluates the comparison a != b.
|
|
||||||
func ne(arg1, arg2 interface{}) (bool, error) {
|
|
||||||
// != is the inverse of ==.
|
|
||||||
equal, err := eq(arg1, arg2)
|
|
||||||
return !equal, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// lt evaluates the comparison a < b.
|
|
||||||
func lt(arg1, arg2 interface{}) (bool, error) {
|
|
||||||
v1 := reflect.ValueOf(arg1)
|
|
||||||
k1, err := basicKind(v1)
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
v2 := reflect.ValueOf(arg2)
|
|
||||||
k2, err := basicKind(v2)
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
truth := false
|
|
||||||
if k1 != k2 {
|
|
||||||
// Special case: Can compare integer values regardless of type's sign.
|
|
||||||
switch {
|
|
||||||
case k1 == intKind && k2 == uintKind:
|
|
||||||
truth = v1.Int() < 0 || uint64(v1.Int()) < v2.Uint()
|
|
||||||
case k1 == uintKind && k2 == intKind:
|
|
||||||
truth = v2.Int() >= 0 && v1.Uint() < uint64(v2.Int())
|
|
||||||
default:
|
|
||||||
return false, errBadComparison
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
switch k1 {
|
|
||||||
case boolKind, complexKind:
|
|
||||||
return false, errBadComparisonType
|
|
||||||
case floatKind:
|
|
||||||
truth = v1.Float() < v2.Float()
|
|
||||||
case intKind:
|
|
||||||
truth = v1.Int() < v2.Int()
|
|
||||||
case stringKind:
|
|
||||||
truth = v1.String() < v2.String()
|
|
||||||
case uintKind:
|
|
||||||
truth = v1.Uint() < v2.Uint()
|
|
||||||
default:
|
|
||||||
panic("invalid kind")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return truth, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// le evaluates the comparison a <= b.
|
|
||||||
func le(arg1, arg2 interface{}) (bool, error) {
|
|
||||||
// <= is < or ==.
|
|
||||||
lessThan, err := lt(arg1, arg2)
|
|
||||||
if lessThan || err != nil {
|
|
||||||
return lessThan, err
|
|
||||||
}
|
|
||||||
return eq(arg1, arg2)
|
|
||||||
}
|
|
||||||
|
|
||||||
// gt evaluates the comparison a > b.
|
|
||||||
func gt(arg1, arg2 interface{}) (bool, error) {
|
|
||||||
// > is the inverse of <=.
|
|
||||||
lessOrEqual, err := le(arg1, arg2)
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
return !lessOrEqual, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ge evaluates the comparison a >= b.
|
|
||||||
func ge(arg1, arg2 interface{}) (bool, error) {
|
|
||||||
// >= is the inverse of <.
|
|
||||||
lessThan, err := lt(arg1, arg2)
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
return !lessThan, nil
|
|
||||||
}
|
|
||||||
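The comparison builtins above accept basic kinds only; eq additionally takes a list of candidates (a == b || a == c || ...), and mixed signed/unsigned integers go through the special case in eq and lt. A usage sketch with hypothetical data:

package main

import (
	"os"
	"text/template"
)

func main() {
	t := template.Must(template.New("cmp").Parse(
		`{{eq .State "open" "reopened"}} {{lt .Count 10}} {{ge .Count .Limit}}`))
	data := map[string]interface{}{
		"State": "reopened",
		"Count": int64(7),
		"Limit": uint(5), // signed/unsigned mix handled by the special case
	}
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
	// Output: true true true
}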
|
|
||||||
// HTML escaping.
|
|
||||||
|
|
||||||
var (
htmlQuot = []byte("&#34;") // shorter than "&quot;"
htmlApos = []byte("&#39;") // shorter than "&apos;" and apos was not in HTML until HTML5
htmlAmp = []byte("&amp;")
htmlLt = []byte("&lt;")
htmlGt = []byte("&gt;")
)
|
|
||||||
|
|
||||||
// HTMLEscape writes to w the escaped HTML equivalent of the plain text data b.
|
|
||||||
func HTMLEscape(w io.Writer, b []byte) {
|
|
||||||
last := 0
|
|
||||||
for i, c := range b {
|
|
||||||
var html []byte
|
|
||||||
switch c {
|
|
||||||
case '"':
|
|
||||||
html = htmlQuot
|
|
||||||
case '\'':
|
|
||||||
html = htmlApos
|
|
||||||
case '&':
|
|
||||||
html = htmlAmp
|
|
||||||
case '<':
|
|
||||||
html = htmlLt
|
|
||||||
case '>':
|
|
||||||
html = htmlGt
|
|
||||||
default:
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
w.Write(b[last:i])
|
|
||||||
w.Write(html)
|
|
||||||
last = i + 1
|
|
||||||
}
|
|
||||||
w.Write(b[last:])
|
|
||||||
}
|
|
||||||
|
|
||||||
// HTMLEscapeString returns the escaped HTML equivalent of the plain text data s.
|
|
||||||
func HTMLEscapeString(s string) string {
|
|
||||||
// Avoid allocation if we can.
|
|
||||||
if strings.IndexAny(s, `'"&<>`) < 0 {
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
var b bytes.Buffer
|
|
||||||
HTMLEscape(&b, []byte(s))
|
|
||||||
return b.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
// HTMLEscaper returns the escaped HTML equivalent of the textual
|
|
||||||
// representation of its arguments.
|
|
||||||
func HTMLEscaper(args ...interface{}) string {
|
|
||||||
return HTMLEscapeString(evalArgs(args))
|
|
||||||
}
|
|
||||||
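HTMLEscape, HTMLEscapeString and HTMLEscaper are also what the html pipeline builtin uses; a minimal sketch of calling them directly through the standard text/template package (which exports the same functions as this fork):

package main

import (
	"fmt"
	"os"
	"text/template"
)

func main() {
	raw := `<a href="/?q=1&p=2">it's here</a>`

	// String form: returns the input unchanged when nothing needs escaping.
	fmt.Println(template.HTMLEscapeString(raw))

	// Writer form: escapes straight into any io.Writer.
	template.HTMLEscape(os.Stdout, []byte(raw))
	fmt.Println()

	// Variadic form used by the "html" template builtin.
	fmt.Println(template.HTMLEscaper("<b>", 42))
}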
|
|
||||||
// JavaScript escaping.
|
|
||||||
|
|
||||||
var (
|
|
||||||
jsLowUni = []byte(`\u00`)
|
|
||||||
hex = []byte("0123456789ABCDEF")
|
|
||||||
|
|
||||||
jsBackslash = []byte(`\\`)
|
|
||||||
jsApos = []byte(`\'`)
|
|
||||||
jsQuot = []byte(`\"`)
|
|
||||||
jsLt = []byte(`\x3C`)
|
|
||||||
jsGt = []byte(`\x3E`)
|
|
||||||
)
|
|
||||||
|
|
||||||
// JSEscape writes to w the escaped JavaScript equivalent of the plain text data b.
|
|
||||||
func JSEscape(w io.Writer, b []byte) {
|
|
||||||
last := 0
|
|
||||||
for i := 0; i < len(b); i++ {
|
|
||||||
c := b[i]
|
|
||||||
|
|
||||||
if !jsIsSpecial(rune(c)) {
|
|
||||||
// fast path: nothing to do
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
w.Write(b[last:i])
|
|
||||||
|
|
||||||
if c < utf8.RuneSelf {
|
|
||||||
// Quotes, slashes and angle brackets get quoted.
|
|
||||||
// Control characters get written as \u00XX.
|
|
||||||
switch c {
|
|
||||||
case '\\':
|
|
||||||
w.Write(jsBackslash)
|
|
||||||
case '\'':
|
|
||||||
w.Write(jsApos)
|
|
||||||
case '"':
|
|
||||||
w.Write(jsQuot)
|
|
||||||
case '<':
|
|
||||||
w.Write(jsLt)
|
|
||||||
case '>':
|
|
||||||
w.Write(jsGt)
|
|
||||||
default:
|
|
||||||
w.Write(jsLowUni)
|
|
||||||
t, b := c>>4, c&0x0f
|
|
||||||
w.Write(hex[t : t+1])
|
|
||||||
w.Write(hex[b : b+1])
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Unicode rune.
|
|
||||||
r, size := utf8.DecodeRune(b[i:])
|
|
||||||
if unicode.IsPrint(r) {
|
|
||||||
w.Write(b[i : i+size])
|
|
||||||
} else {
|
|
||||||
fmt.Fprintf(w, "\\u%04X", r)
|
|
||||||
}
|
|
||||||
i += size - 1
|
|
||||||
}
|
|
||||||
last = i + 1
|
|
||||||
}
|
|
||||||
w.Write(b[last:])
|
|
||||||
}
|
|
||||||
|
|
||||||
// JSEscapeString returns the escaped JavaScript equivalent of the plain text data s.
|
|
||||||
func JSEscapeString(s string) string {
|
|
||||||
// Avoid allocation if we can.
|
|
||||||
if strings.IndexFunc(s, jsIsSpecial) < 0 {
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
var b bytes.Buffer
|
|
||||||
JSEscape(&b, []byte(s))
|
|
||||||
return b.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
func jsIsSpecial(r rune) bool {
|
|
||||||
switch r {
|
|
||||||
case '\\', '\'', '"', '<', '>':
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return r < ' ' || utf8.RuneSelf <= r
|
|
||||||
}
|
|
||||||
|
|
||||||
// JSEscaper returns the escaped JavaScript equivalent of the textual
|
|
||||||
// representation of its arguments.
|
|
||||||
func JSEscaper(args ...interface{}) string {
|
|
||||||
return JSEscapeString(evalArgs(args))
|
|
||||||
}
|
|
||||||
|
|
||||||
// URLQueryEscaper returns the escaped value of the textual representation of
|
|
||||||
// its arguments in a form suitable for embedding in a URL query.
|
|
||||||
func URLQueryEscaper(args ...interface{}) string {
|
|
||||||
return url.QueryEscape(evalArgs(args))
|
|
||||||
}
|
|
||||||
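JSEscaper and URLQueryEscaper back the js and urlquery pipeline builtins; a brief sketch of using them from a template (standard text/template shown, data invented):

package main

import (
	"os"
	"text/template"
)

func main() {
	// "js" makes a value safe inside a script string; "urlquery" inside a query parameter.
	t := template.Must(template.New("esc").Parse(
		`var name = "{{.Name | js}}";
link: /search?q={{.Query | urlquery}}
`))
	data := map[string]string{
		"Name":  `O'Brien <admin>`,
		"Query": "läuft & rennt",
	}
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}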
|
|
||||||
// evalArgs formats the list of arguments into a string. It is therefore equivalent to
|
|
||||||
// fmt.Sprint(args...)
|
|
||||||
// except that each argument is indirected (if a pointer), as required,
|
|
||||||
// using the same rules as the default string evaluation during template
|
|
||||||
// execution.
|
|
||||||
func evalArgs(args []interface{}) string {
|
|
||||||
ok := false
|
|
||||||
var s string
|
|
||||||
// Fast path for simple common case.
|
|
||||||
if len(args) == 1 {
|
|
||||||
s, ok = args[0].(string)
|
|
||||||
}
|
|
||||||
if !ok {
|
|
||||||
for i, arg := range args {
|
|
||||||
a, ok := printableValue(reflect.ValueOf(arg))
|
|
||||||
if ok {
|
|
||||||
args[i] = a
|
|
||||||
} // else let fmt do its thing
|
|
||||||
}
|
|
||||||
s = fmt.Sprint(args...)
|
|
||||||
}
|
|
||||||
return s
|
|
||||||
}
|
|
1 vendor/github.com/alecthomas/template/go.mod (generated, vendored)
@ -1 +0,0 @@
module github.com/alecthomas/template
|
|
108 vendor/github.com/alecthomas/template/helper.go (generated, vendored)
@ -1,108 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// Helper functions to make constructing templates easier.
|
|
||||||
|
|
||||||
package template
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"io/ioutil"
|
|
||||||
"path/filepath"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Functions and methods to parse templates.
|
|
||||||
|
|
||||||
// Must is a helper that wraps a call to a function returning (*Template, error)
|
|
||||||
// and panics if the error is non-nil. It is intended for use in variable
|
|
||||||
// initializations such as
|
|
||||||
// var t = template.Must(template.New("name").Parse("text"))
|
|
||||||
func Must(t *Template, err error) *Template {
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
return t
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseFiles creates a new Template and parses the template definitions from
|
|
||||||
// the named files. The returned template's name will have the (base) name and
|
|
||||||
// (parsed) contents of the first file. There must be at least one file.
|
|
||||||
// If an error occurs, parsing stops and the returned *Template is nil.
|
|
||||||
func ParseFiles(filenames ...string) (*Template, error) {
|
|
||||||
return parseFiles(nil, filenames...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseFiles parses the named files and associates the resulting templates with
|
|
||||||
// t. If an error occurs, parsing stops and the returned template is nil;
|
|
||||||
// otherwise it is t. There must be at least one file.
|
|
||||||
func (t *Template) ParseFiles(filenames ...string) (*Template, error) {
|
|
||||||
return parseFiles(t, filenames...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseFiles is the helper for the method and function. If the argument
|
|
||||||
// template is nil, it is created from the first file.
|
|
||||||
func parseFiles(t *Template, filenames ...string) (*Template, error) {
|
|
||||||
if len(filenames) == 0 {
|
|
||||||
// Not really a problem, but be consistent.
|
|
||||||
return nil, fmt.Errorf("template: no files named in call to ParseFiles")
|
|
||||||
}
|
|
||||||
for _, filename := range filenames {
|
|
||||||
b, err := ioutil.ReadFile(filename)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
s := string(b)
|
|
||||||
name := filepath.Base(filename)
|
|
||||||
// First template becomes return value if not already defined,
|
|
||||||
// and we use that one for subsequent New calls to associate
|
|
||||||
// all the templates together. Also, if this file has the same name
|
|
||||||
// as t, this file becomes the contents of t, so
|
|
||||||
// t, err := New(name).Funcs(xxx).ParseFiles(name)
|
|
||||||
// works. Otherwise we create a new template associated with t.
|
|
||||||
var tmpl *Template
|
|
||||||
if t == nil {
|
|
||||||
t = New(name)
|
|
||||||
}
|
|
||||||
if name == t.Name() {
|
|
||||||
tmpl = t
|
|
||||||
} else {
|
|
||||||
tmpl = t.New(name)
|
|
||||||
}
|
|
||||||
_, err = tmpl.Parse(s)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return t, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseGlob creates a new Template and parses the template definitions from the
|
|
||||||
// files identified by the pattern, which must match at least one file. The
|
|
||||||
// returned template will have the (base) name and (parsed) contents of the
|
|
||||||
// first file matched by the pattern. ParseGlob is equivalent to calling
|
|
||||||
// ParseFiles with the list of files matched by the pattern.
|
|
||||||
func ParseGlob(pattern string) (*Template, error) {
|
|
||||||
return parseGlob(nil, pattern)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseGlob parses the template definitions in the files identified by the
|
|
||||||
// pattern and associates the resulting templates with t. The pattern is
|
|
||||||
// processed by filepath.Glob and must match at least one file. ParseGlob is
|
|
||||||
// equivalent to calling t.ParseFiles with the list of files matched by the
|
|
||||||
// pattern.
|
|
||||||
func (t *Template) ParseGlob(pattern string) (*Template, error) {
|
|
||||||
return parseGlob(t, pattern)
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseGlob is the implementation of the function and method ParseGlob.
|
|
||||||
func parseGlob(t *Template, pattern string) (*Template, error) {
|
|
||||||
filenames, err := filepath.Glob(pattern)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if len(filenames) == 0 {
|
|
||||||
return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern)
|
|
||||||
}
|
|
||||||
return parseFiles(t, filenames...)
|
|
||||||
}
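A quick sketch of the helpers above in combination: Must panics on a parse error, and ParseGlob associates every matched file with one template set, addressed by base file name. The pattern and template name below are placeholders for this sketch, not files from the repository.

package main

import (
	"os"
	"text/template"
)

func main() {
	// Parse every matching file into one associated template set.
	// "templates/*.tmpl" is a hypothetical pattern.
	set := template.Must(template.ParseGlob("templates/*.tmpl"))

	// Execute one member of the set by its base file name.
	if err := set.ExecuteTemplate(os.Stdout, "list.tmpl", map[string]int{"Open": 3}); err != nil {
		panic(err)
	}
}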
|
|
556 vendor/github.com/alecthomas/template/parse/lex.go (generated, vendored)
@ -1,556 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package parse
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
// item represents a token or text string returned from the scanner.
|
|
||||||
type item struct {
|
|
||||||
typ itemType // The type of this item.
|
|
||||||
pos Pos // The starting position, in bytes, of this item in the input string.
|
|
||||||
val string // The value of this item.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i item) String() string {
|
|
||||||
switch {
|
|
||||||
case i.typ == itemEOF:
|
|
||||||
return "EOF"
|
|
||||||
case i.typ == itemError:
|
|
||||||
return i.val
|
|
||||||
case i.typ > itemKeyword:
|
|
||||||
return fmt.Sprintf("<%s>", i.val)
|
|
||||||
case len(i.val) > 10:
|
|
||||||
return fmt.Sprintf("%.10q...", i.val)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%q", i.val)
|
|
||||||
}
|
|
||||||
|
|
||||||
// itemType identifies the type of lex items.
|
|
||||||
type itemType int
|
|
||||||
|
|
||||||
const (
|
|
||||||
itemError itemType = iota // error occurred; value is text of error
|
|
||||||
itemBool // boolean constant
|
|
||||||
itemChar // printable ASCII character; grab bag for comma etc.
|
|
||||||
itemCharConstant // character constant
|
|
||||||
itemComplex // complex constant (1+2i); imaginary is just a number
|
|
||||||
itemColonEquals // colon-equals (':=') introducing a declaration
|
|
||||||
itemEOF
|
|
||||||
itemField // alphanumeric identifier starting with '.'
|
|
||||||
itemIdentifier // alphanumeric identifier not starting with '.'
|
|
||||||
itemLeftDelim // left action delimiter
|
|
||||||
itemLeftParen // '(' inside action
|
|
||||||
itemNumber // simple number, including imaginary
|
|
||||||
itemPipe // pipe symbol
|
|
||||||
itemRawString // raw quoted string (includes quotes)
|
|
||||||
itemRightDelim // right action delimiter
|
|
||||||
itemElideNewline // elide newline after right delim
|
|
||||||
itemRightParen // ')' inside action
|
|
||||||
itemSpace // run of spaces separating arguments
|
|
||||||
itemString // quoted string (includes quotes)
|
|
||||||
itemText // plain text
|
|
||||||
itemVariable // variable starting with '$', such as '$' or '$1' or '$hello'
|
|
||||||
// Keywords appear after all the rest.
|
|
||||||
itemKeyword // used only to delimit the keywords
|
|
||||||
itemDot // the cursor, spelled '.'
|
|
||||||
itemDefine // define keyword
|
|
||||||
itemElse // else keyword
|
|
||||||
itemEnd // end keyword
|
|
||||||
itemIf // if keyword
|
|
||||||
itemNil // the untyped nil constant, easiest to treat as a keyword
|
|
||||||
itemRange // range keyword
|
|
||||||
itemTemplate // template keyword
|
|
||||||
itemWith // with keyword
|
|
||||||
)
|
|
||||||
|
|
||||||
var key = map[string]itemType{
|
|
||||||
".": itemDot,
|
|
||||||
"define": itemDefine,
|
|
||||||
"else": itemElse,
|
|
||||||
"end": itemEnd,
|
|
||||||
"if": itemIf,
|
|
||||||
"range": itemRange,
|
|
||||||
"nil": itemNil,
|
|
||||||
"template": itemTemplate,
|
|
||||||
"with": itemWith,
|
|
||||||
}
|
|
||||||
|
|
||||||
const eof = -1
|
|
||||||
|
|
||||||
// stateFn represents the state of the scanner as a function that returns the next state.
|
|
||||||
type stateFn func(*lexer) stateFn
|
|
||||||
|
|
||||||
// lexer holds the state of the scanner.
|
|
||||||
type lexer struct {
|
|
||||||
name string // the name of the input; used only for error reports
|
|
||||||
input string // the string being scanned
|
|
||||||
leftDelim string // start of action
|
|
||||||
rightDelim string // end of action
|
|
||||||
state stateFn // the next lexing function to enter
|
|
||||||
pos Pos // current position in the input
|
|
||||||
start Pos // start position of this item
|
|
||||||
width Pos // width of last rune read from input
|
|
||||||
lastPos Pos // position of most recent item returned by nextItem
|
|
||||||
items chan item // channel of scanned items
|
|
||||||
parenDepth int // nesting depth of ( ) exprs
|
|
||||||
}
|
|
||||||
|
|
||||||
// next returns the next rune in the input.
|
|
||||||
func (l *lexer) next() rune {
|
|
||||||
if int(l.pos) >= len(l.input) {
|
|
||||||
l.width = 0
|
|
||||||
return eof
|
|
||||||
}
|
|
||||||
r, w := utf8.DecodeRuneInString(l.input[l.pos:])
|
|
||||||
l.width = Pos(w)
|
|
||||||
l.pos += l.width
|
|
||||||
return r
|
|
||||||
}
|
|
||||||
|
|
||||||
// peek returns but does not consume the next rune in the input.
|
|
||||||
func (l *lexer) peek() rune {
|
|
||||||
r := l.next()
|
|
||||||
l.backup()
|
|
||||||
return r
|
|
||||||
}
|
|
||||||
|
|
||||||
// backup steps back one rune. Can only be called once per call of next.
|
|
||||||
func (l *lexer) backup() {
|
|
||||||
l.pos -= l.width
|
|
||||||
}
|
|
||||||
|
|
||||||
// emit passes an item back to the client.
|
|
||||||
func (l *lexer) emit(t itemType) {
|
|
||||||
l.items <- item{t, l.start, l.input[l.start:l.pos]}
|
|
||||||
l.start = l.pos
|
|
||||||
}
|
|
||||||
|
|
||||||
// ignore skips over the pending input before this point.
|
|
||||||
func (l *lexer) ignore() {
|
|
||||||
l.start = l.pos
|
|
||||||
}
|
|
||||||
|
|
||||||
// accept consumes the next rune if it's from the valid set.
|
|
||||||
func (l *lexer) accept(valid string) bool {
|
|
||||||
if strings.IndexRune(valid, l.next()) >= 0 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
l.backup()
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// acceptRun consumes a run of runes from the valid set.
|
|
||||||
func (l *lexer) acceptRun(valid string) {
|
|
||||||
for strings.IndexRune(valid, l.next()) >= 0 {
|
|
||||||
}
|
|
||||||
l.backup()
|
|
||||||
}
|
|
||||||
|
|
||||||
// lineNumber reports which line we're on, based on the position of
|
|
||||||
// the previous item returned by nextItem. Doing it this way
|
|
||||||
// means we don't have to worry about peek double counting.
|
|
||||||
func (l *lexer) lineNumber() int {
|
|
||||||
return 1 + strings.Count(l.input[:l.lastPos], "\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
// errorf returns an error token and terminates the scan by passing
|
|
||||||
// back a nil pointer that will be the next state, terminating l.nextItem.
|
|
||||||
func (l *lexer) errorf(format string, args ...interface{}) stateFn {
|
|
||||||
l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// nextItem returns the next item from the input.
|
|
||||||
func (l *lexer) nextItem() item {
|
|
||||||
item := <-l.items
|
|
||||||
l.lastPos = item.pos
|
|
||||||
return item
|
|
||||||
}
|
|
||||||
|
|
||||||
// lex creates a new scanner for the input string.
|
|
||||||
func lex(name, input, left, right string) *lexer {
|
|
||||||
if left == "" {
|
|
||||||
left = leftDelim
|
|
||||||
}
|
|
||||||
if right == "" {
|
|
||||||
right = rightDelim
|
|
||||||
}
|
|
||||||
l := &lexer{
|
|
||||||
name: name,
|
|
||||||
input: input,
|
|
||||||
leftDelim: left,
|
|
||||||
rightDelim: right,
|
|
||||||
items: make(chan item),
|
|
||||||
}
|
|
||||||
go l.run()
|
|
||||||
return l
|
|
||||||
}
|
|
||||||
|
|
||||||
// run runs the state machine for the lexer.
|
|
||||||
func (l *lexer) run() {
|
|
||||||
for l.state = lexText; l.state != nil; {
|
|
||||||
l.state = l.state(l)
|
|
||||||
}
|
|
||||||
}
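The lexer above is the classic functional state machine: each stateFn returns the next state, and items flow out over a channel while run drives the loop in a goroutine. A tiny standalone sketch of the same pattern, independent of the template grammar and entirely illustrative:

package main

import "fmt"

// stateFn returns the next state, or nil to stop (same shape as the lexer above).
type stateFn func(*machine) stateFn

type machine struct {
	input string
	pos   int
	out   chan string
}

// lexWords emits space-separated words, then closes the channel.
func lexWords(m *machine) stateFn {
	start := m.pos
	for m.pos < len(m.input) && m.input[m.pos] != ' ' {
		m.pos++
	}
	if m.pos > start {
		m.out <- m.input[start:m.pos]
	}
	if m.pos == len(m.input) {
		close(m.out)
		return nil
	}
	m.pos++ // skip the space
	return lexWords
}

func main() {
	m := &machine{input: "run the state machine", out: make(chan string)}
	go func() {
		for state := lexWords; state != nil; {
			state = state(m)
		}
	}()
	for word := range m.out {
		fmt.Println(word)
	}
}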
|
|
||||||
|
|
||||||
// state functions
|
|
||||||
|
|
||||||
const (
|
|
||||||
leftDelim = "{{"
|
|
||||||
rightDelim = "}}"
|
|
||||||
leftComment = "/*"
|
|
||||||
rightComment = "*/"
|
|
||||||
)
|
|
||||||
|
|
||||||
// lexText scans until an opening action delimiter, "{{".
|
|
||||||
func lexText(l *lexer) stateFn {
|
|
||||||
for {
|
|
||||||
if strings.HasPrefix(l.input[l.pos:], l.leftDelim) {
|
|
||||||
if l.pos > l.start {
|
|
||||||
l.emit(itemText)
|
|
||||||
}
|
|
||||||
return lexLeftDelim
|
|
||||||
}
|
|
||||||
if l.next() == eof {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Correctly reached EOF.
|
|
||||||
if l.pos > l.start {
|
|
||||||
l.emit(itemText)
|
|
||||||
}
|
|
||||||
l.emit(itemEOF)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexLeftDelim scans the left delimiter, which is known to be present.
|
|
||||||
func lexLeftDelim(l *lexer) stateFn {
|
|
||||||
l.pos += Pos(len(l.leftDelim))
|
|
||||||
if strings.HasPrefix(l.input[l.pos:], leftComment) {
|
|
||||||
return lexComment
|
|
||||||
}
|
|
||||||
l.emit(itemLeftDelim)
|
|
||||||
l.parenDepth = 0
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexComment scans a comment. The left comment marker is known to be present.
|
|
||||||
func lexComment(l *lexer) stateFn {
|
|
||||||
l.pos += Pos(len(leftComment))
|
|
||||||
i := strings.Index(l.input[l.pos:], rightComment)
|
|
||||||
if i < 0 {
|
|
||||||
return l.errorf("unclosed comment")
|
|
||||||
}
|
|
||||||
l.pos += Pos(i + len(rightComment))
|
|
||||||
if !strings.HasPrefix(l.input[l.pos:], l.rightDelim) {
|
|
||||||
return l.errorf("comment ends before closing delimiter")
|
|
||||||
|
|
||||||
}
|
|
||||||
l.pos += Pos(len(l.rightDelim))
|
|
||||||
l.ignore()
|
|
||||||
return lexText
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexRightDelim scans the right delimiter, which is known to be present.
|
|
||||||
func lexRightDelim(l *lexer) stateFn {
|
|
||||||
l.pos += Pos(len(l.rightDelim))
|
|
||||||
l.emit(itemRightDelim)
|
|
||||||
if l.peek() == '\\' {
|
|
||||||
l.pos++
|
|
||||||
l.emit(itemElideNewline)
|
|
||||||
}
|
|
||||||
return lexText
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexInsideAction scans the elements inside action delimiters.
|
|
||||||
func lexInsideAction(l *lexer) stateFn {
|
|
||||||
// Either number, quoted string, or identifier.
|
|
||||||
// Spaces separate arguments; runs of spaces turn into itemSpace.
|
|
||||||
// Pipe symbols separate and are emitted.
|
|
||||||
if strings.HasPrefix(l.input[l.pos:], l.rightDelim+"\\") || strings.HasPrefix(l.input[l.pos:], l.rightDelim) {
|
|
||||||
if l.parenDepth == 0 {
|
|
||||||
return lexRightDelim
|
|
||||||
}
|
|
||||||
return l.errorf("unclosed left paren")
|
|
||||||
}
|
|
||||||
switch r := l.next(); {
|
|
||||||
case r == eof || isEndOfLine(r):
|
|
||||||
return l.errorf("unclosed action")
|
|
||||||
case isSpace(r):
|
|
||||||
return lexSpace
|
|
||||||
case r == ':':
|
|
||||||
if l.next() != '=' {
|
|
||||||
return l.errorf("expected :=")
|
|
||||||
}
|
|
||||||
l.emit(itemColonEquals)
|
|
||||||
case r == '|':
|
|
||||||
l.emit(itemPipe)
|
|
||||||
case r == '"':
|
|
||||||
return lexQuote
|
|
||||||
case r == '`':
|
|
||||||
return lexRawQuote
|
|
||||||
case r == '$':
|
|
||||||
return lexVariable
|
|
||||||
case r == '\'':
|
|
||||||
return lexChar
|
|
||||||
case r == '.':
|
|
||||||
// special look-ahead for ".field" so we don't break l.backup().
|
|
||||||
if l.pos < Pos(len(l.input)) {
|
|
||||||
r := l.input[l.pos]
|
|
||||||
if r < '0' || '9' < r {
|
|
||||||
return lexField
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fallthrough // '.' can start a number.
|
|
||||||
case r == '+' || r == '-' || ('0' <= r && r <= '9'):
|
|
||||||
l.backup()
|
|
||||||
return lexNumber
|
|
||||||
case isAlphaNumeric(r):
|
|
||||||
l.backup()
|
|
||||||
return lexIdentifier
|
|
||||||
case r == '(':
|
|
||||||
l.emit(itemLeftParen)
|
|
||||||
l.parenDepth++
|
|
||||||
return lexInsideAction
|
|
||||||
case r == ')':
|
|
||||||
l.emit(itemRightParen)
|
|
||||||
l.parenDepth--
|
|
||||||
if l.parenDepth < 0 {
|
|
||||||
return l.errorf("unexpected right paren %#U", r)
|
|
||||||
}
|
|
||||||
return lexInsideAction
|
|
||||||
case r <= unicode.MaxASCII && unicode.IsPrint(r):
|
|
||||||
l.emit(itemChar)
|
|
||||||
return lexInsideAction
|
|
||||||
default:
|
|
||||||
return l.errorf("unrecognized character in action: %#U", r)
|
|
||||||
}
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexSpace scans a run of space characters.
|
|
||||||
// One space has already been seen.
|
|
||||||
func lexSpace(l *lexer) stateFn {
|
|
||||||
for isSpace(l.peek()) {
|
|
||||||
l.next()
|
|
||||||
}
|
|
||||||
l.emit(itemSpace)
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexIdentifier scans an alphanumeric.
|
|
||||||
func lexIdentifier(l *lexer) stateFn {
|
|
||||||
Loop:
|
|
||||||
for {
|
|
||||||
switch r := l.next(); {
|
|
||||||
case isAlphaNumeric(r):
|
|
||||||
// absorb.
|
|
||||||
default:
|
|
||||||
l.backup()
|
|
||||||
word := l.input[l.start:l.pos]
|
|
||||||
if !l.atTerminator() {
|
|
||||||
return l.errorf("bad character %#U", r)
|
|
||||||
}
|
|
||||||
switch {
|
|
||||||
case key[word] > itemKeyword:
|
|
||||||
l.emit(key[word])
|
|
||||||
case word[0] == '.':
|
|
||||||
l.emit(itemField)
|
|
||||||
case word == "true", word == "false":
|
|
||||||
l.emit(itemBool)
|
|
||||||
default:
|
|
||||||
l.emit(itemIdentifier)
|
|
||||||
}
|
|
||||||
break Loop
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexField scans a field: .Alphanumeric.
|
|
||||||
// The . has been scanned.
|
|
||||||
func lexField(l *lexer) stateFn {
|
|
||||||
return lexFieldOrVariable(l, itemField)
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexVariable scans a Variable: $Alphanumeric.
|
|
||||||
// The $ has been scanned.
|
|
||||||
func lexVariable(l *lexer) stateFn {
|
|
||||||
if l.atTerminator() { // Nothing interesting follows -> "$".
|
|
||||||
l.emit(itemVariable)
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
return lexFieldOrVariable(l, itemVariable)
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexFieldOrVariable scans a field or variable: [.$]Alphanumeric.
|
|
||||||
// The . or $ has been scanned.
|
|
||||||
func lexFieldOrVariable(l *lexer, typ itemType) stateFn {
|
|
||||||
if l.atTerminator() { // Nothing interesting follows -> "." or "$".
|
|
||||||
if typ == itemVariable {
|
|
||||||
l.emit(itemVariable)
|
|
||||||
} else {
|
|
||||||
l.emit(itemDot)
|
|
||||||
}
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
var r rune
|
|
||||||
for {
|
|
||||||
r = l.next()
|
|
||||||
if !isAlphaNumeric(r) {
|
|
||||||
l.backup()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !l.atTerminator() {
|
|
||||||
return l.errorf("bad character %#U", r)
|
|
||||||
}
|
|
||||||
l.emit(typ)
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
|
|
||||||
// atTerminator reports whether the input is at a valid termination character to
|
|
||||||
// appear after an identifier. Breaks .X.Y into two pieces. Also catches cases
|
|
||||||
// like "$x+2" not being acceptable without a space, in case we decide one
|
|
||||||
// day to implement arithmetic.
|
|
||||||
func (l *lexer) atTerminator() bool {
|
|
||||||
r := l.peek()
|
|
||||||
if isSpace(r) || isEndOfLine(r) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
switch r {
|
|
||||||
case eof, '.', ',', '|', ':', ')', '(':
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
// Does r start the delimiter? This can be ambiguous (with delim=="//", $x/2 will
|
|
||||||
// succeed but should fail) but only in extremely rare cases caused by willfully
|
|
||||||
// bad choice of delimiter.
|
|
||||||
if rd, _ := utf8.DecodeRuneInString(l.rightDelim); rd == r {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexChar scans a character constant. The initial quote is already
|
|
||||||
// scanned. Syntax checking is done by the parser.
|
|
||||||
func lexChar(l *lexer) stateFn {
|
|
||||||
Loop:
|
|
||||||
for {
|
|
||||||
switch l.next() {
|
|
||||||
case '\\':
|
|
||||||
if r := l.next(); r != eof && r != '\n' {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
fallthrough
|
|
||||||
case eof, '\n':
|
|
||||||
return l.errorf("unterminated character constant")
|
|
||||||
case '\'':
|
|
||||||
break Loop
|
|
||||||
}
|
|
||||||
}
|
|
||||||
l.emit(itemCharConstant)
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This
|
|
||||||
// isn't a perfect number scanner - for instance it accepts "." and "0x0.2"
|
|
||||||
// and "089" - but when it's wrong the input is invalid and the parser (via
|
|
||||||
// strconv) will notice.
|
|
||||||
func lexNumber(l *lexer) stateFn {
|
|
||||||
if !l.scanNumber() {
|
|
||||||
return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
|
|
||||||
}
|
|
||||||
if sign := l.peek(); sign == '+' || sign == '-' {
|
|
||||||
// Complex: 1+2i. No spaces, must end in 'i'.
|
|
||||||
if !l.scanNumber() || l.input[l.pos-1] != 'i' {
|
|
||||||
return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
|
|
||||||
}
|
|
||||||
l.emit(itemComplex)
|
|
||||||
} else {
|
|
||||||
l.emit(itemNumber)
|
|
||||||
}
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *lexer) scanNumber() bool {
|
|
||||||
// Optional leading sign.
|
|
||||||
l.accept("+-")
|
|
||||||
// Is it hex?
|
|
||||||
digits := "0123456789"
|
|
||||||
if l.accept("0") && l.accept("xX") {
|
|
||||||
digits = "0123456789abcdefABCDEF"
|
|
||||||
}
|
|
||||||
l.acceptRun(digits)
|
|
||||||
if l.accept(".") {
|
|
||||||
l.acceptRun(digits)
|
|
||||||
}
|
|
||||||
if l.accept("eE") {
|
|
||||||
l.accept("+-")
|
|
||||||
l.acceptRun("0123456789")
|
|
||||||
}
|
|
||||||
// Is it imaginary?
|
|
||||||
l.accept("i")
|
|
||||||
// Next thing mustn't be alphanumeric.
|
|
||||||
if isAlphaNumeric(l.peek()) {
|
|
||||||
l.next()
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexQuote scans a quoted string.
|
|
||||||
func lexQuote(l *lexer) stateFn {
|
|
||||||
Loop:
|
|
||||||
for {
|
|
||||||
switch l.next() {
|
|
||||||
case '\\':
|
|
||||||
if r := l.next(); r != eof && r != '\n' {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
fallthrough
|
|
||||||
case eof, '\n':
|
|
||||||
return l.errorf("unterminated quoted string")
|
|
||||||
case '"':
|
|
||||||
break Loop
|
|
||||||
}
|
|
||||||
}
|
|
||||||
l.emit(itemString)
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
|
|
||||||
// lexRawQuote scans a raw quoted string.
|
|
||||||
func lexRawQuote(l *lexer) stateFn {
|
|
||||||
Loop:
|
|
||||||
for {
|
|
||||||
switch l.next() {
|
|
||||||
case eof, '\n':
|
|
||||||
return l.errorf("unterminated raw quoted string")
|
|
||||||
case '`':
|
|
||||||
break Loop
|
|
||||||
}
|
|
||||||
}
|
|
||||||
l.emit(itemRawString)
|
|
||||||
return lexInsideAction
|
|
||||||
}
|
|
||||||
|
|
||||||
// isSpace reports whether r is a space character.
|
|
||||||
func isSpace(r rune) bool {
|
|
||||||
return r == ' ' || r == '\t'
|
|
||||||
}
|
|
||||||
|
|
||||||
// isEndOfLine reports whether r is an end-of-line character.
|
|
||||||
func isEndOfLine(r rune) bool {
|
|
||||||
return r == '\r' || r == '\n'
|
|
||||||
}
|
|
||||||
|
|
||||||
// isAlphaNumeric reports whether r is an alphabetic, digit, or underscore.
|
|
||||||
func isAlphaNumeric(r rune) bool {
|
|
||||||
return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
|
|
||||||
}
|
|
834 vendor/github.com/alecthomas/template/parse/node.go (generated, vendored)
@ -1,834 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// Parse nodes.
|
|
||||||
|
|
||||||
package parse
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
var textFormat = "%s" // Changed to "%q" in tests for better error messages.
|
|
||||||
|
|
||||||
// A Node is an element in the parse tree. The interface is trivial.
|
|
||||||
// The interface contains an unexported method so that only
|
|
||||||
// types local to this package can satisfy it.
|
|
||||||
type Node interface {
|
|
||||||
Type() NodeType
|
|
||||||
String() string
|
|
||||||
// Copy does a deep copy of the Node and all its components.
|
|
||||||
// To avoid type assertions, some XxxNodes also have specialized
|
|
||||||
// CopyXxx methods that return *XxxNode.
|
|
||||||
Copy() Node
|
|
||||||
Position() Pos // byte position of start of node in full original input string
|
|
||||||
// tree returns the containing *Tree.
|
|
||||||
// It is unexported so all implementations of Node are in this package.
|
|
||||||
tree() *Tree
|
|
||||||
}
|
|
||||||
|
|
||||||
// NodeType identifies the type of a parse tree node.
|
|
||||||
type NodeType int
|
|
||||||
|
|
||||||
// Pos represents a byte position in the original input text from which
|
|
||||||
// this template was parsed.
|
|
||||||
type Pos int
|
|
||||||
|
|
||||||
func (p Pos) Position() Pos {
|
|
||||||
return p
|
|
||||||
}
|
|
||||||
|
|
||||||
// Type returns itself and provides an easy default implementation
|
|
||||||
// for embedding in a Node. Embedded in all non-trivial Nodes.
|
|
||||||
func (t NodeType) Type() NodeType {
|
|
||||||
return t
|
|
||||||
}
|
|
||||||
|
|
||||||
const (
|
|
||||||
NodeText NodeType = iota // Plain text.
|
|
||||||
NodeAction // A non-control action such as a field evaluation.
|
|
||||||
NodeBool // A boolean constant.
|
|
||||||
NodeChain // A sequence of field accesses.
|
|
||||||
NodeCommand // An element of a pipeline.
|
|
||||||
NodeDot // The cursor, dot.
|
|
||||||
nodeElse // An else action. Not added to tree.
|
|
||||||
nodeEnd // An end action. Not added to tree.
|
|
||||||
NodeField // A field or method name.
|
|
||||||
NodeIdentifier // An identifier; always a function name.
|
|
||||||
NodeIf // An if action.
|
|
||||||
NodeList // A list of Nodes.
|
|
||||||
NodeNil // An untyped nil constant.
|
|
||||||
NodeNumber // A numerical constant.
|
|
||||||
NodePipe // A pipeline of commands.
|
|
||||||
NodeRange // A range action.
|
|
||||||
NodeString // A string constant.
|
|
||||||
NodeTemplate // A template invocation action.
|
|
||||||
NodeVariable // A $ variable.
|
|
||||||
NodeWith // A with action.
|
|
||||||
)
|
|
||||||
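For context on how these node types surface to callers, a brief sketch that parses a template with the standard text/template/parse package (which this vendored copy tracks) and walks the resulting tree; the template text is invented for the example:

package main

import (
	"fmt"
	"text/template/parse"
)

func main() {
	// Parse one template into a map of name -> *parse.Tree.
	trees, err := parse.Parse("demo", "Hello {{.Name}}!", "{{", "}}")
	if err != nil {
		panic(err)
	}
	// Walk the top-level ListNode and report each node's concrete type and text.
	for _, n := range trees["demo"].Root.Nodes {
		fmt.Printf("%T %q\n", n, n.String())
	}
	// Output (roughly):
	// *parse.TextNode "Hello "
	// *parse.ActionNode "{{.Name}}"
	// *parse.TextNode "!"
}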
|
|
||||||
// Nodes.
|
|
||||||
|
|
||||||
// ListNode holds a sequence of nodes.
|
|
||||||
type ListNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Nodes []Node // The element nodes in lexical order.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newList(pos Pos) *ListNode {
|
|
||||||
return &ListNode{tr: t, NodeType: NodeList, Pos: pos}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *ListNode) append(n Node) {
|
|
||||||
l.Nodes = append(l.Nodes, n)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *ListNode) tree() *Tree {
|
|
||||||
return l.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *ListNode) String() string {
|
|
||||||
b := new(bytes.Buffer)
|
|
||||||
for _, n := range l.Nodes {
|
|
||||||
fmt.Fprint(b, n)
|
|
||||||
}
|
|
||||||
return b.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *ListNode) CopyList() *ListNode {
|
|
||||||
if l == nil {
|
|
||||||
return l
|
|
||||||
}
|
|
||||||
n := l.tr.newList(l.Pos)
|
|
||||||
for _, elem := range l.Nodes {
|
|
||||||
n.append(elem.Copy())
|
|
||||||
}
|
|
||||||
return n
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *ListNode) Copy() Node {
|
|
||||||
return l.CopyList()
|
|
||||||
}
|
|
||||||
|
|
||||||
// TextNode holds plain text.
|
|
||||||
type TextNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Text []byte // The text; may span newlines.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newText(pos Pos, text string) *TextNode {
|
|
||||||
return &TextNode{tr: t, NodeType: NodeText, Pos: pos, Text: []byte(text)}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *TextNode) String() string {
|
|
||||||
return fmt.Sprintf(textFormat, t.Text)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *TextNode) tree() *Tree {
|
|
||||||
return t.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *TextNode) Copy() Node {
|
|
||||||
return &TextNode{tr: t.tr, NodeType: NodeText, Pos: t.Pos, Text: append([]byte{}, t.Text...)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// PipeNode holds a pipeline with optional declaration
|
|
||||||
type PipeNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
|
||||||
Decl []*VariableNode // Variable declarations in lexical order.
|
|
||||||
Cmds []*CommandNode // The commands in lexical order.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newPipeline(pos Pos, line int, decl []*VariableNode) *PipeNode {
|
|
||||||
return &PipeNode{tr: t, NodeType: NodePipe, Pos: pos, Line: line, Decl: decl}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *PipeNode) append(command *CommandNode) {
|
|
||||||
p.Cmds = append(p.Cmds, command)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *PipeNode) String() string {
|
|
||||||
s := ""
|
|
||||||
if len(p.Decl) > 0 {
|
|
||||||
for i, v := range p.Decl {
|
|
||||||
if i > 0 {
|
|
||||||
s += ", "
|
|
||||||
}
|
|
||||||
s += v.String()
|
|
||||||
}
|
|
||||||
s += " := "
|
|
||||||
}
|
|
||||||
for i, c := range p.Cmds {
|
|
||||||
if i > 0 {
|
|
||||||
s += " | "
|
|
||||||
}
|
|
||||||
s += c.String()
|
|
||||||
}
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *PipeNode) tree() *Tree {
|
|
||||||
return p.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *PipeNode) CopyPipe() *PipeNode {
|
|
||||||
if p == nil {
|
|
||||||
return p
|
|
||||||
}
|
|
||||||
var decl []*VariableNode
|
|
||||||
for _, d := range p.Decl {
|
|
||||||
decl = append(decl, d.Copy().(*VariableNode))
|
|
||||||
}
|
|
||||||
n := p.tr.newPipeline(p.Pos, p.Line, decl)
|
|
||||||
for _, c := range p.Cmds {
|
|
||||||
n.append(c.Copy().(*CommandNode))
|
|
||||||
}
|
|
||||||
return n
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *PipeNode) Copy() Node {
|
|
||||||
return p.CopyPipe()
|
|
||||||
}
|
|
||||||
|
|
||||||
// ActionNode holds an action (something bounded by delimiters).
|
|
||||||
// Control actions have their own nodes; ActionNode represents simple
|
|
||||||
// ones such as field evaluations and parenthesized pipelines.
|
|
||||||
type ActionNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
|
||||||
Pipe *PipeNode // The pipeline in the action.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newAction(pos Pos, line int, pipe *PipeNode) *ActionNode {
|
|
||||||
return &ActionNode{tr: t, NodeType: NodeAction, Pos: pos, Line: line, Pipe: pipe}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *ActionNode) String() string {
|
|
||||||
return fmt.Sprintf("{{%s}}", a.Pipe)
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *ActionNode) tree() *Tree {
|
|
||||||
return a.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *ActionNode) Copy() Node {
|
|
||||||
return a.tr.newAction(a.Pos, a.Line, a.Pipe.CopyPipe())
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
// CommandNode holds a command (a pipeline inside an evaluating action).
|
|
||||||
type CommandNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Args []Node // Arguments in lexical order: Identifier, field, or constant.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newCommand(pos Pos) *CommandNode {
|
|
||||||
return &CommandNode{tr: t, NodeType: NodeCommand, Pos: pos}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *CommandNode) append(arg Node) {
|
|
||||||
c.Args = append(c.Args, arg)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *CommandNode) String() string {
|
|
||||||
s := ""
|
|
||||||
for i, arg := range c.Args {
|
|
||||||
if i > 0 {
|
|
||||||
s += " "
|
|
||||||
}
|
|
||||||
if arg, ok := arg.(*PipeNode); ok {
|
|
||||||
s += "(" + arg.String() + ")"
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
s += arg.String()
|
|
||||||
}
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *CommandNode) tree() *Tree {
|
|
||||||
return c.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *CommandNode) Copy() Node {
|
|
||||||
if c == nil {
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
n := c.tr.newCommand(c.Pos)
|
|
||||||
for _, c := range c.Args {
|
|
||||||
n.append(c.Copy())
|
|
||||||
}
|
|
||||||
return n
|
|
||||||
}
|
|
||||||
|
|
||||||
// IdentifierNode holds an identifier.
|
|
||||||
type IdentifierNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Ident string // The identifier's name.
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewIdentifier returns a new IdentifierNode with the given identifier name.
|
|
||||||
func NewIdentifier(ident string) *IdentifierNode {
|
|
||||||
return &IdentifierNode{NodeType: NodeIdentifier, Ident: ident}
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetPos sets the position. NewIdentifier is a public method so we can't modify its signature.
|
|
||||||
// Chained for convenience.
|
|
||||||
// TODO: fix one day?
|
|
||||||
func (i *IdentifierNode) SetPos(pos Pos) *IdentifierNode {
|
|
||||||
i.Pos = pos
|
|
||||||
return i
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetTree sets the parent tree for the node. NewIdentifier is a public method so we can't modify its signature.
|
|
||||||
// Chained for convenience.
|
|
||||||
// TODO: fix one day?
|
|
||||||
func (i *IdentifierNode) SetTree(t *Tree) *IdentifierNode {
|
|
||||||
i.tr = t
|
|
||||||
return i
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *IdentifierNode) String() string {
|
|
||||||
return i.Ident
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *IdentifierNode) tree() *Tree {
|
|
||||||
return i.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *IdentifierNode) Copy() Node {
|
|
||||||
return NewIdentifier(i.Ident).SetTree(i.tr).SetPos(i.Pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
// VariableNode holds a list of variable names, possibly with chained field
|
|
||||||
// accesses. The dollar sign is part of the (first) name.
|
|
||||||
type VariableNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Ident []string // Variable name and fields in lexical order.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newVariable(pos Pos, ident string) *VariableNode {
|
|
||||||
return &VariableNode{tr: t, NodeType: NodeVariable, Pos: pos, Ident: strings.Split(ident, ".")}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v *VariableNode) String() string {
|
|
||||||
s := ""
|
|
||||||
for i, id := range v.Ident {
|
|
||||||
if i > 0 {
|
|
||||||
s += "."
|
|
||||||
}
|
|
||||||
s += id
|
|
||||||
}
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v *VariableNode) tree() *Tree {
|
|
||||||
return v.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v *VariableNode) Copy() Node {
|
|
||||||
return &VariableNode{tr: v.tr, NodeType: NodeVariable, Pos: v.Pos, Ident: append([]string{}, v.Ident...)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DotNode holds the special identifier '.'.
|
|
||||||
type DotNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newDot(pos Pos) *DotNode {
|
|
||||||
return &DotNode{tr: t, NodeType: NodeDot, Pos: pos}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *DotNode) Type() NodeType {
|
|
||||||
// Override method on embedded NodeType for API compatibility.
|
|
||||||
// TODO: Not really a problem; could change API without effect but
|
|
||||||
// api tool complains.
|
|
||||||
return NodeDot
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *DotNode) String() string {
|
|
||||||
return "."
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *DotNode) tree() *Tree {
|
|
||||||
return d.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *DotNode) Copy() Node {
|
|
||||||
return d.tr.newDot(d.Pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
// NilNode holds the special identifier 'nil' representing an untyped nil constant.
|
|
||||||
type NilNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newNil(pos Pos) *NilNode {
|
|
||||||
return &NilNode{tr: t, NodeType: NodeNil, Pos: pos}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *NilNode) Type() NodeType {
|
|
||||||
// Override method on embedded NodeType for API compatibility.
|
|
||||||
// TODO: Not really a problem; could change API without effect but
|
|
||||||
// api tool complains.
|
|
||||||
return NodeNil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *NilNode) String() string {
|
|
||||||
return "nil"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *NilNode) tree() *Tree {
|
|
||||||
return n.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *NilNode) Copy() Node {
|
|
||||||
return n.tr.newNil(n.Pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
// FieldNode holds a field (identifier starting with '.').
|
|
||||||
// The names may be chained ('.x.y').
|
|
||||||
// The period is dropped from each ident.
|
|
||||||
type FieldNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Ident []string // The identifiers in lexical order.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newField(pos Pos, ident string) *FieldNode {
|
|
||||||
return &FieldNode{tr: t, NodeType: NodeField, Pos: pos, Ident: strings.Split(ident[1:], ".")} // [1:] to drop leading period
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *FieldNode) String() string {
|
|
||||||
s := ""
|
|
||||||
for _, id := range f.Ident {
|
|
||||||
s += "." + id
|
|
||||||
}
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *FieldNode) tree() *Tree {
|
|
||||||
return f.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *FieldNode) Copy() Node {
|
|
||||||
return &FieldNode{tr: f.tr, NodeType: NodeField, Pos: f.Pos, Ident: append([]string{}, f.Ident...)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ChainNode holds a term followed by a chain of field accesses (identifier starting with '.').
|
|
||||||
// The names may be chained ('.x.y').
|
|
||||||
// The periods are dropped from each ident.
|
|
||||||
type ChainNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Node Node
|
|
||||||
Field []string // The identifiers in lexical order.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newChain(pos Pos, node Node) *ChainNode {
|
|
||||||
return &ChainNode{tr: t, NodeType: NodeChain, Pos: pos, Node: node}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add adds the named field (which should start with a period) to the end of the chain.
|
|
||||||
func (c *ChainNode) Add(field string) {
|
|
||||||
if len(field) == 0 || field[0] != '.' {
|
|
||||||
panic("no dot in field")
|
|
||||||
}
|
|
||||||
field = field[1:] // Remove leading dot.
|
|
||||||
if field == "" {
|
|
||||||
panic("empty field")
|
|
||||||
}
|
|
||||||
c.Field = append(c.Field, field)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *ChainNode) String() string {
|
|
||||||
s := c.Node.String()
|
|
||||||
if _, ok := c.Node.(*PipeNode); ok {
|
|
||||||
s = "(" + s + ")"
|
|
||||||
}
|
|
||||||
for _, field := range c.Field {
|
|
||||||
s += "." + field
|
|
||||||
}
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *ChainNode) tree() *Tree {
|
|
||||||
return c.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *ChainNode) Copy() Node {
|
|
||||||
return &ChainNode{tr: c.tr, NodeType: NodeChain, Pos: c.Pos, Node: c.Node, Field: append([]string{}, c.Field...)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// BoolNode holds a boolean constant.
|
|
||||||
type BoolNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
True bool // The value of the boolean constant.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newBool(pos Pos, true bool) *BoolNode {
|
|
||||||
return &BoolNode{tr: t, NodeType: NodeBool, Pos: pos, True: true}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *BoolNode) String() string {
|
|
||||||
if b.True {
|
|
||||||
return "true"
|
|
||||||
}
|
|
||||||
return "false"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *BoolNode) tree() *Tree {
|
|
||||||
return b.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *BoolNode) Copy() Node {
|
|
||||||
return b.tr.newBool(b.Pos, b.True)
|
|
||||||
}
|
|
||||||
|
|
||||||
// NumberNode holds a number: signed or unsigned integer, float, or complex.
|
|
||||||
// The value is parsed and stored under all the types that can represent the value.
|
|
||||||
// This simulates in a small amount of code the behavior of Go's ideal constants.
|
|
||||||
type NumberNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
IsInt bool // Number has an integral value.
|
|
||||||
IsUint bool // Number has an unsigned integral value.
|
|
||||||
IsFloat bool // Number has a floating-point value.
|
|
||||||
IsComplex bool // Number is complex.
|
|
||||||
Int64 int64 // The signed integer value.
|
|
||||||
Uint64 uint64 // The unsigned integer value.
|
|
||||||
Float64 float64 // The floating-point value.
|
|
||||||
Complex128 complex128 // The complex value.
|
|
||||||
Text string // The original textual representation from the input.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newNumber(pos Pos, text string, typ itemType) (*NumberNode, error) {
|
|
||||||
n := &NumberNode{tr: t, NodeType: NodeNumber, Pos: pos, Text: text}
|
|
||||||
switch typ {
|
|
||||||
case itemCharConstant:
|
|
||||||
rune, _, tail, err := strconv.UnquoteChar(text[1:], text[0])
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if tail != "'" {
|
|
||||||
return nil, fmt.Errorf("malformed character constant: %s", text)
|
|
||||||
}
|
|
||||||
n.Int64 = int64(rune)
|
|
||||||
n.IsInt = true
|
|
||||||
n.Uint64 = uint64(rune)
|
|
||||||
n.IsUint = true
|
|
||||||
n.Float64 = float64(rune) // odd but those are the rules.
|
|
||||||
n.IsFloat = true
|
|
||||||
return n, nil
|
|
||||||
case itemComplex:
|
|
||||||
// fmt.Sscan can parse the pair, so let it do the work.
|
|
||||||
if _, err := fmt.Sscan(text, &n.Complex128); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
n.IsComplex = true
|
|
||||||
n.simplifyComplex()
|
|
||||||
return n, nil
|
|
||||||
}
|
|
||||||
// Imaginary constants can only be complex unless they are zero.
|
|
||||||
if len(text) > 0 && text[len(text)-1] == 'i' {
|
|
||||||
f, err := strconv.ParseFloat(text[:len(text)-1], 64)
|
|
||||||
if err == nil {
|
|
||||||
n.IsComplex = true
|
|
||||||
n.Complex128 = complex(0, f)
|
|
||||||
n.simplifyComplex()
|
|
||||||
return n, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Do integer test first so we get 0x123 etc.
|
|
||||||
u, err := strconv.ParseUint(text, 0, 64) // will fail for -0; fixed below.
|
|
||||||
if err == nil {
|
|
||||||
n.IsUint = true
|
|
||||||
n.Uint64 = u
|
|
||||||
}
|
|
||||||
i, err := strconv.ParseInt(text, 0, 64)
|
|
||||||
if err == nil {
|
|
||||||
n.IsInt = true
|
|
||||||
n.Int64 = i
|
|
||||||
if i == 0 {
|
|
||||||
n.IsUint = true // in case of -0.
|
|
||||||
n.Uint64 = u
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// If an integer extraction succeeded, promote the float.
|
|
||||||
if n.IsInt {
|
|
||||||
n.IsFloat = true
|
|
||||||
n.Float64 = float64(n.Int64)
|
|
||||||
} else if n.IsUint {
|
|
||||||
n.IsFloat = true
|
|
||||||
n.Float64 = float64(n.Uint64)
|
|
||||||
} else {
|
|
||||||
f, err := strconv.ParseFloat(text, 64)
|
|
||||||
if err == nil {
|
|
||||||
n.IsFloat = true
|
|
||||||
n.Float64 = f
|
|
||||||
// If a floating-point extraction succeeded, extract the int if needed.
|
|
||||||
if !n.IsInt && float64(int64(f)) == f {
|
|
||||||
n.IsInt = true
|
|
||||||
n.Int64 = int64(f)
|
|
||||||
}
|
|
||||||
if !n.IsUint && float64(uint64(f)) == f {
|
|
||||||
n.IsUint = true
|
|
||||||
n.Uint64 = uint64(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !n.IsInt && !n.IsUint && !n.IsFloat {
|
|
||||||
return nil, fmt.Errorf("illegal number syntax: %q", text)
|
|
||||||
}
|
|
||||||
return n, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// simplifyComplex pulls out any other types that are represented by the complex number.
|
|
||||||
// These all require that the imaginary part be zero.
|
|
||||||
func (n *NumberNode) simplifyComplex() {
|
|
||||||
n.IsFloat = imag(n.Complex128) == 0
|
|
||||||
if n.IsFloat {
|
|
||||||
n.Float64 = real(n.Complex128)
|
|
||||||
n.IsInt = float64(int64(n.Float64)) == n.Float64
|
|
||||||
if n.IsInt {
|
|
||||||
n.Int64 = int64(n.Float64)
|
|
||||||
}
|
|
||||||
n.IsUint = float64(uint64(n.Float64)) == n.Float64
|
|
||||||
if n.IsUint {
|
|
||||||
n.Uint64 = uint64(n.Float64)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *NumberNode) String() string {
|
|
||||||
return n.Text
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *NumberNode) tree() *Tree {
|
|
||||||
return n.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *NumberNode) Copy() Node {
|
|
||||||
nn := new(NumberNode)
|
|
||||||
*nn = *n // Easy, fast, correct.
|
|
||||||
return nn
|
|
||||||
}
|
|
||||||
|
|
||||||
// StringNode holds a string constant. The value has been "unquoted".
|
|
||||||
type StringNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Quoted string // The original text of the string, with quotes.
|
|
||||||
Text string // The string, after quote processing.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newString(pos Pos, orig, text string) *StringNode {
|
|
||||||
return &StringNode{tr: t, NodeType: NodeString, Pos: pos, Quoted: orig, Text: text}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *StringNode) String() string {
|
|
||||||
return s.Quoted
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *StringNode) tree() *Tree {
|
|
||||||
return s.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *StringNode) Copy() Node {
|
|
||||||
return s.tr.newString(s.Pos, s.Quoted, s.Text)
|
|
||||||
}
|
|
||||||
|
|
||||||
// endNode represents an {{end}} action.
|
|
||||||
// It does not appear in the final parse tree.
|
|
||||||
type endNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newEnd(pos Pos) *endNode {
|
|
||||||
return &endNode{tr: t, NodeType: nodeEnd, Pos: pos}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *endNode) String() string {
|
|
||||||
return "{{end}}"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *endNode) tree() *Tree {
|
|
||||||
return e.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *endNode) Copy() Node {
|
|
||||||
return e.tr.newEnd(e.Pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
// elseNode represents an {{else}} action. Does not appear in the final tree.
|
|
||||||
type elseNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newElse(pos Pos, line int) *elseNode {
|
|
||||||
return &elseNode{tr: t, NodeType: nodeElse, Pos: pos, Line: line}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *elseNode) Type() NodeType {
|
|
||||||
return nodeElse
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *elseNode) String() string {
|
|
||||||
return "{{else}}"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *elseNode) tree() *Tree {
|
|
||||||
return e.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *elseNode) Copy() Node {
|
|
||||||
return e.tr.newElse(e.Pos, e.Line)
|
|
||||||
}
|
|
||||||
|
|
||||||
// BranchNode is the common representation of if, range, and with.
|
|
||||||
type BranchNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
|
||||||
Pipe *PipeNode // The pipeline to be evaluated.
|
|
||||||
List *ListNode // What to execute if the value is non-empty.
|
|
||||||
ElseList *ListNode // What to execute if the value is empty (nil if absent).
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *BranchNode) String() string {
|
|
||||||
name := ""
|
|
||||||
switch b.NodeType {
|
|
||||||
case NodeIf:
|
|
||||||
name = "if"
|
|
||||||
case NodeRange:
|
|
||||||
name = "range"
|
|
||||||
case NodeWith:
|
|
||||||
name = "with"
|
|
||||||
default:
|
|
||||||
panic("unknown branch type")
|
|
||||||
}
|
|
||||||
if b.ElseList != nil {
|
|
||||||
return fmt.Sprintf("{{%s %s}}%s{{else}}%s{{end}}", name, b.Pipe, b.List, b.ElseList)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("{{%s %s}}%s{{end}}", name, b.Pipe, b.List)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *BranchNode) tree() *Tree {
|
|
||||||
return b.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *BranchNode) Copy() Node {
|
|
||||||
switch b.NodeType {
|
|
||||||
case NodeIf:
|
|
||||||
return b.tr.newIf(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
|
|
||||||
case NodeRange:
|
|
||||||
return b.tr.newRange(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
|
|
||||||
case NodeWith:
|
|
||||||
return b.tr.newWith(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
|
|
||||||
default:
|
|
||||||
panic("unknown branch type")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// IfNode represents an {{if}} action and its commands.
|
|
||||||
type IfNode struct {
|
|
||||||
BranchNode
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newIf(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *IfNode {
|
|
||||||
return &IfNode{BranchNode{tr: t, NodeType: NodeIf, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *IfNode) Copy() Node {
|
|
||||||
return i.tr.newIf(i.Pos, i.Line, i.Pipe.CopyPipe(), i.List.CopyList(), i.ElseList.CopyList())
|
|
||||||
}
|
|
||||||
|
|
||||||
// RangeNode represents a {{range}} action and its commands.
|
|
||||||
type RangeNode struct {
|
|
||||||
BranchNode
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newRange(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *RangeNode {
|
|
||||||
return &RangeNode{BranchNode{tr: t, NodeType: NodeRange, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *RangeNode) Copy() Node {
|
|
||||||
return r.tr.newRange(r.Pos, r.Line, r.Pipe.CopyPipe(), r.List.CopyList(), r.ElseList.CopyList())
|
|
||||||
}
|
|
||||||
|
|
||||||
// WithNode represents a {{with}} action and its commands.
|
|
||||||
type WithNode struct {
|
|
||||||
BranchNode
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newWith(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *WithNode {
|
|
||||||
return &WithNode{BranchNode{tr: t, NodeType: NodeWith, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (w *WithNode) Copy() Node {
|
|
||||||
return w.tr.newWith(w.Pos, w.Line, w.Pipe.CopyPipe(), w.List.CopyList(), w.ElseList.CopyList())
|
|
||||||
}
|
|
||||||
|
|
||||||
// TemplateNode represents a {{template}} action.
|
|
||||||
type TemplateNode struct {
|
|
||||||
NodeType
|
|
||||||
Pos
|
|
||||||
tr *Tree
|
|
||||||
Line int // The line number in the input (deprecated; kept for compatibility)
|
|
||||||
Name string // The name of the template (unquoted).
|
|
||||||
Pipe *PipeNode // The command to evaluate as dot for the template.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) newTemplate(pos Pos, line int, name string, pipe *PipeNode) *TemplateNode {
|
|
||||||
return &TemplateNode{tr: t, NodeType: NodeTemplate, Pos: pos, Line: line, Name: name, Pipe: pipe}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *TemplateNode) String() string {
|
|
||||||
if t.Pipe == nil {
|
|
||||||
return fmt.Sprintf("{{template %q}}", t.Name)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("{{template %q %s}}", t.Name, t.Pipe)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *TemplateNode) tree() *Tree {
|
|
||||||
return t.tr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *TemplateNode) Copy() Node {
|
|
||||||
return t.tr.newTemplate(t.Pos, t.Line, t.Name, t.Pipe.CopyPipe())
|
|
||||||
}
|
|
700
vendor/github.com/alecthomas/template/parse/parse.go (generated, vendored)
@ -1,700 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// Package parse builds parse trees for templates as defined by text/template
|
|
||||||
// and html/template. Clients should use those packages to construct templates
|
|
||||||
// rather than this one, which provides shared internal data structures not
|
|
||||||
// intended for general use.
|
|
||||||
package parse
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"runtime"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Tree is the representation of a single parsed template.
|
|
||||||
type Tree struct {
|
|
||||||
Name string // name of the template represented by the tree.
|
|
||||||
ParseName string // name of the top-level template during parsing, for error messages.
|
|
||||||
Root *ListNode // top-level root of the tree.
|
|
||||||
text string // text parsed to create the template (or its parent)
|
|
||||||
// Parsing only; cleared after parse.
|
|
||||||
funcs []map[string]interface{}
|
|
||||||
lex *lexer
|
|
||||||
token [3]item // three-token lookahead for parser.
|
|
||||||
peekCount int
|
|
||||||
vars []string // variables defined at the moment.
|
|
||||||
}
|
|
||||||
|
|
||||||
// Copy returns a copy of the Tree. Any parsing state is discarded.
|
|
||||||
func (t *Tree) Copy() *Tree {
|
|
||||||
if t == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return &Tree{
|
|
||||||
Name: t.Name,
|
|
||||||
ParseName: t.ParseName,
|
|
||||||
Root: t.Root.CopyList(),
|
|
||||||
text: t.text,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse returns a map from template name to parse.Tree, created by parsing the
|
|
||||||
// templates described in the argument string. The top-level template will be
|
|
||||||
// given the specified name. If an error is encountered, parsing stops and an
|
|
||||||
// empty map is returned with the error.
|
|
||||||
func Parse(name, text, leftDelim, rightDelim string, funcs ...map[string]interface{}) (treeSet map[string]*Tree, err error) {
|
|
||||||
treeSet = make(map[string]*Tree)
|
|
||||||
t := New(name)
|
|
||||||
t.text = text
|
|
||||||
_, err = t.Parse(text, leftDelim, rightDelim, treeSet, funcs...)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// next returns the next token.
|
|
||||||
func (t *Tree) next() item {
|
|
||||||
if t.peekCount > 0 {
|
|
||||||
t.peekCount--
|
|
||||||
} else {
|
|
||||||
t.token[0] = t.lex.nextItem()
|
|
||||||
}
|
|
||||||
return t.token[t.peekCount]
|
|
||||||
}
|
|
||||||
|
|
||||||
// backup backs the input stream up one token.
|
|
||||||
func (t *Tree) backup() {
|
|
||||||
t.peekCount++
|
|
||||||
}
|
|
||||||
|
|
||||||
// backup2 backs the input stream up two tokens.
|
|
||||||
// The zeroth token is already there.
|
|
||||||
func (t *Tree) backup2(t1 item) {
|
|
||||||
t.token[1] = t1
|
|
||||||
t.peekCount = 2
|
|
||||||
}
|
|
||||||
|
|
||||||
// backup3 backs the input stream up three tokens
|
|
||||||
// The zeroth token is already there.
|
|
||||||
func (t *Tree) backup3(t2, t1 item) { // Reverse order: we're pushing back.
|
|
||||||
t.token[1] = t1
|
|
||||||
t.token[2] = t2
|
|
||||||
t.peekCount = 3
|
|
||||||
}
|
|
||||||
|
|
||||||
// peek returns but does not consume the next token.
|
|
||||||
func (t *Tree) peek() item {
|
|
||||||
if t.peekCount > 0 {
|
|
||||||
return t.token[t.peekCount-1]
|
|
||||||
}
|
|
||||||
t.peekCount = 1
|
|
||||||
t.token[0] = t.lex.nextItem()
|
|
||||||
return t.token[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// nextNonSpace returns the next non-space token.
|
|
||||||
func (t *Tree) nextNonSpace() (token item) {
|
|
||||||
for {
|
|
||||||
token = t.next()
|
|
||||||
if token.typ != itemSpace {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return token
|
|
||||||
}
|
|
||||||
|
|
||||||
// peekNonSpace returns but does not consume the next non-space token.
|
|
||||||
func (t *Tree) peekNonSpace() (token item) {
|
|
||||||
for {
|
|
||||||
token = t.next()
|
|
||||||
if token.typ != itemSpace {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
t.backup()
|
|
||||||
return token
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parsing.
|
|
||||||
|
|
||||||
// New allocates a new parse tree with the given name.
|
|
||||||
func New(name string, funcs ...map[string]interface{}) *Tree {
|
|
||||||
return &Tree{
|
|
||||||
Name: name,
|
|
||||||
funcs: funcs,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ErrorContext returns a textual representation of the location of the node in the input text.
|
|
||||||
// The receiver is only used when the node does not have a pointer to the tree inside,
|
|
||||||
// which can occur in old code.
|
|
||||||
func (t *Tree) ErrorContext(n Node) (location, context string) {
|
|
||||||
pos := int(n.Position())
|
|
||||||
tree := n.tree()
|
|
||||||
if tree == nil {
|
|
||||||
tree = t
|
|
||||||
}
|
|
||||||
text := tree.text[:pos]
|
|
||||||
byteNum := strings.LastIndex(text, "\n")
|
|
||||||
if byteNum == -1 {
|
|
||||||
byteNum = pos // On first line.
|
|
||||||
} else {
|
|
||||||
byteNum++ // After the newline.
|
|
||||||
byteNum = pos - byteNum
|
|
||||||
}
|
|
||||||
lineNum := 1 + strings.Count(text, "\n")
|
|
||||||
context = n.String()
|
|
||||||
if len(context) > 20 {
|
|
||||||
context = fmt.Sprintf("%.20s...", context)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%s:%d:%d", tree.ParseName, lineNum, byteNum), context
|
|
||||||
}
|
|
||||||
|
|
||||||
// errorf formats the error and terminates processing.
|
|
||||||
func (t *Tree) errorf(format string, args ...interface{}) {
|
|
||||||
t.Root = nil
|
|
||||||
format = fmt.Sprintf("template: %s:%d: %s", t.ParseName, t.lex.lineNumber(), format)
|
|
||||||
panic(fmt.Errorf(format, args...))
|
|
||||||
}
|
|
||||||
|
|
||||||
// error terminates processing.
|
|
||||||
func (t *Tree) error(err error) {
|
|
||||||
t.errorf("%s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// expect consumes the next token and guarantees it has the required type.
|
|
||||||
func (t *Tree) expect(expected itemType, context string) item {
|
|
||||||
token := t.nextNonSpace()
|
|
||||||
if token.typ != expected {
|
|
||||||
t.unexpected(token, context)
|
|
||||||
}
|
|
||||||
return token
|
|
||||||
}
|
|
||||||
|
|
||||||
// expectOneOf consumes the next token and guarantees it has one of the required types.
|
|
||||||
func (t *Tree) expectOneOf(expected1, expected2 itemType, context string) item {
|
|
||||||
token := t.nextNonSpace()
|
|
||||||
if token.typ != expected1 && token.typ != expected2 {
|
|
||||||
t.unexpected(token, context)
|
|
||||||
}
|
|
||||||
return token
|
|
||||||
}
|
|
||||||
|
|
||||||
// unexpected complains about the token and terminates processing.
|
|
||||||
func (t *Tree) unexpected(token item, context string) {
|
|
||||||
t.errorf("unexpected %s in %s", token, context)
|
|
||||||
}
|
|
||||||
|
|
||||||
// recover is the handler that turns panics into returns from the top level of Parse.
|
|
||||||
func (t *Tree) recover(errp *error) {
|
|
||||||
e := recover()
|
|
||||||
if e != nil {
|
|
||||||
if _, ok := e.(runtime.Error); ok {
|
|
||||||
panic(e)
|
|
||||||
}
|
|
||||||
if t != nil {
|
|
||||||
t.stopParse()
|
|
||||||
}
|
|
||||||
*errp = e.(error)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// startParse initializes the parser, using the lexer.
|
|
||||||
func (t *Tree) startParse(funcs []map[string]interface{}, lex *lexer) {
|
|
||||||
t.Root = nil
|
|
||||||
t.lex = lex
|
|
||||||
t.vars = []string{"$"}
|
|
||||||
t.funcs = funcs
|
|
||||||
}
|
|
||||||
|
|
||||||
// stopParse terminates parsing.
|
|
||||||
func (t *Tree) stopParse() {
|
|
||||||
t.lex = nil
|
|
||||||
t.vars = nil
|
|
||||||
t.funcs = nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse parses the template definition string to construct a representation of
|
|
||||||
// the template for execution. If either action delimiter string is empty, the
|
|
||||||
// default ("{{" or "}}") is used. Embedded template definitions are added to
|
|
||||||
// the treeSet map.
|
|
||||||
func (t *Tree) Parse(text, leftDelim, rightDelim string, treeSet map[string]*Tree, funcs ...map[string]interface{}) (tree *Tree, err error) {
|
|
||||||
defer t.recover(&err)
|
|
||||||
t.ParseName = t.Name
|
|
||||||
t.startParse(funcs, lex(t.Name, text, leftDelim, rightDelim))
|
|
||||||
t.text = text
|
|
||||||
t.parse(treeSet)
|
|
||||||
t.add(treeSet)
|
|
||||||
t.stopParse()
|
|
||||||
return t, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// add adds tree to the treeSet.
|
|
||||||
func (t *Tree) add(treeSet map[string]*Tree) {
|
|
||||||
tree := treeSet[t.Name]
|
|
||||||
if tree == nil || IsEmptyTree(tree.Root) {
|
|
||||||
treeSet[t.Name] = t
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if !IsEmptyTree(t.Root) {
|
|
||||||
t.errorf("template: multiple definition of template %q", t.Name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsEmptyTree reports whether this tree (node) is empty of everything but space.
|
|
||||||
func IsEmptyTree(n Node) bool {
|
|
||||||
switch n := n.(type) {
|
|
||||||
case nil:
|
|
||||||
return true
|
|
||||||
case *ActionNode:
|
|
||||||
case *IfNode:
|
|
||||||
case *ListNode:
|
|
||||||
for _, node := range n.Nodes {
|
|
||||||
if !IsEmptyTree(node) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
case *RangeNode:
|
|
||||||
case *TemplateNode:
|
|
||||||
case *TextNode:
|
|
||||||
return len(bytes.TrimSpace(n.Text)) == 0
|
|
||||||
case *WithNode:
|
|
||||||
default:
|
|
||||||
panic("unknown node: " + n.String())
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// parse is the top-level parser for a template, essentially the same
|
|
||||||
// as itemList except it also parses {{define}} actions.
|
|
||||||
// It runs to EOF.
|
|
||||||
func (t *Tree) parse(treeSet map[string]*Tree) (next Node) {
|
|
||||||
t.Root = t.newList(t.peek().pos)
|
|
||||||
for t.peek().typ != itemEOF {
|
|
||||||
if t.peek().typ == itemLeftDelim {
|
|
||||||
delim := t.next()
|
|
||||||
if t.nextNonSpace().typ == itemDefine {
|
|
||||||
newT := New("definition") // name will be updated once we know it.
|
|
||||||
newT.text = t.text
|
|
||||||
newT.ParseName = t.ParseName
|
|
||||||
newT.startParse(t.funcs, t.lex)
|
|
||||||
newT.parseDefinition(treeSet)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
t.backup2(delim)
|
|
||||||
}
|
|
||||||
n := t.textOrAction()
|
|
||||||
if n.Type() == nodeEnd {
|
|
||||||
t.errorf("unexpected %s", n)
|
|
||||||
}
|
|
||||||
t.Root.append(n)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseDefinition parses a {{define}} ... {{end}} template definition and
|
|
||||||
// installs the definition in the treeSet map. The "define" keyword has already
|
|
||||||
// been scanned.
|
|
||||||
func (t *Tree) parseDefinition(treeSet map[string]*Tree) {
|
|
||||||
const context = "define clause"
|
|
||||||
name := t.expectOneOf(itemString, itemRawString, context)
|
|
||||||
var err error
|
|
||||||
t.Name, err = strconv.Unquote(name.val)
|
|
||||||
if err != nil {
|
|
||||||
t.error(err)
|
|
||||||
}
|
|
||||||
t.expect(itemRightDelim, context)
|
|
||||||
var end Node
|
|
||||||
t.Root, end = t.itemList()
|
|
||||||
if end.Type() != nodeEnd {
|
|
||||||
t.errorf("unexpected %s in %s", end, context)
|
|
||||||
}
|
|
||||||
t.add(treeSet)
|
|
||||||
t.stopParse()
|
|
||||||
}
|
|
||||||
|
|
||||||
// itemList:
|
|
||||||
// textOrAction*
|
|
||||||
// Terminates at {{end}} or {{else}}, returned separately.
|
|
||||||
func (t *Tree) itemList() (list *ListNode, next Node) {
|
|
||||||
list = t.newList(t.peekNonSpace().pos)
|
|
||||||
for t.peekNonSpace().typ != itemEOF {
|
|
||||||
n := t.textOrAction()
|
|
||||||
switch n.Type() {
|
|
||||||
case nodeEnd, nodeElse:
|
|
||||||
return list, n
|
|
||||||
}
|
|
||||||
list.append(n)
|
|
||||||
}
|
|
||||||
t.errorf("unexpected EOF")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// textOrAction:
|
|
||||||
// text | action
|
|
||||||
func (t *Tree) textOrAction() Node {
|
|
||||||
switch token := t.nextNonSpace(); token.typ {
|
|
||||||
case itemElideNewline:
|
|
||||||
return t.elideNewline()
|
|
||||||
case itemText:
|
|
||||||
return t.newText(token.pos, token.val)
|
|
||||||
case itemLeftDelim:
|
|
||||||
return t.action()
|
|
||||||
default:
|
|
||||||
t.unexpected(token, "input")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// elideNewline:
|
|
||||||
// Remove newlines trailing rightDelim if \\ is present.
|
|
||||||
func (t *Tree) elideNewline() Node {
|
|
||||||
token := t.peek()
|
|
||||||
if token.typ != itemText {
|
|
||||||
t.unexpected(token, "input")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
t.next()
|
|
||||||
stripped := strings.TrimLeft(token.val, "\n\r")
|
|
||||||
diff := len(token.val) - len(stripped)
|
|
||||||
if diff > 0 {
|
|
||||||
// This is a bit nasty. We mutate the token in-place to remove
|
|
||||||
// preceding newlines.
|
|
||||||
token.pos += Pos(diff)
|
|
||||||
token.val = stripped
|
|
||||||
}
|
|
||||||
return t.newText(token.pos, token.val)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Action:
|
|
||||||
// control
|
|
||||||
// command ("|" command)*
|
|
||||||
// Left delim is past. Now get actions.
|
|
||||||
// First word could be a keyword such as range.
|
|
||||||
func (t *Tree) action() (n Node) {
|
|
||||||
switch token := t.nextNonSpace(); token.typ {
|
|
||||||
case itemElse:
|
|
||||||
return t.elseControl()
|
|
||||||
case itemEnd:
|
|
||||||
return t.endControl()
|
|
||||||
case itemIf:
|
|
||||||
return t.ifControl()
|
|
||||||
case itemRange:
|
|
||||||
return t.rangeControl()
|
|
||||||
case itemTemplate:
|
|
||||||
return t.templateControl()
|
|
||||||
case itemWith:
|
|
||||||
return t.withControl()
|
|
||||||
}
|
|
||||||
t.backup()
|
|
||||||
// Do not pop variables; they persist until "end".
|
|
||||||
return t.newAction(t.peek().pos, t.lex.lineNumber(), t.pipeline("command"))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Pipeline:
|
|
||||||
// declarations? command ('|' command)*
|
|
||||||
func (t *Tree) pipeline(context string) (pipe *PipeNode) {
|
|
||||||
var decl []*VariableNode
|
|
||||||
pos := t.peekNonSpace().pos
|
|
||||||
// Are there declarations?
|
|
||||||
for {
|
|
||||||
if v := t.peekNonSpace(); v.typ == itemVariable {
|
|
||||||
t.next()
|
|
||||||
// Since space is a token, we need 3-token look-ahead here in the worst case:
|
|
||||||
// in "$x foo" we need to read "foo" (as opposed to ":=") to know that $x is an
|
|
||||||
// argument variable rather than a declaration. So remember the token
|
|
||||||
// adjacent to the variable so we can push it back if necessary.
|
|
||||||
tokenAfterVariable := t.peek()
|
|
||||||
if next := t.peekNonSpace(); next.typ == itemColonEquals || (next.typ == itemChar && next.val == ",") {
|
|
||||||
t.nextNonSpace()
|
|
||||||
variable := t.newVariable(v.pos, v.val)
|
|
||||||
decl = append(decl, variable)
|
|
||||||
t.vars = append(t.vars, v.val)
|
|
||||||
if next.typ == itemChar && next.val == "," {
|
|
||||||
if context == "range" && len(decl) < 2 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
t.errorf("too many declarations in %s", context)
|
|
||||||
}
|
|
||||||
} else if tokenAfterVariable.typ == itemSpace {
|
|
||||||
t.backup3(v, tokenAfterVariable)
|
|
||||||
} else {
|
|
||||||
t.backup2(v)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
pipe = t.newPipeline(pos, t.lex.lineNumber(), decl)
|
|
||||||
for {
|
|
||||||
switch token := t.nextNonSpace(); token.typ {
|
|
||||||
case itemRightDelim, itemRightParen:
|
|
||||||
if len(pipe.Cmds) == 0 {
|
|
||||||
t.errorf("missing value for %s", context)
|
|
||||||
}
|
|
||||||
if token.typ == itemRightParen {
|
|
||||||
t.backup()
|
|
||||||
}
|
|
||||||
return
|
|
||||||
case itemBool, itemCharConstant, itemComplex, itemDot, itemField, itemIdentifier,
|
|
||||||
itemNumber, itemNil, itemRawString, itemString, itemVariable, itemLeftParen:
|
|
||||||
t.backup()
|
|
||||||
pipe.append(t.command())
|
|
||||||
default:
|
|
||||||
t.unexpected(token, context)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *Tree) parseControl(allowElseIf bool, context string) (pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) {
|
|
||||||
defer t.popVars(len(t.vars))
|
|
||||||
line = t.lex.lineNumber()
|
|
||||||
pipe = t.pipeline(context)
|
|
||||||
var next Node
|
|
||||||
list, next = t.itemList()
|
|
||||||
switch next.Type() {
|
|
||||||
case nodeEnd: //done
|
|
||||||
case nodeElse:
|
|
||||||
if allowElseIf {
|
|
||||||
// Special case for "else if". If the "else" is followed immediately by an "if",
|
|
||||||
// the elseControl will have left the "if" token pending. Treat
|
|
||||||
// {{if a}}_{{else if b}}_{{end}}
|
|
||||||
// as
|
|
||||||
// {{if a}}_{{else}}{{if b}}_{{end}}{{end}}.
|
|
||||||
// To do this, parse the if as usual and stop at it {{end}}; the subsequent{{end}}
|
|
||||||
// is assumed. This technique works even for long if-else-if chains.
|
|
||||||
// TODO: Should we allow else-if in with and range?
|
|
||||||
if t.peek().typ == itemIf {
|
|
||||||
t.next() // Consume the "if" token.
|
|
||||||
elseList = t.newList(next.Position())
|
|
||||||
elseList.append(t.ifControl())
|
|
||||||
// Do not consume the next item - only one {{end}} required.
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
elseList, next = t.itemList()
|
|
||||||
if next.Type() != nodeEnd {
|
|
||||||
t.errorf("expected end; found %s", next)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return pipe.Position(), line, pipe, list, elseList
|
|
||||||
}
|
|
||||||
|
|
||||||
// If:
|
|
||||||
// {{if pipeline}} itemList {{end}}
|
|
||||||
// {{if pipeline}} itemList {{else}} itemList {{end}}
|
|
||||||
// If keyword is past.
|
|
||||||
func (t *Tree) ifControl() Node {
|
|
||||||
return t.newIf(t.parseControl(true, "if"))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Range:
|
|
||||||
// {{range pipeline}} itemList {{end}}
|
|
||||||
// {{range pipeline}} itemList {{else}} itemList {{end}}
|
|
||||||
// Range keyword is past.
|
|
||||||
func (t *Tree) rangeControl() Node {
|
|
||||||
return t.newRange(t.parseControl(false, "range"))
|
|
||||||
}
|
|
||||||
|
|
||||||
// With:
|
|
||||||
// {{with pipeline}} itemList {{end}}
|
|
||||||
// {{with pipeline}} itemList {{else}} itemList {{end}}
|
|
||||||
// If keyword is past.
|
|
||||||
func (t *Tree) withControl() Node {
|
|
||||||
return t.newWith(t.parseControl(false, "with"))
|
|
||||||
}
|
|
||||||
|
|
||||||
// End:
|
|
||||||
// {{end}}
|
|
||||||
// End keyword is past.
|
|
||||||
func (t *Tree) endControl() Node {
|
|
||||||
return t.newEnd(t.expect(itemRightDelim, "end").pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Else:
|
|
||||||
// {{else}}
|
|
||||||
// Else keyword is past.
|
|
||||||
func (t *Tree) elseControl() Node {
|
|
||||||
// Special case for "else if".
|
|
||||||
peek := t.peekNonSpace()
|
|
||||||
if peek.typ == itemIf {
|
|
||||||
// We see "{{else if ... " but in effect rewrite it to {{else}}{{if ... ".
|
|
||||||
return t.newElse(peek.pos, t.lex.lineNumber())
|
|
||||||
}
|
|
||||||
return t.newElse(t.expect(itemRightDelim, "else").pos, t.lex.lineNumber())
|
|
||||||
}
|
|
||||||
|
|
||||||
// Template:
|
|
||||||
// {{template stringValue pipeline}}
|
|
||||||
// Template keyword is past. The name must be something that can evaluate
|
|
||||||
// to a string.
|
|
||||||
func (t *Tree) templateControl() Node {
|
|
||||||
var name string
|
|
||||||
token := t.nextNonSpace()
|
|
||||||
switch token.typ {
|
|
||||||
case itemString, itemRawString:
|
|
||||||
s, err := strconv.Unquote(token.val)
|
|
||||||
if err != nil {
|
|
||||||
t.error(err)
|
|
||||||
}
|
|
||||||
name = s
|
|
||||||
default:
|
|
||||||
t.unexpected(token, "template invocation")
|
|
||||||
}
|
|
||||||
var pipe *PipeNode
|
|
||||||
if t.nextNonSpace().typ != itemRightDelim {
|
|
||||||
t.backup()
|
|
||||||
// Do not pop variables; they persist until "end".
|
|
||||||
pipe = t.pipeline("template")
|
|
||||||
}
|
|
||||||
return t.newTemplate(token.pos, t.lex.lineNumber(), name, pipe)
|
|
||||||
}
|
|
||||||
|
|
||||||
// command:
|
|
||||||
// operand (space operand)*
|
|
||||||
// space-separated arguments up to a pipeline character or right delimiter.
|
|
||||||
// we consume the pipe character but leave the right delim to terminate the action.
|
|
||||||
func (t *Tree) command() *CommandNode {
|
|
||||||
cmd := t.newCommand(t.peekNonSpace().pos)
|
|
||||||
for {
|
|
||||||
t.peekNonSpace() // skip leading spaces.
|
|
||||||
operand := t.operand()
|
|
||||||
if operand != nil {
|
|
||||||
cmd.append(operand)
|
|
||||||
}
|
|
||||||
switch token := t.next(); token.typ {
|
|
||||||
case itemSpace:
|
|
||||||
continue
|
|
||||||
case itemError:
|
|
||||||
t.errorf("%s", token.val)
|
|
||||||
case itemRightDelim, itemRightParen:
|
|
||||||
t.backup()
|
|
||||||
case itemPipe:
|
|
||||||
default:
|
|
||||||
t.errorf("unexpected %s in operand; missing space?", token)
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if len(cmd.Args) == 0 {
|
|
||||||
t.errorf("empty command")
|
|
||||||
}
|
|
||||||
return cmd
|
|
||||||
}
|
|
||||||
|
|
||||||
// operand:
|
|
||||||
// term .Field*
|
|
||||||
// An operand is a space-separated component of a command,
|
|
||||||
// a term possibly followed by field accesses.
|
|
||||||
// A nil return means the next item is not an operand.
|
|
||||||
func (t *Tree) operand() Node {
|
|
||||||
node := t.term()
|
|
||||||
if node == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
if t.peek().typ == itemField {
|
|
||||||
chain := t.newChain(t.peek().pos, node)
|
|
||||||
for t.peek().typ == itemField {
|
|
||||||
chain.Add(t.next().val)
|
|
||||||
}
|
|
||||||
// Compatibility with original API: If the term is of type NodeField
|
|
||||||
// or NodeVariable, just put more fields on the original.
|
|
||||||
// Otherwise, keep the Chain node.
|
|
||||||
// TODO: Switch to Chains always when we can.
|
|
||||||
switch node.Type() {
|
|
||||||
case NodeField:
|
|
||||||
node = t.newField(chain.Position(), chain.String())
|
|
||||||
case NodeVariable:
|
|
||||||
node = t.newVariable(chain.Position(), chain.String())
|
|
||||||
default:
|
|
||||||
node = chain
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
// term:
|
|
||||||
// literal (number, string, nil, boolean)
|
|
||||||
// function (identifier)
|
|
||||||
// .
|
|
||||||
// .Field
|
|
||||||
// $
|
|
||||||
// '(' pipeline ')'
|
|
||||||
// A term is a simple "expression".
|
|
||||||
// A nil return means the next item is not a term.
|
|
||||||
func (t *Tree) term() Node {
|
|
||||||
switch token := t.nextNonSpace(); token.typ {
|
|
||||||
case itemError:
|
|
||||||
t.errorf("%s", token.val)
|
|
||||||
case itemIdentifier:
|
|
||||||
if !t.hasFunction(token.val) {
|
|
||||||
t.errorf("function %q not defined", token.val)
|
|
||||||
}
|
|
||||||
return NewIdentifier(token.val).SetTree(t).SetPos(token.pos)
|
|
||||||
case itemDot:
|
|
||||||
return t.newDot(token.pos)
|
|
||||||
case itemNil:
|
|
||||||
return t.newNil(token.pos)
|
|
||||||
case itemVariable:
|
|
||||||
return t.useVar(token.pos, token.val)
|
|
||||||
case itemField:
|
|
||||||
return t.newField(token.pos, token.val)
|
|
||||||
case itemBool:
|
|
||||||
return t.newBool(token.pos, token.val == "true")
|
|
||||||
case itemCharConstant, itemComplex, itemNumber:
|
|
||||||
number, err := t.newNumber(token.pos, token.val, token.typ)
|
|
||||||
if err != nil {
|
|
||||||
t.error(err)
|
|
||||||
}
|
|
||||||
return number
|
|
||||||
case itemLeftParen:
|
|
||||||
pipe := t.pipeline("parenthesized pipeline")
|
|
||||||
if token := t.next(); token.typ != itemRightParen {
|
|
||||||
t.errorf("unclosed right paren: unexpected %s", token)
|
|
||||||
}
|
|
||||||
return pipe
|
|
||||||
case itemString, itemRawString:
|
|
||||||
s, err := strconv.Unquote(token.val)
|
|
||||||
if err != nil {
|
|
||||||
t.error(err)
|
|
||||||
}
|
|
||||||
return t.newString(token.pos, token.val, s)
|
|
||||||
}
|
|
||||||
t.backup()
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// hasFunction reports if a function name exists in the Tree's maps.
|
|
||||||
func (t *Tree) hasFunction(name string) bool {
|
|
||||||
for _, funcMap := range t.funcs {
|
|
||||||
if funcMap == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if funcMap[name] != nil {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// popVars trims the variable list to the specified length
|
|
||||||
func (t *Tree) popVars(n int) {
|
|
||||||
t.vars = t.vars[:n]
|
|
||||||
}
|
|
||||||
|
|
||||||
// useVar returns a node for a variable reference. It errors if the
|
|
||||||
// variable is not defined.
|
|
||||||
func (t *Tree) useVar(pos Pos, name string) Node {
|
|
||||||
v := t.newVariable(pos, name)
|
|
||||||
for _, varName := range t.vars {
|
|
||||||
if varName == v.Ident[0] {
|
|
||||||
return v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
t.errorf("undefined variable %q", v.Ident[0])
|
|
||||||
return nil
|
|
||||||
}
|
|
218
vendor/github.com/alecthomas/template/template.go (generated, vendored)
@ -1,218 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package template

import (
    "fmt"
    "reflect"

    "github.com/alecthomas/template/parse"
)

// common holds the information shared by related templates.
type common struct {
    tmpl map[string]*Template
    // We use two maps, one for parsing and one for execution.
    // This separation makes the API cleaner since it doesn't
    // expose reflection to the client.
    parseFuncs FuncMap
    execFuncs map[string]reflect.Value
}

// Template is the representation of a parsed template. The *parse.Tree
// field is exported only for use by html/template and should be treated
// as unexported by all other clients.
type Template struct {
    name string
    *parse.Tree
    *common
    leftDelim string
    rightDelim string
}

// New allocates a new template with the given name.
func New(name string) *Template {
    return &Template{
        name: name,
    }
}

// Name returns the name of the template.
func (t *Template) Name() string {
    return t.name
}

// New allocates a new template associated with the given one and with the same
// delimiters. The association, which is transitive, allows one template to
// invoke another with a {{template}} action.
func (t *Template) New(name string) *Template {
    t.init()
    return &Template{
        name: name,
        common: t.common,
        leftDelim: t.leftDelim,
        rightDelim: t.rightDelim,
    }
}

func (t *Template) init() {
    if t.common == nil {
        t.common = new(common)
        t.tmpl = make(map[string]*Template)
        t.parseFuncs = make(FuncMap)
        t.execFuncs = make(map[string]reflect.Value)
    }
}

// Clone returns a duplicate of the template, including all associated
// templates. The actual representation is not copied, but the name space of
// associated templates is, so further calls to Parse in the copy will add
// templates to the copy but not to the original. Clone can be used to prepare
// common templates and use them with variant definitions for other templates
// by adding the variants after the clone is made.
func (t *Template) Clone() (*Template, error) {
    nt := t.copy(nil)
    nt.init()
    nt.tmpl[t.name] = nt
    for k, v := range t.tmpl {
        if k == t.name { // Already installed.
            continue
        }
        // The associated templates share nt's common structure.
        tmpl := v.copy(nt.common)
        nt.tmpl[k] = tmpl
    }
    for k, v := range t.parseFuncs {
        nt.parseFuncs[k] = v
    }
    for k, v := range t.execFuncs {
        nt.execFuncs[k] = v
    }
    return nt, nil
}

// copy returns a shallow copy of t, with common set to the argument.
func (t *Template) copy(c *common) *Template {
    nt := New(t.name)
    nt.Tree = t.Tree
    nt.common = c
    nt.leftDelim = t.leftDelim
    nt.rightDelim = t.rightDelim
    return nt
}

// AddParseTree creates a new template with the name and parse tree
// and associates it with t.
func (t *Template) AddParseTree(name string, tree *parse.Tree) (*Template, error) {
    if t.common != nil && t.tmpl[name] != nil {
        return nil, fmt.Errorf("template: redefinition of template %q", name)
    }
    nt := t.New(name)
    nt.Tree = tree
    t.tmpl[name] = nt
    return nt, nil
}

// Templates returns a slice of the templates associated with t, including t
// itself.
func (t *Template) Templates() []*Template {
    if t.common == nil {
        return nil
    }
    // Return a slice so we don't expose the map.
    m := make([]*Template, 0, len(t.tmpl))
    for _, v := range t.tmpl {
        m = append(m, v)
    }
    return m
}

// Delims sets the action delimiters to the specified strings, to be used in
// subsequent calls to Parse, ParseFiles, or ParseGlob. Nested template
// definitions will inherit the settings. An empty delimiter stands for the
// corresponding default: {{ or }}.
// The return value is the template, so calls can be chained.
func (t *Template) Delims(left, right string) *Template {
    t.leftDelim = left
    t.rightDelim = right
    return t
}

// Funcs adds the elements of the argument map to the template's function map.
// It panics if a value in the map is not a function with appropriate return
// type. However, it is legal to overwrite elements of the map. The return
// value is the template, so calls can be chained.
func (t *Template) Funcs(funcMap FuncMap) *Template {
    t.init()
    addValueFuncs(t.execFuncs, funcMap)
    addFuncs(t.parseFuncs, funcMap)
    return t
}

// Lookup returns the template with the given name that is associated with t,
// or nil if there is no such template.
func (t *Template) Lookup(name string) *Template {
    if t.common == nil {
        return nil
    }
    return t.tmpl[name]
}

// Parse parses a string into a template. Nested template definitions will be
// associated with the top-level template t. Parse may be called multiple times
// to parse definitions of templates to associate with t. It is an error if a
// resulting template is non-empty (contains content other than template
// definitions) and would replace a non-empty template with the same name.
// (In multiple calls to Parse with the same receiver template, only one call
// can contain text other than space, comments, and template definitions.)
func (t *Template) Parse(text string) (*Template, error) {
    t.init()
    trees, err := parse.Parse(t.name, text, t.leftDelim, t.rightDelim, t.parseFuncs, builtins)
    if err != nil {
        return nil, err
    }
    // Add the newly parsed trees, including the one for t, into our common structure.
    for name, tree := range trees {
        // If the name we parsed is the name of this template, overwrite this template.
        // The associate method checks it's not a redefinition.
        tmpl := t
        if name != t.name {
            tmpl = t.New(name)
        }
        // Even if t == tmpl, we need to install it in the common.tmpl map.
        if replace, err := t.associate(tmpl, tree); err != nil {
            return nil, err
        } else if replace {
            tmpl.Tree = tree
        }
        tmpl.leftDelim = t.leftDelim
        tmpl.rightDelim = t.rightDelim
    }
    return t, nil
}

// associate installs the new template into the group of templates associated
// with t. It is an error to reuse a name except to overwrite an empty
// template. The two are already known to share the common structure.
// The boolean return value reports wither to store this tree as t.Tree.
func (t *Template) associate(new *Template, tree *parse.Tree) (bool, error) {
    if new.common != t.common {
        panic("internal error: associate not common")
    }
    name := new.name
    if old := t.tmpl[name]; old != nil {
        oldIsEmpty := parse.IsEmptyTree(old.Root)
        newIsEmpty := parse.IsEmptyTree(tree.Root)
        if newIsEmpty {
            // Whether old is empty or not, new is empty; no reason to replace old.
            return false, nil
        }
        if !oldIsEmpty {
            return false, fmt.Errorf("template: redefinition of template %q", name)
        }
    }
    t.tmpl[name] = new
    return true, nil
}

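The removed template.go above exposes a small builder-style API. As a rough, illustrative sketch only (not part of the vendor tree being deleted, and assuming the package's published import path), this is how the exported pieces shown above fit together; only identifiers that appear in the file are used:

```go
// Hedged illustration of the removed API; New, Parse, Name, Lookup and
// Templates are the exported functions defined in the file above.
package main

import (
    "fmt"

    "github.com/alecthomas/template"
)

func main() {
    // New + Parse build a named template; a nested {{define}} block becomes
    // an associated template reachable via Lookup.
    root, err := template.New("root").Parse(`{{define "greet"}}hello{{end}}root body`)
    if err != nil {
        panic(err)
    }
    fmt.Println(root.Name())                 // "root"
    fmt.Println(root.Lookup("greet") != nil) // true
    fmt.Println(len(root.Templates()))       // number of associated templates
}
```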
14
vendor/github.com/asaskevich/govalidator/.travis.yml (generated, vendored)
@ -1,14 +0,0 @@
language: go

go:
  - 1.1
  - 1.2
  - 1.3
  - 1.4
  - 1.5
  - 1.6
  - tip

notifications:
  email:
    - bwatas@gmail.com
63
vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md (generated, vendored)
@ -1,63 +0,0 @@
#### Support
If you do have a contribution to the package, feel free to create a Pull Request or an Issue.

#### What to contribute
If you don't know what to do, there are some features and functions that need to be done

- [ ] Refactor code
- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check
- [ ] Create actual list of contributors and projects that currently using this package
- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions)
- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new
- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc
- [ ] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
- [ ] Implement fuzzing testing
- [ ] Implement some struct/map/array utilities
- [ ] Implement map/array validation
- [ ] Implement benchmarking
- [ ] Implement batch of examples
- [ ] Look at forks for new features and fixes

#### Advice
Feel free to create what you want, but keep in mind when you implement new features:
- Code must be clear and readable, names of variables/constants clearly describes what they are doing
- Public functions must be documented and described in source file and added to README.md to the list of available functions
- There are must be unit-tests for any new functions and improvements

## Financial contributions

We also welcome financial contributions in full transparency on our [open collective](https://opencollective.com/govalidator).
Anyone can file an expense. If the expense makes sense for the development of the community, it will be "merged" in the ledger of our open collective by the core contributors and the person who filed the expense will be reimbursed.

## Credits

### Contributors

Thank you to all the people who have already contributed to govalidator!
<a href="graphs/contributors"><img src="https://opencollective.com/govalidator/contributors.svg?width=890" /></a>

### Backers

Thank you to all our backers! [[Become a backer](https://opencollective.com/govalidator#backer)]

<a href="https://opencollective.com/govalidator#backers" target="_blank"><img src="https://opencollective.com/govalidator/backers.svg?width=890"></a>

### Sponsors

Thank you to all our sponsors! (please ask your company to also support this open source project by [becoming a sponsor](https://opencollective.com/govalidator#sponsor))

<a href="https://opencollective.com/govalidator/sponsor/0/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/0/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/1/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/1/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/2/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/2/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/3/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/3/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/4/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/4/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/5/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/5/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/6/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/6/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/7/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/7/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/8/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/8/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/9/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/9/avatar.svg"></a>
21
vendor/github.com/asaskevich/govalidator/LICENSE (generated, vendored)
@ -1,21 +0,0 @@
The MIT License (MIT)

Copyright (c) 2014 Alex Saskevich

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
507
vendor/github.com/asaskevich/govalidator/README.md (generated, vendored)
@ -1,507 +0,0 @@
govalidator
===========
[](https://gitter.im/asaskevich/govalidator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [](https://godoc.org/github.com/asaskevich/govalidator) [](https://coveralls.io/r/asaskevich/govalidator?branch=master) [](https://app.wercker.com/project/bykey/1ec990b09ea86c910d5f08b0e02c6043)
[](https://travis-ci.org/asaskevich/govalidator) [](https://goreportcard.com/report/github.com/asaskevich/govalidator) [](http://go-search.org/view?id=github.com%2Fasaskevich%2Fgovalidator) [](#backers) [](#sponsors) [](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_shield)

A package of validators and sanitizers for strings, structs and collections. Based on [validator.js](https://github.com/chriso/validator.js).

#### Installation
Make sure that Go is installed on your computer.
Type the following command in your terminal:

    go get github.com/asaskevich/govalidator

or you can get specified release of the package with `gopkg.in`:

    go get gopkg.in/asaskevich/govalidator.v4

After it the package is ready to use.

#### Import package in your project
Add following line in your `*.go` file:
```go
import "github.com/asaskevich/govalidator"
```
If you are unhappy to use long `govalidator`, you can do something like this:
```go
import (
  valid "github.com/asaskevich/govalidator"
)
```

#### Activate behavior to require all fields have a validation tag by default
`SetFieldsRequiredByDefault` causes validation to fail when struct fields do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). A good place to activate this is a package init function or the main() function.

`SetNilPtrAllowedByRequired` causes validation to pass when struct fields marked by `required` are set to nil. This is disabled by default for consistency, but some packages that need to be able to determine between `nil` and `zero value` state can use this. If disabled, both `nil` and `zero` values cause validation errors.

```go
import "github.com/asaskevich/govalidator"

func init() {
  govalidator.SetFieldsRequiredByDefault(true)
}
```

Here's some code to explain it:
```go
// this struct definition will fail govalidator.ValidateStruct() (and the field values do not matter):
type exampleStruct struct {
  Name string ``
  Email string `valid:"email"`
}

// this, however, will only fail when Email is empty or an invalid email address:
type exampleStruct2 struct {
  Name string `valid:"-"`
  Email string `valid:"email"`
}

// lastly, this will only fail when Email is an invalid email address but not when it's empty:
type exampleStruct2 struct {
  Name string `valid:"-"`
  Email string `valid:"email,optional"`
}
```
|
|
||||||
|
|
||||||
#### Recent breaking changes (see [#123](https://github.com/asaskevich/govalidator/pull/123))
|
|
||||||
##### Custom validator function signature
|
|
||||||
A context was added as the second parameter, for structs this is the object being validated – this makes dependent validation possible.
|
|
||||||
```go
|
|
||||||
import "github.com/asaskevich/govalidator"
|
|
||||||
|
|
||||||
// old signature
|
|
||||||
func(i interface{}) bool
|
|
||||||
|
|
||||||
// new signature
|
|
||||||
func(i interface{}, o interface{}) bool
|
|
||||||
```
|
|
||||||
|
|
||||||
##### Adding a custom validator
|
|
||||||
This was changed to prevent data races when accessing custom validators.
|
|
||||||
```go
|
|
||||||
import "github.com/asaskevich/govalidator"
|
|
||||||
|
|
||||||
// before
|
|
||||||
govalidator.CustomTypeTagMap["customByteArrayValidator"] = CustomTypeValidator(func(i interface{}, o interface{}) bool {
|
|
||||||
// ...
|
|
||||||
})
|
|
||||||
|
|
||||||
// after
|
|
||||||
govalidator.CustomTypeTagMap.Set("customByteArrayValidator", CustomTypeValidator(func(i interface{}, o interface{}) bool {
|
|
||||||
// ...
|
|
||||||
}))
|
|
||||||
```
|
|
||||||
|
|
||||||
#### List of functions:
|
|
||||||
```go
|
|
||||||
func Abs(value float64) float64
|
|
||||||
func BlackList(str, chars string) string
|
|
||||||
func ByteLength(str string, params ...string) bool
|
|
||||||
func CamelCaseToUnderscore(str string) string
|
|
||||||
func Contains(str, substring string) bool
|
|
||||||
func Count(array []interface{}, iterator ConditionIterator) int
|
|
||||||
func Each(array []interface{}, iterator Iterator)
|
|
||||||
func ErrorByField(e error, field string) string
|
|
||||||
func ErrorsByField(e error) map[string]string
|
|
||||||
func Filter(array []interface{}, iterator ConditionIterator) []interface{}
|
|
||||||
func Find(array []interface{}, iterator ConditionIterator) interface{}
|
|
||||||
func GetLine(s string, index int) (string, error)
|
|
||||||
func GetLines(s string) []string
|
|
||||||
func InRange(value, left, right float64) bool
|
|
||||||
func IsASCII(str string) bool
|
|
||||||
func IsAlpha(str string) bool
|
|
||||||
func IsAlphanumeric(str string) bool
|
|
||||||
func IsBase64(str string) bool
|
|
||||||
func IsByteLength(str string, min, max int) bool
|
|
||||||
func IsCIDR(str string) bool
|
|
||||||
func IsCreditCard(str string) bool
|
|
||||||
func IsDNSName(str string) bool
|
|
||||||
func IsDataURI(str string) bool
|
|
||||||
func IsDialString(str string) bool
|
|
||||||
func IsDivisibleBy(str, num string) bool
|
|
||||||
func IsEmail(str string) bool
|
|
||||||
func IsFilePath(str string) (bool, int)
|
|
||||||
func IsFloat(str string) bool
|
|
||||||
func IsFullWidth(str string) bool
|
|
||||||
func IsHalfWidth(str string) bool
|
|
||||||
func IsHexadecimal(str string) bool
|
|
||||||
func IsHexcolor(str string) bool
|
|
||||||
func IsHost(str string) bool
|
|
||||||
func IsIP(str string) bool
|
|
||||||
func IsIPv4(str string) bool
|
|
||||||
func IsIPv6(str string) bool
|
|
||||||
func IsISBN(str string, version int) bool
|
|
||||||
func IsISBN10(str string) bool
|
|
||||||
func IsISBN13(str string) bool
|
|
||||||
func IsISO3166Alpha2(str string) bool
|
|
||||||
func IsISO3166Alpha3(str string) bool
|
|
||||||
func IsISO693Alpha2(str string) bool
|
|
||||||
func IsISO693Alpha3b(str string) bool
|
|
||||||
func IsISO4217(str string) bool
|
|
||||||
func IsIn(str string, params ...string) bool
|
|
||||||
func IsInt(str string) bool
|
|
||||||
func IsJSON(str string) bool
|
|
||||||
func IsLatitude(str string) bool
|
|
||||||
func IsLongitude(str string) bool
|
|
||||||
func IsLowerCase(str string) bool
|
|
||||||
func IsMAC(str string) bool
|
|
||||||
func IsMongoID(str string) bool
|
|
||||||
func IsMultibyte(str string) bool
|
|
||||||
func IsNatural(value float64) bool
|
|
||||||
func IsNegative(value float64) bool
|
|
||||||
func IsNonNegative(value float64) bool
|
|
||||||
func IsNonPositive(value float64) bool
|
|
||||||
func IsNull(str string) bool
|
|
||||||
func IsNumeric(str string) bool
|
|
||||||
func IsPort(str string) bool
|
|
||||||
func IsPositive(value float64) bool
|
|
||||||
func IsPrintableASCII(str string) bool
|
|
||||||
func IsRFC3339(str string) bool
|
|
||||||
func IsRFC3339WithoutZone(str string) bool
|
|
||||||
func IsRGBcolor(str string) bool
|
|
||||||
func IsRequestURI(rawurl string) bool
|
|
||||||
func IsRequestURL(rawurl string) bool
|
|
||||||
func IsSSN(str string) bool
|
|
||||||
func IsSemver(str string) bool
|
|
||||||
func IsTime(str string, format string) bool
|
|
||||||
func IsURL(str string) bool
|
|
||||||
func IsUTFDigit(str string) bool
|
|
||||||
func IsUTFLetter(str string) bool
|
|
||||||
func IsUTFLetterNumeric(str string) bool
|
|
||||||
func IsUTFNumeric(str string) bool
|
|
||||||
func IsUUID(str string) bool
|
|
||||||
func IsUUIDv3(str string) bool
|
|
||||||
func IsUUIDv4(str string) bool
|
|
||||||
func IsUUIDv5(str string) bool
|
|
||||||
func IsUpperCase(str string) bool
|
|
||||||
func IsVariableWidth(str string) bool
|
|
||||||
func IsWhole(value float64) bool
|
|
||||||
func LeftTrim(str, chars string) string
|
|
||||||
func Map(array []interface{}, iterator ResultIterator) []interface{}
|
|
||||||
func Matches(str, pattern string) bool
|
|
||||||
func NormalizeEmail(str string) (string, error)
|
|
||||||
func PadBoth(str string, padStr string, padLen int) string
|
|
||||||
func PadLeft(str string, padStr string, padLen int) string
|
|
||||||
func PadRight(str string, padStr string, padLen int) string
|
|
||||||
func Range(str string, params ...string) bool
|
|
||||||
func RemoveTags(s string) string
|
|
||||||
func ReplacePattern(str, pattern, replace string) string
|
|
||||||
func Reverse(s string) string
|
|
||||||
func RightTrim(str, chars string) string
|
|
||||||
func RuneLength(str string, params ...string) bool
|
|
||||||
func SafeFileName(str string) string
|
|
||||||
func SetFieldsRequiredByDefault(value bool)
|
|
||||||
func Sign(value float64) float64
|
|
||||||
func StringLength(str string, params ...string) bool
|
|
||||||
func StringMatches(s string, params ...string) bool
|
|
||||||
func StripLow(str string, keepNewLines bool) string
|
|
||||||
func ToBoolean(str string) (bool, error)
|
|
||||||
func ToFloat(str string) (float64, error)
|
|
||||||
func ToInt(str string) (int64, error)
|
|
||||||
func ToJSON(obj interface{}) (string, error)
|
|
||||||
func ToString(obj interface{}) string
|
|
||||||
func Trim(str, chars string) string
|
|
||||||
func Truncate(str string, length int, ending string) string
|
|
||||||
func UnderscoreToCamelCase(s string) string
|
|
||||||
func ValidateStruct(s interface{}) (bool, error)
|
|
||||||
func WhiteList(str, chars string) string
|
|
||||||
type ConditionIterator
|
|
||||||
type CustomTypeValidator
|
|
||||||
type Error
|
|
||||||
func (e Error) Error() string
|
|
||||||
type Errors
|
|
||||||
func (es Errors) Error() string
|
|
||||||
func (es Errors) Errors() []error
|
|
||||||
type ISO3166Entry
|
|
||||||
type Iterator
|
|
||||||
type ParamValidator
|
|
||||||
type ResultIterator
|
|
||||||
type UnsupportedTypeError
|
|
||||||
func (e *UnsupportedTypeError) Error() string
|
|
||||||
type Validator
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Examples
|
|
||||||
###### IsURL
|
|
||||||
```go
|
|
||||||
println(govalidator.IsURL(`http://user@pass:domain.com/path/page`))
|
|
||||||
```
|
|
||||||
###### ToString
|
|
||||||
```go
|
|
||||||
type User struct {
|
|
||||||
FirstName string
|
|
||||||
LastName string
|
|
||||||
}
|
|
||||||
|
|
||||||
str := govalidator.ToString(&User{"John", "Juan"})
|
|
||||||
println(str)
|
|
||||||
```
|
|
||||||
###### Each, Map, Filter, Count for slices
|
|
||||||
Each iterates over the slice/array and calls Iterator for every item
|
|
||||||
```go
|
|
||||||
data := []interface{}{1, 2, 3, 4, 5}
|
|
||||||
var fn govalidator.Iterator = func(value interface{}, index int) {
|
|
||||||
println(value.(int))
|
|
||||||
}
|
|
||||||
govalidator.Each(data, fn)
|
|
||||||
```
|
|
||||||
```go
|
|
||||||
data := []interface{}{1, 2, 3, 4, 5}
|
|
||||||
var fn govalidator.ResultIterator = func(value interface{}, index int) interface{} {
|
|
||||||
return value.(int) * 3
|
|
||||||
}
|
|
||||||
_ = govalidator.Map(data, fn) // result = []interface{}{1, 6, 9, 12, 15}
|
|
||||||
```
|
|
||||||
```go
|
|
||||||
data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
|
|
||||||
var fn govalidator.ConditionIterator = func(value interface{}, index int) bool {
|
|
||||||
return value.(int)%2 == 0
|
|
||||||
}
|
|
||||||
_ = govalidator.Filter(data, fn) // result = []interface{}{2, 4, 6, 8, 10}
|
|
||||||
_ = govalidator.Count(data, fn) // result = 5
|
|
||||||
```
|
|
||||||
###### ValidateStruct [#2](https://github.com/asaskevich/govalidator/pull/2)
|
|
||||||
If you want to validate structs, you can use tag `valid` for any field in your structure. All validators used with this field in one tag are separated by comma. If you want to skip validation, place `-` in your tag. If you need a validator that is not on the list below, you can add it like this:
|
|
||||||
```go
|
|
||||||
govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
|
|
||||||
return str == "duck"
|
|
||||||
})
|
|
||||||
```
|
|
||||||
For completely custom validators (interface-based), see below.
|
|
||||||
|
|
||||||
Here is a list of available validators for struct fields (validator - used function):
|
|
||||||
```go
|
|
||||||
"email": IsEmail,
|
|
||||||
"url": IsURL,
|
|
||||||
"dialstring": IsDialString,
|
|
||||||
"requrl": IsRequestURL,
|
|
||||||
"requri": IsRequestURI,
|
|
||||||
"alpha": IsAlpha,
|
|
||||||
"utfletter": IsUTFLetter,
|
|
||||||
"alphanum": IsAlphanumeric,
|
|
||||||
"utfletternum": IsUTFLetterNumeric,
|
|
||||||
"numeric": IsNumeric,
|
|
||||||
"utfnumeric": IsUTFNumeric,
|
|
||||||
"utfdigit": IsUTFDigit,
|
|
||||||
"hexadecimal": IsHexadecimal,
|
|
||||||
"hexcolor": IsHexcolor,
|
|
||||||
"rgbcolor": IsRGBcolor,
|
|
||||||
"lowercase": IsLowerCase,
|
|
||||||
"uppercase": IsUpperCase,
|
|
||||||
"int": IsInt,
|
|
||||||
"float": IsFloat,
|
|
||||||
"null": IsNull,
|
|
||||||
"uuid": IsUUID,
|
|
||||||
"uuidv3": IsUUIDv3,
|
|
||||||
"uuidv4": IsUUIDv4,
|
|
||||||
"uuidv5": IsUUIDv5,
|
|
||||||
"creditcard": IsCreditCard,
|
|
||||||
"isbn10": IsISBN10,
|
|
||||||
"isbn13": IsISBN13,
|
|
||||||
"json": IsJSON,
|
|
||||||
"multibyte": IsMultibyte,
|
|
||||||
"ascii": IsASCII,
|
|
||||||
"printableascii": IsPrintableASCII,
|
|
||||||
"fullwidth": IsFullWidth,
|
|
||||||
"halfwidth": IsHalfWidth,
|
|
||||||
"variablewidth": IsVariableWidth,
|
|
||||||
"base64": IsBase64,
|
|
||||||
"datauri": IsDataURI,
|
|
||||||
"ip": IsIP,
|
|
||||||
"port": IsPort,
|
|
||||||
"ipv4": IsIPv4,
|
|
||||||
"ipv6": IsIPv6,
|
|
||||||
"dns": IsDNSName,
|
|
||||||
"host": IsHost,
|
|
||||||
"mac": IsMAC,
|
|
||||||
"latitude": IsLatitude,
|
|
||||||
"longitude": IsLongitude,
|
|
||||||
"ssn": IsSSN,
|
|
||||||
"semver": IsSemver,
|
|
||||||
"rfc3339": IsRFC3339,
|
|
||||||
"rfc3339WithoutZone": IsRFC3339WithoutZone,
|
|
||||||
"ISO3166Alpha2": IsISO3166Alpha2,
|
|
||||||
"ISO3166Alpha3": IsISO3166Alpha3,
|
|
||||||
```
|
|
||||||
Validators with parameters
|
|
||||||
|
|
||||||
```go
|
|
||||||
"range(min|max)": Range,
|
|
||||||
"length(min|max)": ByteLength,
|
|
||||||
"runelength(min|max)": RuneLength,
|
|
||||||
"stringlength(min|max)": StringLength,
|
|
||||||
"matches(pattern)": StringMatches,
|
|
||||||
"in(string1|string2|...|stringN)": IsIn,
|
|
||||||
"rsapub(keylength)" : IsRsaPub,
|
|
||||||
```
|
|
||||||
|
|
||||||
And here is small example of usage:
|
|
||||||
```go
|
|
||||||
type Post struct {
|
|
||||||
Title string `valid:"alphanum,required"`
|
|
||||||
Message string `valid:"duck,ascii"`
|
|
||||||
Message2 string `valid:"animal(dog)"`
|
|
||||||
AuthorIP string `valid:"ipv4"`
|
|
||||||
Date string `valid:"-"`
|
|
||||||
}
|
|
||||||
post := &Post{
|
|
||||||
Title: "My Example Post",
|
|
||||||
Message: "duck",
|
|
||||||
Message2: "dog",
|
|
||||||
AuthorIP: "123.234.54.3",
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add your own struct validation tags
|
|
||||||
govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
|
|
||||||
return str == "duck"
|
|
||||||
})
|
|
||||||
|
|
||||||
// Add your own struct validation tags with parameter
|
|
||||||
govalidator.ParamTagMap["animal"] = govalidator.ParamValidator(func(str string, params ...string) bool {
|
|
||||||
species := params[0]
|
|
||||||
return str == species
|
|
||||||
})
|
|
||||||
govalidator.ParamTagRegexMap["animal"] = regexp.MustCompile("^animal\\((\\w+)\\)$")
|
|
||||||
|
|
||||||
result, err := govalidator.ValidateStruct(post)
|
|
||||||
if err != nil {
|
|
||||||
println("error: " + err.Error())
|
|
||||||
}
|
|
||||||
println(result)
|
|
||||||
```
|
|
||||||
###### WhiteList
|
|
||||||
```go
|
|
||||||
// Remove all characters from string ignoring characters between "a" and "z"
|
|
||||||
println(govalidator.WhiteList("a3a43a5a4a3a2a23a4a5a4a3a4", "a-z") == "aaaaaaaaaaaa")
|
|
||||||
```
|
|
||||||
|
|
||||||
###### Custom validation functions
|
|
||||||
Custom validation using your own domain specific validators is also available - here's an example of how to use it:
|
|
||||||
```go
|
|
||||||
import "github.com/asaskevich/govalidator"
|
|
||||||
|
|
||||||
type CustomByteArray [6]byte // custom types are supported and can be validated
|
|
||||||
|
|
||||||
type StructWithCustomByteArray struct {
|
|
||||||
ID CustomByteArray `valid:"customByteArrayValidator,customMinLengthValidator"` // multiple custom validators are possible as well and will be evaluated in sequence
|
|
||||||
Email string `valid:"email"`
|
|
||||||
CustomMinLength int `valid:"-"`
|
|
||||||
}
|
|
||||||
|
|
||||||
govalidator.CustomTypeTagMap.Set("customByteArrayValidator", CustomTypeValidator(func(i interface{}, context interface{}) bool {
|
|
||||||
switch v := context.(type) { // you can type switch on the context interface being validated
|
|
||||||
case StructWithCustomByteArray:
|
|
||||||
// you can check and validate against some other field in the context,
|
|
||||||
// return early or not validate against the context at all – your choice
|
|
||||||
case SomeOtherType:
|
|
||||||
// ...
|
|
||||||
default:
|
|
||||||
// expecting some other type? Throw/panic here or continue
|
|
||||||
}
|
|
||||||
|
|
||||||
switch v := i.(type) { // type switch on the struct field being validated
|
|
||||||
case CustomByteArray:
|
|
||||||
for _, e := range v { // this validator checks that the byte array is not empty, i.e. not all zeroes
|
|
||||||
if e != 0 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}))
|
|
||||||
govalidator.CustomTypeTagMap.Set("customMinLengthValidator", CustomTypeValidator(func(i interface{}, context interface{}) bool {
|
|
||||||
switch v := context.(type) { // this validates a field against the value in another field, i.e. dependent validation
|
|
||||||
case StructWithCustomByteArray:
|
|
||||||
return len(v.ID) >= v.CustomMinLength
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}))
|
|
||||||
```
|
|
||||||
|
|
||||||
###### Custom error messages
|
|
||||||
Custom error messages are supported via annotations by adding the `~` separator - here's an example of how to use it:
|
|
||||||
```go
|
|
||||||
type Ticket struct {
|
|
||||||
Id int64 `json:"id"`
|
|
||||||
FirstName string `json:"firstname" valid:"required~First name is blank"`
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Notes
|
|
||||||
Documentation is available here: [godoc.org](https://godoc.org/github.com/asaskevich/govalidator).
|
|
||||||
Full information about code coverage is also available here: [govalidator on gocover.io](http://gocover.io/github.com/asaskevich/govalidator).
|
|
||||||
|
|
||||||
#### Support
|
|
||||||
If you do have a contribution to the package, feel free to create a Pull Request or an Issue.
|
|
||||||
|
|
||||||
#### What to contribute
|
|
||||||
If you don't know what to do, there are some features and functions that need to be done
|
|
||||||
|
|
||||||
- [ ] Refactor code
|
|
||||||
- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check
|
|
||||||
- [ ] Create actual list of contributors and projects that currently using this package
|
|
||||||
- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
|
|
||||||
- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions)
|
|
||||||
- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new
|
|
||||||
- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc
|
|
||||||
- [ ] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
|
|
||||||
- [ ] Implement fuzzing testing
|
|
||||||
- [ ] Implement some struct/map/array utilities
|
|
||||||
- [ ] Implement map/array validation
|
|
||||||
- [ ] Implement benchmarking
|
|
||||||
- [ ] Implement batch of examples
|
|
||||||
- [ ] Look at forks for new features and fixes
|
|
||||||
|
|
||||||
#### Advice
|
|
||||||
Feel free to create what you want, but keep in mind when you implement new features:
|
|
||||||
- Code must be clear and readable, names of variables/constants clearly describes what they are doing
|
|
||||||
- Public functions must be documented and described in source file and added to README.md to the list of available functions
|
|
||||||
- There are must be unit-tests for any new functions and improvements
|
|
||||||
|
|
||||||
## Credits
|
|
||||||
### Contributors
|
|
||||||
|
|
||||||
This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)].
|
|
||||||
|
|
||||||
#### Special thanks to [contributors](https://github.com/asaskevich/govalidator/graphs/contributors)
|
|
||||||
* [Daniel Lohse](https://github.com/annismckenzie)
|
|
||||||
* [Attila Oláh](https://github.com/attilaolah)
|
|
||||||
* [Daniel Korner](https://github.com/Dadie)
|
|
||||||
* [Steven Wilkin](https://github.com/stevenwilkin)
|
|
||||||
* [Deiwin Sarjas](https://github.com/deiwin)
|
|
||||||
* [Noah Shibley](https://github.com/slugmobile)
|
|
||||||
* [Nathan Davies](https://github.com/nathj07)
|
|
||||||
* [Matt Sanford](https://github.com/mzsanford)
|
|
||||||
* [Simon ccl1115](https://github.com/ccl1115)
|
|
||||||
|
|
||||||
<a href="graphs/contributors"><img src="https://opencollective.com/govalidator/contributors.svg?width=890" /></a>
|
|
||||||
|
|
||||||
|
|
||||||
### Backers
|
|
||||||
|
|
||||||
Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/govalidator#backer)]
|
|
||||||
|
|
||||||
<a href="https://opencollective.com/govalidator#backers" target="_blank"><img src="https://opencollective.com/govalidator/backers.svg?width=890"></a>
|
|
||||||
|
|
||||||
|
|
||||||
### Sponsors
|
|
||||||
|
|
||||||
Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/govalidator#sponsor)]
|
|
||||||
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/0/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/0/avatar.svg"></a>
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/1/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/1/avatar.svg"></a>
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/2/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/2/avatar.svg"></a>
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/3/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/3/avatar.svg"></a>
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/4/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/4/avatar.svg"></a>
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/5/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/5/avatar.svg"></a>
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/6/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/6/avatar.svg"></a>
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/7/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/7/avatar.svg"></a>
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/8/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/8/avatar.svg"></a>
|
|
||||||
<a href="https://opencollective.com/govalidator/sponsor/9/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/9/avatar.svg"></a>
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## License
|
|
||||||
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_large)
|
|
58
vendor/github.com/asaskevich/govalidator/arrays.go
generated
vendored
58
vendor/github.com/asaskevich/govalidator/arrays.go
generated
vendored
@ -1,58 +0,0 @@
|
|||||||
package govalidator
|
|
||||||
|
|
||||||
// Iterator is the function that accepts element of slice/array and its index
|
|
||||||
type Iterator func(interface{}, int)
|
|
||||||
|
|
||||||
// ResultIterator is the function that accepts element of slice/array and its index and returns any result
|
|
||||||
type ResultIterator func(interface{}, int) interface{}
|
|
||||||
|
|
||||||
// ConditionIterator is the function that accepts element of slice/array and its index and returns boolean
|
|
||||||
type ConditionIterator func(interface{}, int) bool
|
|
||||||
|
|
||||||
// Each iterates over the slice and apply Iterator to every item
|
|
||||||
func Each(array []interface{}, iterator Iterator) {
|
|
||||||
for index, data := range array {
|
|
||||||
iterator(data, index)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Map iterates over the slice and apply ResultIterator to every item. Returns new slice as a result.
|
|
||||||
func Map(array []interface{}, iterator ResultIterator) []interface{} {
|
|
||||||
var result = make([]interface{}, len(array))
|
|
||||||
for index, data := range array {
|
|
||||||
result[index] = iterator(data, index)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find iterates over the slice and apply ConditionIterator to every item. Returns first item that meet ConditionIterator or nil otherwise.
|
|
||||||
func Find(array []interface{}, iterator ConditionIterator) interface{} {
|
|
||||||
for index, data := range array {
|
|
||||||
if iterator(data, index) {
|
|
||||||
return data
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Filter iterates over the slice and apply ConditionIterator to every item. Returns new slice.
|
|
||||||
func Filter(array []interface{}, iterator ConditionIterator) []interface{} {
|
|
||||||
var result = make([]interface{}, 0)
|
|
||||||
for index, data := range array {
|
|
||||||
if iterator(data, index) {
|
|
||||||
result = append(result, data)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// Count iterates over the slice and apply ConditionIterator to every item. Returns count of items that meets ConditionIterator.
|
|
||||||
func Count(array []interface{}, iterator ConditionIterator) int {
|
|
||||||
count := 0
|
|
||||||
for index, data := range array {
|
|
||||||
if iterator(data, index) {
|
|
||||||
count = count + 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return count
|
|
||||||
}
|
|
64
vendor/github.com/asaskevich/govalidator/converter.go
generated
vendored
64
vendor/github.com/asaskevich/govalidator/converter.go
generated
vendored
@ -1,64 +0,0 @@
|
|||||||
package govalidator
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"reflect"
|
|
||||||
"strconv"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ToString convert the input to a string.
|
|
||||||
func ToString(obj interface{}) string {
|
|
||||||
res := fmt.Sprintf("%v", obj)
|
|
||||||
return string(res)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ToJSON convert the input to a valid JSON string
|
|
||||||
func ToJSON(obj interface{}) (string, error) {
|
|
||||||
res, err := json.Marshal(obj)
|
|
||||||
if err != nil {
|
|
||||||
res = []byte("")
|
|
||||||
}
|
|
||||||
return string(res), err
|
|
||||||
}
|
|
||||||
|
|
||||||
// ToFloat convert the input string to a float, or 0.0 if the input is not a float.
|
|
||||||
func ToFloat(str string) (float64, error) {
|
|
||||||
res, err := strconv.ParseFloat(str, 64)
|
|
||||||
if err != nil {
|
|
||||||
res = 0.0
|
|
||||||
}
|
|
||||||
return res, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// ToInt convert the input string or any int type to an integer type 64, or 0 if the input is not an integer.
|
|
||||||
func ToInt(value interface{}) (res int64, err error) {
|
|
||||||
val := reflect.ValueOf(value)
|
|
||||||
|
|
||||||
switch value.(type) {
|
|
||||||
case int, int8, int16, int32, int64:
|
|
||||||
res = val.Int()
|
|
||||||
case uint, uint8, uint16, uint32, uint64:
|
|
||||||
res = int64(val.Uint())
|
|
||||||
case string:
|
|
||||||
if IsInt(val.String()) {
|
|
||||||
res, err = strconv.ParseInt(val.String(), 0, 64)
|
|
||||||
if err != nil {
|
|
||||||
res = 0
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
err = fmt.Errorf("math: square root of negative number %g", value)
|
|
||||||
res = 0
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
err = fmt.Errorf("math: square root of negative number %g", value)
|
|
||||||
res = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// ToBoolean convert the input string to a boolean.
|
|
||||||
func ToBoolean(str string) (bool, error) {
|
|
||||||
return strconv.ParseBool(str)
|
|
||||||
}
|
|
43
vendor/github.com/asaskevich/govalidator/error.go
generated
vendored
43
vendor/github.com/asaskevich/govalidator/error.go
generated
vendored
@ -1,43 +0,0 @@
|
|||||||
package govalidator
|
|
||||||
|
|
||||||
import "strings"
|
|
||||||
|
|
||||||
// Errors is an array of multiple errors and conforms to the error interface.
|
|
||||||
type Errors []error
|
|
||||||
|
|
||||||
// Errors returns itself.
|
|
||||||
func (es Errors) Errors() []error {
|
|
||||||
return es
|
|
||||||
}
|
|
||||||
|
|
||||||
func (es Errors) Error() string {
|
|
||||||
var errs []string
|
|
||||||
for _, e := range es {
|
|
||||||
errs = append(errs, e.Error())
|
|
||||||
}
|
|
||||||
return strings.Join(errs, ";")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Error encapsulates a name, an error and whether there's a custom error message or not.
|
|
||||||
type Error struct {
|
|
||||||
Name string
|
|
||||||
Err error
|
|
||||||
CustomErrorMessageExists bool
|
|
||||||
|
|
||||||
// Validator indicates the name of the validator that failed
|
|
||||||
Validator string
|
|
||||||
Path []string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e Error) Error() string {
|
|
||||||
if e.CustomErrorMessageExists {
|
|
||||||
return e.Err.Error()
|
|
||||||
}
|
|
||||||
|
|
||||||
errName := e.Name
|
|
||||||
if len(e.Path) > 0 {
|
|
||||||
errName = strings.Join(append(e.Path, e.Name), ".")
|
|
||||||
}
|
|
||||||
|
|
||||||
return errName + ": " + e.Err.Error()
|
|
||||||
}
|
|
97
vendor/github.com/asaskevich/govalidator/numerics.go
generated
vendored
97
vendor/github.com/asaskevich/govalidator/numerics.go
generated
vendored
@ -1,97 +0,0 @@
|
|||||||
package govalidator
|
|
||||||
|
|
||||||
import (
|
|
||||||
"math"
|
|
||||||
"reflect"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Abs returns absolute value of number
|
|
||||||
func Abs(value float64) float64 {
|
|
||||||
return math.Abs(value)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sign returns signum of number: 1 in case of value > 0, -1 in case of value < 0, 0 otherwise
|
|
||||||
func Sign(value float64) float64 {
|
|
||||||
if value > 0 {
|
|
||||||
return 1
|
|
||||||
} else if value < 0 {
|
|
||||||
return -1
|
|
||||||
} else {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsNegative returns true if value < 0
|
|
||||||
func IsNegative(value float64) bool {
|
|
||||||
return value < 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsPositive returns true if value > 0
|
|
||||||
func IsPositive(value float64) bool {
|
|
||||||
return value > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsNonNegative returns true if value >= 0
|
|
||||||
func IsNonNegative(value float64) bool {
|
|
||||||
return value >= 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsNonPositive returns true if value <= 0
|
|
||||||
func IsNonPositive(value float64) bool {
|
|
||||||
return value <= 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// InRange returns true if value lies between left and right border
|
|
||||||
func InRangeInt(value, left, right interface{}) bool {
|
|
||||||
value64, _ := ToInt(value)
|
|
||||||
left64, _ := ToInt(left)
|
|
||||||
right64, _ := ToInt(right)
|
|
||||||
if left64 > right64 {
|
|
||||||
left64, right64 = right64, left64
|
|
||||||
}
|
|
||||||
return value64 >= left64 && value64 <= right64
|
|
||||||
}
|
|
||||||
|
|
||||||
// InRange returns true if value lies between left and right border
|
|
||||||
func InRangeFloat32(value, left, right float32) bool {
|
|
||||||
if left > right {
|
|
||||||
left, right = right, left
|
|
||||||
}
|
|
||||||
return value >= left && value <= right
|
|
||||||
}
|
|
||||||
|
|
||||||
// InRange returns true if value lies between left and right border
|
|
||||||
func InRangeFloat64(value, left, right float64) bool {
|
|
||||||
if left > right {
|
|
||||||
left, right = right, left
|
|
||||||
}
|
|
||||||
return value >= left && value <= right
|
|
||||||
}
|
|
||||||
|
|
||||||
// InRange returns true if value lies between left and right border, generic type to handle int, float32 or float64, all types must the same type
|
|
||||||
func InRange(value interface{}, left interface{}, right interface{}) bool {
|
|
||||||
|
|
||||||
reflectValue := reflect.TypeOf(value).Kind()
|
|
||||||
reflectLeft := reflect.TypeOf(left).Kind()
|
|
||||||
reflectRight := reflect.TypeOf(right).Kind()
|
|
||||||
|
|
||||||
if reflectValue == reflect.Int && reflectLeft == reflect.Int && reflectRight == reflect.Int {
|
|
||||||
return InRangeInt(value.(int), left.(int), right.(int))
|
|
||||||
} else if reflectValue == reflect.Float32 && reflectLeft == reflect.Float32 && reflectRight == reflect.Float32 {
|
|
||||||
return InRangeFloat32(value.(float32), left.(float32), right.(float32))
|
|
||||||
} else if reflectValue == reflect.Float64 && reflectLeft == reflect.Float64 && reflectRight == reflect.Float64 {
|
|
||||||
return InRangeFloat64(value.(float64), left.(float64), right.(float64))
|
|
||||||
} else {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsWhole returns true if value is whole number
|
|
||||||
func IsWhole(value float64) bool {
|
|
||||||
return math.Remainder(value, 1) == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsNatural returns true if value is natural number (positive and whole)
|
|
||||||
func IsNatural(value float64) bool {
|
|
||||||
return IsWhole(value) && IsPositive(value)
|
|
||||||
}
|
|
101
vendor/github.com/asaskevich/govalidator/patterns.go
generated
vendored
101
vendor/github.com/asaskevich/govalidator/patterns.go
generated
vendored
@ -1,101 +0,0 @@
|
|||||||
package govalidator
|
|
||||||
|
|
||||||
import "regexp"
|
|
||||||
|
|
||||||
// Basic regular expressions for validating strings
|
|
||||||
const (
|
|
||||||
Email string = "^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
|
|
||||||
CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11})$"
|
|
||||||
ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$"
|
|
||||||
ISBN13 string = "^(?:[0-9]{13})$"
|
|
||||||
UUID3 string = "^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$"
|
|
||||||
UUID4 string = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
|
|
||||||
UUID5 string = "^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
|
|
||||||
UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
|
|
||||||
Alpha string = "^[a-zA-Z]+$"
|
|
||||||
Alphanumeric string = "^[a-zA-Z0-9]+$"
|
|
||||||
Numeric string = "^[0-9]+$"
|
|
||||||
Int string = "^(?:[-+]?(?:0|[1-9][0-9]*))$"
|
|
||||||
Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$"
|
|
||||||
Hexadecimal string = "^[0-9a-fA-F]+$"
|
|
||||||
Hexcolor string = "^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$"
|
|
||||||
RGBcolor string = "^rgb\\(\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*\\)$"
|
|
||||||
ASCII string = "^[\x00-\x7F]+$"
|
|
||||||
Multibyte string = "[^\x00-\x7F]"
|
|
||||||
FullWidth string = "[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
|
|
||||||
HalfWidth string = "[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
|
|
||||||
Base64 string = "^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=|[A-Za-z0-9+\\/]{4})$"
|
|
||||||
PrintableASCII string = "^[\x20-\x7E]+$"
|
|
||||||
DataURI string = "^data:.+\\/(.+);base64$"
|
|
||||||
Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$"
|
|
||||||
Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$"
|
|
||||||
DNSName string = `^([a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62})*[\._]?$`
|
|
||||||
IP string = `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))`
|
|
||||||
URLSchema string = `((ftp|tcp|udp|wss?|https?):\/\/)`
|
|
||||||
URLUsername string = `(\S+(:\S*)?@)`
|
|
||||||
URLPath string = `((\/|\?|#)[^\s]*)`
|
|
||||||
URLPort string = `(:(\d{1,5}))`
|
|
||||||
URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3])(\.(1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-4]))`
|
|
||||||
URLSubdomain string = `((www\.)|([a-zA-Z0-9]+([-_\.]?[a-zA-Z0-9])*[a-zA-Z0-9]\.[a-zA-Z0-9]+))`
|
|
||||||
URL string = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$`
|
|
||||||
SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$`
|
|
||||||
WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
|
|
||||||
UnixPath string = `^(/[^/\x00]*)+/?$`
|
|
||||||
Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$"
|
|
||||||
tagName string = "valid"
|
|
||||||
hasLowerCase string = ".*[[:lower:]]"
|
|
||||||
hasUpperCase string = ".*[[:upper:]]"
|
|
||||||
hasWhitespace string = ".*[[:space:]]"
|
|
||||||
hasWhitespaceOnly string = "^[[:space:]]+$"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Used by IsFilePath func
|
|
||||||
const (
|
|
||||||
// Unknown is unresolved OS type
|
|
||||||
Unknown = iota
|
|
||||||
// Win is Windows type
|
|
||||||
Win
|
|
||||||
// Unix is *nix OS types
|
|
||||||
Unix
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
userRegexp = regexp.MustCompile("^[a-zA-Z0-9!#$%&'*+/=?^_`{|}~.-]+$")
|
|
||||||
hostRegexp = regexp.MustCompile("^[^\\s]+\\.[^\\s]+$")
|
|
||||||
userDotRegexp = regexp.MustCompile("(^[.]{1})|([.]{1}$)|([.]{2,})")
|
|
||||||
rxEmail = regexp.MustCompile(Email)
|
|
||||||
rxCreditCard = regexp.MustCompile(CreditCard)
|
|
||||||
rxISBN10 = regexp.MustCompile(ISBN10)
|
|
||||||
rxISBN13 = regexp.MustCompile(ISBN13)
|
|
||||||
rxUUID3 = regexp.MustCompile(UUID3)
|
|
||||||
rxUUID4 = regexp.MustCompile(UUID4)
|
|
||||||
rxUUID5 = regexp.MustCompile(UUID5)
|
|
||||||
rxUUID = regexp.MustCompile(UUID)
|
|
||||||
rxAlpha = regexp.MustCompile(Alpha)
|
|
||||||
rxAlphanumeric = regexp.MustCompile(Alphanumeric)
|
|
||||||
rxNumeric = regexp.MustCompile(Numeric)
|
|
||||||
rxInt = regexp.MustCompile(Int)
|
|
||||||
rxFloat = regexp.MustCompile(Float)
|
|
||||||
rxHexadecimal = regexp.MustCompile(Hexadecimal)
|
|
||||||
rxHexcolor = regexp.MustCompile(Hexcolor)
|
|
||||||
rxRGBcolor = regexp.MustCompile(RGBcolor)
|
|
||||||
rxASCII = regexp.MustCompile(ASCII)
|
|
||||||
rxPrintableASCII = regexp.MustCompile(PrintableASCII)
|
|
||||||
rxMultibyte = regexp.MustCompile(Multibyte)
|
|
||||||
rxFullWidth = regexp.MustCompile(FullWidth)
|
|
||||||
rxHalfWidth = regexp.MustCompile(HalfWidth)
|
|
||||||
rxBase64 = regexp.MustCompile(Base64)
|
|
||||||
rxDataURI = regexp.MustCompile(DataURI)
|
|
||||||
rxLatitude = regexp.MustCompile(Latitude)
|
|
||||||
rxLongitude = regexp.MustCompile(Longitude)
|
|
||||||
rxDNSName = regexp.MustCompile(DNSName)
|
|
||||||
rxURL = regexp.MustCompile(URL)
|
|
||||||
rxSSN = regexp.MustCompile(SSN)
|
|
||||||
rxWinPath = regexp.MustCompile(WinPath)
|
|
||||||
rxUnixPath = regexp.MustCompile(UnixPath)
|
|
||||||
rxSemver = regexp.MustCompile(Semver)
|
|
||||||
rxHasLowerCase = regexp.MustCompile(hasLowerCase)
|
|
||||||
rxHasUpperCase = regexp.MustCompile(hasUpperCase)
|
|
||||||
rxHasWhitespace = regexp.MustCompile(hasWhitespace)
|
|
||||||
rxHasWhitespaceOnly = regexp.MustCompile(hasWhitespaceOnly)
|
|
||||||
)
|
|
636
vendor/github.com/asaskevich/govalidator/types.go
generated
vendored
636
vendor/github.com/asaskevich/govalidator/types.go
generated
vendored
@ -1,636 +0,0 @@
|
|||||||
package govalidator
|
|
||||||
|
|
||||||
import (
|
|
||||||
"reflect"
|
|
||||||
"regexp"
|
|
||||||
"sort"
|
|
||||||
"sync"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Validator is a wrapper for a validator function that returns bool and accepts string.
|
|
||||||
type Validator func(str string) bool
|
|
||||||
|
|
||||||
// CustomTypeValidator is a wrapper for validator functions that returns bool and accepts any type.
|
|
||||||
// The second parameter should be the context (in the case of validating a struct: the whole object being validated).
|
|
||||||
type CustomTypeValidator func(i interface{}, o interface{}) bool
|
|
||||||
|
|
||||||
// ParamValidator is a wrapper for validator functions that accepts additional parameters.
|
|
||||||
type ParamValidator func(str string, params ...string) bool
|
|
||||||
type tagOptionsMap map[string]tagOption
|
|
||||||
|
|
||||||
func (t tagOptionsMap) orderedKeys() []string {
|
|
||||||
var keys []string
|
|
||||||
for k := range t {
|
|
||||||
keys = append(keys, k)
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Slice(keys, func(a, b int) bool {
|
|
||||||
return t[keys[a]].order < t[keys[b]].order
|
|
||||||
})
|
|
||||||
|
|
||||||
return keys
|
|
||||||
}
|
|
||||||
|
|
||||||
type tagOption struct {
|
|
||||||
name string
|
|
||||||
customErrorMessage string
|
|
||||||
order int
|
|
||||||
}
|
|
||||||
|
|
||||||
// UnsupportedTypeError is a wrapper for reflect.Type
|
|
||||||
type UnsupportedTypeError struct {
|
|
||||||
Type reflect.Type
|
|
||||||
}
|
|
||||||
|
|
||||||
// stringValues is a slice of reflect.Value holding *reflect.StringValue.
|
|
||||||
// It implements the methods to sort by string.
|
|
||||||
type stringValues []reflect.Value
|
|
||||||
|
|
||||||
// ParamTagMap is a map of functions accept variants parameters
|
|
||||||
var ParamTagMap = map[string]ParamValidator{
|
|
||||||
"length": ByteLength,
|
|
||||||
"range": Range,
|
|
||||||
"runelength": RuneLength,
|
|
||||||
"stringlength": StringLength,
|
|
||||||
"matches": StringMatches,
|
|
||||||
"in": isInRaw,
|
|
||||||
"rsapub": IsRsaPub,
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParamTagRegexMap maps param tags to their respective regexes.
|
|
||||||
var ParamTagRegexMap = map[string]*regexp.Regexp{
|
|
||||||
"range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"),
|
|
||||||
"length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"),
|
|
||||||
"runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"),
|
|
||||||
"stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"),
|
|
||||||
"in": regexp.MustCompile(`^in\((.*)\)`),
|
|
||||||
"matches": regexp.MustCompile(`^matches\((.+)\)$`),
|
|
||||||
"rsapub": regexp.MustCompile("^rsapub\\((\\d+)\\)$"),
|
|
||||||
}
|
|
||||||
|
|
||||||
type customTypeTagMap struct {
|
|
||||||
validators map[string]CustomTypeValidator
|
|
||||||
|
|
||||||
sync.RWMutex
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tm *customTypeTagMap) Get(name string) (CustomTypeValidator, bool) {
|
|
||||||
tm.RLock()
|
|
||||||
defer tm.RUnlock()
|
|
||||||
v, ok := tm.validators[name]
|
|
||||||
return v, ok
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tm *customTypeTagMap) Set(name string, ctv CustomTypeValidator) {
|
|
||||||
tm.Lock()
|
|
||||||
defer tm.Unlock()
|
|
||||||
tm.validators[name] = ctv
|
|
||||||
}
|
|
||||||
|
|
||||||
// CustomTypeTagMap is a map of functions that can be used as tags for ValidateStruct function.
|
|
||||||
// Use this to validate compound or custom types that need to be handled as a whole, e.g.
|
|
||||||
// `type UUID [16]byte` (this would be handled as an array of bytes).
|
|
||||||
var CustomTypeTagMap = &customTypeTagMap{validators: make(map[string]CustomTypeValidator)}
|
|
||||||
|
|
||||||
// TagMap is a map of functions, that can be used as tags for ValidateStruct function.
|
|
||||||
var TagMap = map[string]Validator{
|
|
||||||
"email": IsEmail,
|
|
||||||
"url": IsURL,
|
|
||||||
"dialstring": IsDialString,
|
|
||||||
"requrl": IsRequestURL,
|
|
||||||
"requri": IsRequestURI,
|
|
||||||
"alpha": IsAlpha,
|
|
||||||
"utfletter": IsUTFLetter,
|
|
||||||
"alphanum": IsAlphanumeric,
|
|
||||||
"utfletternum": IsUTFLetterNumeric,
|
|
||||||
"numeric": IsNumeric,
|
|
||||||
"utfnumeric": IsUTFNumeric,
|
|
||||||
"utfdigit": IsUTFDigit,
|
|
||||||
"hexadecimal": IsHexadecimal,
|
|
||||||
"hexcolor": IsHexcolor,
|
|
||||||
"rgbcolor": IsRGBcolor,
|
|
||||||
"lowercase": IsLowerCase,
|
|
||||||
"uppercase": IsUpperCase,
|
|
||||||
"int": IsInt,
|
|
||||||
"float": IsFloat,
|
|
||||||
"null": IsNull,
|
|
||||||
"uuid": IsUUID,
|
|
||||||
"uuidv3": IsUUIDv3,
|
|
||||||
"uuidv4": IsUUIDv4,
|
|
||||||
"uuidv5": IsUUIDv5,
|
|
||||||
"creditcard": IsCreditCard,
|
|
||||||
"isbn10": IsISBN10,
|
|
||||||
"isbn13": IsISBN13,
|
|
||||||
"json": IsJSON,
|
|
||||||
"multibyte": IsMultibyte,
|
|
||||||
"ascii": IsASCII,
|
|
||||||
"printableascii": IsPrintableASCII,
|
|
||||||
"fullwidth": IsFullWidth,
|
|
||||||
"halfwidth": IsHalfWidth,
|
|
||||||
"variablewidth": IsVariableWidth,
|
|
||||||
"base64": IsBase64,
|
|
||||||
"datauri": IsDataURI,
|
|
||||||
"ip": IsIP,
|
|
||||||
"port": IsPort,
|
|
||||||
"ipv4": IsIPv4,
|
|
||||||
"ipv6": IsIPv6,
|
|
||||||
"dns": IsDNSName,
|
|
||||||
"host": IsHost,
|
|
||||||
"mac": IsMAC,
|
|
||||||
"latitude": IsLatitude,
|
|
||||||
"longitude": IsLongitude,
|
|
||||||
"ssn": IsSSN,
|
|
||||||
"semver": IsSemver,
|
|
||||||
"rfc3339": IsRFC3339,
|
|
||||||
"rfc3339WithoutZone": IsRFC3339WithoutZone,
|
|
||||||
"ISO3166Alpha2": IsISO3166Alpha2,
|
|
||||||
"ISO3166Alpha3": IsISO3166Alpha3,
|
|
||||||
"ISO4217": IsISO4217,
|
|
||||||
}
|
|
||||||
|
|
||||||
// ISO3166Entry stores country codes
|
|
||||||
type ISO3166Entry struct {
|
|
||||||
EnglishShortName string
|
|
||||||
FrenchShortName string
|
|
||||||
Alpha2Code string
|
|
||||||
Alpha3Code string
|
|
||||||
Numeric string
|
|
||||||
}
|
|
||||||
|
|
||||||
//ISO3166List based on https://www.iso.org/obp/ui/#search/code/ Code Type "Officially Assigned Codes"
|
|
||||||
var ISO3166List = []ISO3166Entry{
|
|
||||||
{"Afghanistan", "Afghanistan (l')", "AF", "AFG", "004"},
|
|
||||||
{"Albania", "Albanie (l')", "AL", "ALB", "008"},
|
|
||||||
{"Antarctica", "Antarctique (l')", "AQ", "ATA", "010"},
|
|
||||||
{"Algeria", "Algérie (l')", "DZ", "DZA", "012"},
|
|
||||||
{"American Samoa", "Samoa américaines (les)", "AS", "ASM", "016"},
|
|
||||||
{"Andorra", "Andorre (l')", "AD", "AND", "020"},
|
|
||||||
{"Angola", "Angola (l')", "AO", "AGO", "024"},
|
|
||||||
{"Antigua and Barbuda", "Antigua-et-Barbuda", "AG", "ATG", "028"},
|
|
||||||
{"Azerbaijan", "Azerbaïdjan (l')", "AZ", "AZE", "031"},
|
|
||||||
{"Argentina", "Argentine (l')", "AR", "ARG", "032"},
|
|
||||||
{"Australia", "Australie (l')", "AU", "AUS", "036"},
|
|
||||||
{"Austria", "Autriche (l')", "AT", "AUT", "040"},
|
|
||||||
{"Bahamas (the)", "Bahamas (les)", "BS", "BHS", "044"},
|
|
||||||
{"Bahrain", "Bahreïn", "BH", "BHR", "048"},
|
|
||||||
{"Bangladesh", "Bangladesh (le)", "BD", "BGD", "050"},
|
|
||||||
{"Armenia", "Arménie (l')", "AM", "ARM", "051"},
|
|
||||||
{"Barbados", "Barbade (la)", "BB", "BRB", "052"},
|
|
||||||
{"Belgium", "Belgique (la)", "BE", "BEL", "056"},
|
|
||||||
{"Bermuda", "Bermudes (les)", "BM", "BMU", "060"},
|
|
||||||
{"Bhutan", "Bhoutan (le)", "BT", "BTN", "064"},
|
|
||||||
{"Bolivia (Plurinational State of)", "Bolivie (État plurinational de)", "BO", "BOL", "068"},
|
|
||||||
{"Bosnia and Herzegovina", "Bosnie-Herzégovine (la)", "BA", "BIH", "070"},
|
|
||||||
{"Botswana", "Botswana (le)", "BW", "BWA", "072"},
|
|
||||||
{"Bouvet Island", "Bouvet (l'Île)", "BV", "BVT", "074"},
|
|
||||||
{"Brazil", "Brésil (le)", "BR", "BRA", "076"},
|
|
||||||
{"Belize", "Belize (le)", "BZ", "BLZ", "084"},
|
|
||||||
{"British Indian Ocean Territory (the)", "Indien (le Territoire britannique de l'océan)", "IO", "IOT", "086"},
|
|
||||||
{"Solomon Islands", "Salomon (Îles)", "SB", "SLB", "090"},
|
|
||||||
{"Virgin Islands (British)", "Vierges britanniques (les Îles)", "VG", "VGB", "092"},
|
|
||||||
{"Brunei Darussalam", "Brunéi Darussalam (le)", "BN", "BRN", "096"},
|
|
||||||
{"Bulgaria", "Bulgarie (la)", "BG", "BGR", "100"},
|
|
||||||
{"Myanmar", "Myanmar (le)", "MM", "MMR", "104"},
|
|
||||||
{"Burundi", "Burundi (le)", "BI", "BDI", "108"},
|
|
||||||
{"Belarus", "Bélarus (le)", "BY", "BLR", "112"},
|
|
||||||
{"Cambodia", "Cambodge (le)", "KH", "KHM", "116"},
|
|
||||||
{"Cameroon", "Cameroun (le)", "CM", "CMR", "120"},
|
|
||||||
{"Canada", "Canada (le)", "CA", "CAN", "124"},
|
|
||||||
{"Cabo Verde", "Cabo Verde", "CV", "CPV", "132"},
|
|
||||||
{"Cayman Islands (the)", "Caïmans (les Îles)", "KY", "CYM", "136"},
|
|
||||||
{"Central African Republic (the)", "République centrafricaine (la)", "CF", "CAF", "140"},
|
|
||||||
{"Sri Lanka", "Sri Lanka", "LK", "LKA", "144"},
|
|
||||||
{"Chad", "Tchad (le)", "TD", "TCD", "148"},
|
|
||||||
{"Chile", "Chili (le)", "CL", "CHL", "152"},
|
|
||||||
{"China", "Chine (la)", "CN", "CHN", "156"},
|
|
||||||
{"Taiwan (Province of China)", "Taïwan (Province de Chine)", "TW", "TWN", "158"},
|
|
||||||
{"Christmas Island", "Christmas (l'Île)", "CX", "CXR", "162"},
|
|
||||||
{"Cocos (Keeling) Islands (the)", "Cocos (les Îles)/ Keeling (les Îles)", "CC", "CCK", "166"},
|
|
||||||
{"Colombia", "Colombie (la)", "CO", "COL", "170"},
|
|
||||||
{"Comoros (the)", "Comores (les)", "KM", "COM", "174"},
|
|
||||||
{"Mayotte", "Mayotte", "YT", "MYT", "175"},
|
|
||||||
{"Congo (the)", "Congo (le)", "CG", "COG", "178"},
|
|
||||||
{"Congo (the Democratic Republic of the)", "Congo (la République démocratique du)", "CD", "COD", "180"},
|
|
||||||
{"Cook Islands (the)", "Cook (les Îles)", "CK", "COK", "184"},
|
|
||||||
{"Costa Rica", "Costa Rica (le)", "CR", "CRI", "188"},
|
|
||||||
{"Croatia", "Croatie (la)", "HR", "HRV", "191"},
|
|
||||||
{"Cuba", "Cuba", "CU", "CUB", "192"},
|
|
||||||
{"Cyprus", "Chypre", "CY", "CYP", "196"},
|
|
||||||
{"Czech Republic (the)", "tchèque (la République)", "CZ", "CZE", "203"},
|
|
||||||
{"Benin", "Bénin (le)", "BJ", "BEN", "204"},
|
|
||||||
{"Denmark", "Danemark (le)", "DK", "DNK", "208"},
|
|
||||||
{"Dominica", "Dominique (la)", "DM", "DMA", "212"},
|
|
||||||
{"Dominican Republic (the)", "dominicaine (la République)", "DO", "DOM", "214"},
|
|
||||||
{"Ecuador", "Équateur (l')", "EC", "ECU", "218"},
|
|
||||||
{"El Salvador", "El Salvador", "SV", "SLV", "222"},
|
|
||||||
{"Equatorial Guinea", "Guinée équatoriale (la)", "GQ", "GNQ", "226"},
|
|
||||||
{"Ethiopia", "Éthiopie (l')", "ET", "ETH", "231"},
|
|
||||||
{"Eritrea", "Érythrée (l')", "ER", "ERI", "232"},
|
|
||||||
{"Estonia", "Estonie (l')", "EE", "EST", "233"},
|
|
||||||
{"Faroe Islands (the)", "Féroé (les Îles)", "FO", "FRO", "234"},
|
|
||||||
{"Falkland Islands (the) [Malvinas]", "Falkland (les Îles)/Malouines (les Îles)", "FK", "FLK", "238"},
|
|
||||||
{"South Georgia and the South Sandwich Islands", "Géorgie du Sud-et-les Îles Sandwich du Sud (la)", "GS", "SGS", "239"},
|
|
||||||
{"Fiji", "Fidji (les)", "FJ", "FJI", "242"},
|
|
||||||
{"Finland", "Finlande (la)", "FI", "FIN", "246"},
|
|
||||||
{"Åland Islands", "Åland(les Îles)", "AX", "ALA", "248"},
|
|
||||||
{"France", "France (la)", "FR", "FRA", "250"},
|
|
||||||
{"French Guiana", "Guyane française (la )", "GF", "GUF", "254"},
|
|
||||||
{"French Polynesia", "Polynésie française (la)", "PF", "PYF", "258"},
|
|
||||||
{"French Southern Territories (the)", "Terres australes françaises (les)", "TF", "ATF", "260"},
|
|
||||||
{"Djibouti", "Djibouti", "DJ", "DJI", "262"},
|
|
||||||
{"Gabon", "Gabon (le)", "GA", "GAB", "266"},
|
|
||||||
{"Georgia", "Géorgie (la)", "GE", "GEO", "268"},
|
|
||||||
{"Gambia (the)", "Gambie (la)", "GM", "GMB", "270"},
|
|
||||||
{"Palestine, State of", "Palestine, État de", "PS", "PSE", "275"},
|
|
||||||
{"Germany", "Allemagne (l')", "DE", "DEU", "276"},
|
|
||||||
{"Ghana", "Ghana (le)", "GH", "GHA", "288"},
|
|
||||||
{"Gibraltar", "Gibraltar", "GI", "GIB", "292"},
|
|
||||||
{"Kiribati", "Kiribati", "KI", "KIR", "296"},
|
|
||||||
{"Greece", "Grèce (la)", "GR", "GRC", "300"},
|
|
||||||
{"Greenland", "Groenland (le)", "GL", "GRL", "304"},
|
|
||||||
{"Grenada", "Grenade (la)", "GD", "GRD", "308"},
|
|
||||||
{"Guadeloupe", "Guadeloupe (la)", "GP", "GLP", "312"},
|
|
||||||
{"Guam", "Guam", "GU", "GUM", "316"},
|
|
||||||
{"Guatemala", "Guatemala (le)", "GT", "GTM", "320"},
|
|
||||||
{"Guinea", "Guinée (la)", "GN", "GIN", "324"},
|
|
||||||
{"Guyana", "Guyana (le)", "GY", "GUY", "328"},
|
|
||||||
{"Haiti", "Haïti", "HT", "HTI", "332"},
|
|
||||||
{"Heard Island and McDonald Islands", "Heard-et-Îles MacDonald (l'Île)", "HM", "HMD", "334"},
|
|
||||||
{"Holy See (the)", "Saint-Siège (le)", "VA", "VAT", "336"},
|
|
||||||
{"Honduras", "Honduras (le)", "HN", "HND", "340"},
|
|
||||||
{"Hong Kong", "Hong Kong", "HK", "HKG", "344"},
|
|
||||||
{"Hungary", "Hongrie (la)", "HU", "HUN", "348"},
|
|
||||||
{"Iceland", "Islande (l')", "IS", "ISL", "352"},
|
|
||||||
{"India", "Inde (l')", "IN", "IND", "356"},
|
|
||||||
{"Indonesia", "Indonésie (l')", "ID", "IDN", "360"},
|
|
||||||
{"Iran (Islamic Republic of)", "Iran (République Islamique d')", "IR", "IRN", "364"},
|
|
||||||
{"Iraq", "Iraq (l')", "IQ", "IRQ", "368"},
|
|
||||||
{"Ireland", "Irlande (l')", "IE", "IRL", "372"},
|
|
||||||
{"Israel", "Israël", "IL", "ISR", "376"},
|
|
||||||
{"Italy", "Italie (l')", "IT", "ITA", "380"},
|
|
||||||
{"Côte d'Ivoire", "Côte d'Ivoire (la)", "CI", "CIV", "384"},
|
|
||||||
{"Jamaica", "Jamaïque (la)", "JM", "JAM", "388"},
|
|
||||||
{"Japan", "Japon (le)", "JP", "JPN", "392"},
|
|
||||||
{"Kazakhstan", "Kazakhstan (le)", "KZ", "KAZ", "398"},
|
|
||||||
{"Jordan", "Jordanie (la)", "JO", "JOR", "400"},
|
|
||||||
{"Kenya", "Kenya (le)", "KE", "KEN", "404"},
|
|
||||||
{"Korea (the Democratic People's Republic of)", "Corée (la République populaire démocratique de)", "KP", "PRK", "408"},
|
|
||||||
{"Korea (the Republic of)", "Corée (la République de)", "KR", "KOR", "410"},
|
|
||||||
{"Kuwait", "Koweït (le)", "KW", "KWT", "414"},
|
|
||||||
{"Kyrgyzstan", "Kirghizistan (le)", "KG", "KGZ", "417"},
|
|
||||||
{"Lao People's Democratic Republic (the)", "Lao, République démocratique populaire", "LA", "LAO", "418"},
|
|
||||||
{"Lebanon", "Liban (le)", "LB", "LBN", "422"},
|
|
||||||
{"Lesotho", "Lesotho (le)", "LS", "LSO", "426"},
|
|
||||||
{"Latvia", "Lettonie (la)", "LV", "LVA", "428"},
|
|
||||||
{"Liberia", "Libéria (le)", "LR", "LBR", "430"},
|
|
||||||
{"Libya", "Libye (la)", "LY", "LBY", "434"},
|
|
||||||
{"Liechtenstein", "Liechtenstein (le)", "LI", "LIE", "438"},
|
|
||||||
{"Lithuania", "Lituanie (la)", "LT", "LTU", "440"},
|
|
||||||
{"Luxembourg", "Luxembourg (le)", "LU", "LUX", "442"},
|
|
||||||
{"Macao", "Macao", "MO", "MAC", "446"},
|
|
||||||
{"Madagascar", "Madagascar", "MG", "MDG", "450"},
|
|
||||||
{"Malawi", "Malawi (le)", "MW", "MWI", "454"},
|
|
||||||
{"Malaysia", "Malaisie (la)", "MY", "MYS", "458"},
|
|
||||||
{"Maldives", "Maldives (les)", "MV", "MDV", "462"},
|
|
||||||
{"Mali", "Mali (le)", "ML", "MLI", "466"},
|
|
||||||
{"Malta", "Malte", "MT", "MLT", "470"},
|
|
||||||
{"Martinique", "Martinique (la)", "MQ", "MTQ", "474"},
|
|
||||||
{"Mauritania", "Mauritanie (la)", "MR", "MRT", "478"},
|
|
||||||
{"Mauritius", "Maurice", "MU", "MUS", "480"},
|
|
||||||
{"Mexico", "Mexique (le)", "MX", "MEX", "484"},
|
|
||||||
{"Monaco", "Monaco", "MC", "MCO", "492"},
|
|
||||||
{"Mongolia", "Mongolie (la)", "MN", "MNG", "496"},
|
|
||||||
{"Moldova (the Republic of)", "Moldova , République de", "MD", "MDA", "498"},
|
|
||||||
{"Montenegro", "Monténégro (le)", "ME", "MNE", "499"},
|
|
||||||
{"Montserrat", "Montserrat", "MS", "MSR", "500"},
|
|
||||||
{"Morocco", "Maroc (le)", "MA", "MAR", "504"},
|
|
||||||
{"Mozambique", "Mozambique (le)", "MZ", "MOZ", "508"},
|
|
||||||
{"Oman", "Oman", "OM", "OMN", "512"},
|
|
||||||
{"Namibia", "Namibie (la)", "NA", "NAM", "516"},
|
|
||||||
{"Nauru", "Nauru", "NR", "NRU", "520"},
|
|
||||||
{"Nepal", "Népal (le)", "NP", "NPL", "524"},
|
|
||||||
{"Netherlands (the)", "Pays-Bas (les)", "NL", "NLD", "528"},
|
|
||||||
{"Curaçao", "Curaçao", "CW", "CUW", "531"},
|
|
||||||
{"Aruba", "Aruba", "AW", "ABW", "533"},
|
|
||||||
{"Sint Maarten (Dutch part)", "Saint-Martin (partie néerlandaise)", "SX", "SXM", "534"},
|
|
||||||
{"Bonaire, Sint Eustatius and Saba", "Bonaire, Saint-Eustache et Saba", "BQ", "BES", "535"},
|
|
||||||
{"New Caledonia", "Nouvelle-Calédonie (la)", "NC", "NCL", "540"},
|
|
||||||
{"Vanuatu", "Vanuatu (le)", "VU", "VUT", "548"},
|
|
||||||
{"New Zealand", "Nouvelle-Zélande (la)", "NZ", "NZL", "554"},
|
|
||||||
{"Nicaragua", "Nicaragua (le)", "NI", "NIC", "558"},
|
|
||||||
{"Niger (the)", "Niger (le)", "NE", "NER", "562"},
|
|
||||||
{"Nigeria", "Nigéria (le)", "NG", "NGA", "566"},
|
|
||||||
{"Niue", "Niue", "NU", "NIU", "570"},
|
|
||||||
{"Norfolk Island", "Norfolk (l'Île)", "NF", "NFK", "574"},
|
|
||||||
{"Norway", "Norvège (la)", "NO", "NOR", "578"},
|
|
||||||
{"Northern Mariana Islands (the)", "Mariannes du Nord (les Îles)", "MP", "MNP", "580"},
|
|
||||||
{"United States Minor Outlying Islands (the)", "Îles mineures éloignées des États-Unis (les)", "UM", "UMI", "581"},
|
|
||||||
{"Micronesia (Federated States of)", "Micronésie (États fédérés de)", "FM", "FSM", "583"},
|
|
||||||
{"Marshall Islands (the)", "Marshall (Îles)", "MH", "MHL", "584"},
|
|
||||||
{"Palau", "Palaos (les)", "PW", "PLW", "585"},
|
|
||||||
{"Pakistan", "Pakistan (le)", "PK", "PAK", "586"},
|
|
||||||
{"Panama", "Panama (le)", "PA", "PAN", "591"},
|
|
||||||
{"Papua New Guinea", "Papouasie-Nouvelle-Guinée (la)", "PG", "PNG", "598"},
|
|
||||||
{"Paraguay", "Paraguay (le)", "PY", "PRY", "600"},
|
|
||||||
{"Peru", "Pérou (le)", "PE", "PER", "604"},
|
|
||||||
{"Philippines (the)", "Philippines (les)", "PH", "PHL", "608"},
|
|
||||||
{"Pitcairn", "Pitcairn", "PN", "PCN", "612"},
|
|
||||||
{"Poland", "Pologne (la)", "PL", "POL", "616"},
|
|
||||||
{"Portugal", "Portugal (le)", "PT", "PRT", "620"},
|
|
||||||
{"Guinea-Bissau", "Guinée-Bissau (la)", "GW", "GNB", "624"},
|
|
||||||
{"Timor-Leste", "Timor-Leste (le)", "TL", "TLS", "626"},
|
|
||||||
{"Puerto Rico", "Porto Rico", "PR", "PRI", "630"},
|
|
||||||
{"Qatar", "Qatar (le)", "QA", "QAT", "634"},
|
|
||||||
{"Réunion", "Réunion (La)", "RE", "REU", "638"},
|
|
||||||
{"Romania", "Roumanie (la)", "RO", "ROU", "642"},
|
|
||||||
{"Russian Federation (the)", "Russie (la Fédération de)", "RU", "RUS", "643"},
|
|
||||||
{"Rwanda", "Rwanda (le)", "RW", "RWA", "646"},
|
|
||||||
{"Saint Barthélemy", "Saint-Barthélemy", "BL", "BLM", "652"},
|
|
||||||
{"Saint Helena, Ascension and Tristan da Cunha", "Sainte-Hélène, Ascension et Tristan da Cunha", "SH", "SHN", "654"},
|
|
||||||
{"Saint Kitts and Nevis", "Saint-Kitts-et-Nevis", "KN", "KNA", "659"},
|
|
||||||
{"Anguilla", "Anguilla", "AI", "AIA", "660"},
|
|
||||||
{"Saint Lucia", "Sainte-Lucie", "LC", "LCA", "662"},
|
|
||||||
{"Saint Martin (French part)", "Saint-Martin (partie française)", "MF", "MAF", "663"},
|
|
||||||
{"Saint Pierre and Miquelon", "Saint-Pierre-et-Miquelon", "PM", "SPM", "666"},
|
|
||||||
{"Saint Vincent and the Grenadines", "Saint-Vincent-et-les Grenadines", "VC", "VCT", "670"},
|
|
||||||
{"San Marino", "Saint-Marin", "SM", "SMR", "674"},
|
|
||||||
{"Sao Tome and Principe", "Sao Tomé-et-Principe", "ST", "STP", "678"},
|
|
||||||
{"Saudi Arabia", "Arabie saoudite (l')", "SA", "SAU", "682"},
|
|
||||||
{"Senegal", "Sénégal (le)", "SN", "SEN", "686"},
|
|
||||||
{"Serbia", "Serbie (la)", "RS", "SRB", "688"},
|
|
||||||
{"Seychelles", "Seychelles (les)", "SC", "SYC", "690"},
|
|
||||||
{"Sierra Leone", "Sierra Leone (la)", "SL", "SLE", "694"},
|
|
||||||
{"Singapore", "Singapour", "SG", "SGP", "702"},
|
|
||||||
{"Slovakia", "Slovaquie (la)", "SK", "SVK", "703"},
|
|
||||||
{"Viet Nam", "Viet Nam (le)", "VN", "VNM", "704"},
|
|
||||||
{"Slovenia", "Slovénie (la)", "SI", "SVN", "705"},
|
|
||||||
{"Somalia", "Somalie (la)", "SO", "SOM", "706"},
|
|
||||||
{"South Africa", "Afrique du Sud (l')", "ZA", "ZAF", "710"},
|
|
||||||
{"Zimbabwe", "Zimbabwe (le)", "ZW", "ZWE", "716"},
|
|
||||||
{"Spain", "Espagne (l')", "ES", "ESP", "724"},
|
|
||||||
{"South Sudan", "Soudan du Sud (le)", "SS", "SSD", "728"},
|
|
||||||
{"Sudan (the)", "Soudan (le)", "SD", "SDN", "729"},
|
|
||||||
{"Western Sahara*", "Sahara occidental (le)*", "EH", "ESH", "732"},
|
|
||||||
{"Suriname", "Suriname (le)", "SR", "SUR", "740"},
|
|
||||||
{"Svalbard and Jan Mayen", "Svalbard et l'Île Jan Mayen (le)", "SJ", "SJM", "744"},
|
|
||||||
{"Swaziland", "Swaziland (le)", "SZ", "SWZ", "748"},
|
|
||||||
{"Sweden", "Suède (la)", "SE", "SWE", "752"},
|
|
||||||
{"Switzerland", "Suisse (la)", "CH", "CHE", "756"},
|
|
||||||
{"Syrian Arab Republic", "République arabe syrienne (la)", "SY", "SYR", "760"},
|
|
||||||
{"Tajikistan", "Tadjikistan (le)", "TJ", "TJK", "762"},
|
|
||||||
{"Thailand", "Thaïlande (la)", "TH", "THA", "764"},
|
|
||||||
{"Togo", "Togo (le)", "TG", "TGO", "768"},
|
|
||||||
{"Tokelau", "Tokelau (les)", "TK", "TKL", "772"},
|
|
||||||
{"Tonga", "Tonga (les)", "TO", "TON", "776"},
|
|
||||||
{"Trinidad and Tobago", "Trinité-et-Tobago (la)", "TT", "TTO", "780"},
|
|
||||||
{"United Arab Emirates (the)", "Émirats arabes unis (les)", "AE", "ARE", "784"},
|
|
||||||
{"Tunisia", "Tunisie (la)", "TN", "TUN", "788"},
|
|
||||||
{"Turkey", "Turquie (la)", "TR", "TUR", "792"},
|
|
||||||
{"Turkmenistan", "Turkménistan (le)", "TM", "TKM", "795"},
|
|
||||||
{"Turks and Caicos Islands (the)", "Turks-et-Caïcos (les Îles)", "TC", "TCA", "796"},
|
|
||||||
{"Tuvalu", "Tuvalu (les)", "TV", "TUV", "798"},
|
|
||||||
{"Uganda", "Ouganda (l')", "UG", "UGA", "800"},
|
|
||||||
{"Ukraine", "Ukraine (l')", "UA", "UKR", "804"},
|
|
||||||
{"Macedonia (the former Yugoslav Republic of)", "Macédoine (l'ex‑République yougoslave de)", "MK", "MKD", "807"},
|
|
||||||
{"Egypt", "Égypte (l')", "EG", "EGY", "818"},
|
|
||||||
{"United Kingdom of Great Britain and Northern Ireland (the)", "Royaume-Uni de Grande-Bretagne et d'Irlande du Nord (le)", "GB", "GBR", "826"},
|
|
||||||
{"Guernsey", "Guernesey", "GG", "GGY", "831"},
|
|
||||||
{"Jersey", "Jersey", "JE", "JEY", "832"},
|
|
||||||
{"Isle of Man", "Île de Man", "IM", "IMN", "833"},
|
|
||||||
{"Tanzania, United Republic of", "Tanzanie, République-Unie de", "TZ", "TZA", "834"},
|
|
||||||
{"United States of America (the)", "États-Unis d'Amérique (les)", "US", "USA", "840"},
|
|
||||||
{"Virgin Islands (U.S.)", "Vierges des États-Unis (les Îles)", "VI", "VIR", "850"},
|
|
||||||
{"Burkina Faso", "Burkina Faso (le)", "BF", "BFA", "854"},
|
|
||||||
{"Uruguay", "Uruguay (l')", "UY", "URY", "858"},
|
|
||||||
{"Uzbekistan", "Ouzbékistan (l')", "UZ", "UZB", "860"},
|
|
||||||
{"Venezuela (Bolivarian Republic of)", "Venezuela (République bolivarienne du)", "VE", "VEN", "862"},
|
|
||||||
{"Wallis and Futuna", "Wallis-et-Futuna", "WF", "WLF", "876"},
|
|
||||||
{"Samoa", "Samoa (le)", "WS", "WSM", "882"},
|
|
||||||
{"Yemen", "Yémen (le)", "YE", "YEM", "887"},
|
|
||||||
{"Zambia", "Zambie (la)", "ZM", "ZMB", "894"},
|
|
||||||
}
|
|
||||||
|
|
||||||
// ISO4217List is the list of ISO currency codes
var ISO4217List = []string{
	"AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN",
	"BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD",
	"CAD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CUC", "CUP", "CVE", "CZK",
	"DJF", "DKK", "DOP", "DZD",
	"EGP", "ERN", "ETB", "EUR",
	"FJD", "FKP",
	"GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD",
	"HKD", "HNL", "HRK", "HTG", "HUF",
	"IDR", "ILS", "INR", "IQD", "IRR", "ISK",
	"JMD", "JOD", "JPY",
	"KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT",
	"LAK", "LBP", "LKR", "LRD", "LSL", "LYD",
	"MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN",
	"NAD", "NGN", "NIO", "NOK", "NPR", "NZD",
	"OMR",
	"PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG",
	"QAR",
	"RON", "RSD", "RUB", "RWF",
	"SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "SSP", "STD", "SVC", "SYP", "SZL",
	"THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS",
	"UAH", "UGX", "USD", "USN", "UYI", "UYU", "UZS",
	"VEF", "VND", "VUV",
	"WST",
	"XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XOF", "XPD", "XPF", "XPT", "XSU", "XTS", "XUA", "XXX",
	"YER",
	"ZAR", "ZMW", "ZWL",
}

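As a side note, this is a minimal sketch of how a flat code table like `ISO4217List` can be consulted from application code; `containsCode` is a hypothetical helper, not part of govalidator's API:

```go
package main

import (
	"fmt"

	"github.com/asaskevich/govalidator"
)

// containsCode is a hypothetical helper showing how a flat table such as
// ISO4217List can be scanned for a code.
func containsCode(list []string, code string) bool {
	for _, c := range list {
		if c == code {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(containsCode(govalidator.ISO4217List, "EUR")) // true
	fmt.Println(containsCode(govalidator.ISO4217List, "XYZ")) // false
}
```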
// ISO693Entry stores ISO language codes
type ISO693Entry struct {
	Alpha3bCode string
	Alpha2Code  string
	English     string
}

//ISO693List based on http://data.okfn.org/data/core/language-codes/r/language-codes-3b2.json
|
|
||||||
var ISO693List = []ISO693Entry{
|
|
||||||
{Alpha3bCode: "aar", Alpha2Code: "aa", English: "Afar"},
|
|
||||||
{Alpha3bCode: "abk", Alpha2Code: "ab", English: "Abkhazian"},
|
|
||||||
{Alpha3bCode: "afr", Alpha2Code: "af", English: "Afrikaans"},
|
|
||||||
{Alpha3bCode: "aka", Alpha2Code: "ak", English: "Akan"},
|
|
||||||
{Alpha3bCode: "alb", Alpha2Code: "sq", English: "Albanian"},
|
|
||||||
{Alpha3bCode: "amh", Alpha2Code: "am", English: "Amharic"},
|
|
||||||
{Alpha3bCode: "ara", Alpha2Code: "ar", English: "Arabic"},
|
|
||||||
{Alpha3bCode: "arg", Alpha2Code: "an", English: "Aragonese"},
|
|
||||||
{Alpha3bCode: "arm", Alpha2Code: "hy", English: "Armenian"},
|
|
||||||
{Alpha3bCode: "asm", Alpha2Code: "as", English: "Assamese"},
|
|
||||||
{Alpha3bCode: "ava", Alpha2Code: "av", English: "Avaric"},
|
|
||||||
{Alpha3bCode: "ave", Alpha2Code: "ae", English: "Avestan"},
|
|
||||||
{Alpha3bCode: "aym", Alpha2Code: "ay", English: "Aymara"},
|
|
||||||
{Alpha3bCode: "aze", Alpha2Code: "az", English: "Azerbaijani"},
|
|
||||||
{Alpha3bCode: "bak", Alpha2Code: "ba", English: "Bashkir"},
|
|
||||||
{Alpha3bCode: "bam", Alpha2Code: "bm", English: "Bambara"},
|
|
||||||
{Alpha3bCode: "baq", Alpha2Code: "eu", English: "Basque"},
|
|
||||||
{Alpha3bCode: "bel", Alpha2Code: "be", English: "Belarusian"},
|
|
||||||
{Alpha3bCode: "ben", Alpha2Code: "bn", English: "Bengali"},
|
|
||||||
{Alpha3bCode: "bih", Alpha2Code: "bh", English: "Bihari languages"},
|
|
||||||
{Alpha3bCode: "bis", Alpha2Code: "bi", English: "Bislama"},
|
|
||||||
{Alpha3bCode: "bos", Alpha2Code: "bs", English: "Bosnian"},
|
|
||||||
{Alpha3bCode: "bre", Alpha2Code: "br", English: "Breton"},
|
|
||||||
{Alpha3bCode: "bul", Alpha2Code: "bg", English: "Bulgarian"},
|
|
||||||
{Alpha3bCode: "bur", Alpha2Code: "my", English: "Burmese"},
|
|
||||||
{Alpha3bCode: "cat", Alpha2Code: "ca", English: "Catalan; Valencian"},
|
|
||||||
{Alpha3bCode: "cha", Alpha2Code: "ch", English: "Chamorro"},
|
|
||||||
{Alpha3bCode: "che", Alpha2Code: "ce", English: "Chechen"},
|
|
||||||
{Alpha3bCode: "chi", Alpha2Code: "zh", English: "Chinese"},
|
|
||||||
{Alpha3bCode: "chu", Alpha2Code: "cu", English: "Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic"},
|
|
||||||
{Alpha3bCode: "chv", Alpha2Code: "cv", English: "Chuvash"},
|
|
||||||
{Alpha3bCode: "cor", Alpha2Code: "kw", English: "Cornish"},
|
|
||||||
{Alpha3bCode: "cos", Alpha2Code: "co", English: "Corsican"},
|
|
||||||
{Alpha3bCode: "cre", Alpha2Code: "cr", English: "Cree"},
|
|
||||||
{Alpha3bCode: "cze", Alpha2Code: "cs", English: "Czech"},
|
|
||||||
{Alpha3bCode: "dan", Alpha2Code: "da", English: "Danish"},
|
|
||||||
{Alpha3bCode: "div", Alpha2Code: "dv", English: "Divehi; Dhivehi; Maldivian"},
|
|
||||||
{Alpha3bCode: "dut", Alpha2Code: "nl", English: "Dutch; Flemish"},
|
|
||||||
{Alpha3bCode: "dzo", Alpha2Code: "dz", English: "Dzongkha"},
|
|
||||||
{Alpha3bCode: "eng", Alpha2Code: "en", English: "English"},
|
|
||||||
{Alpha3bCode: "epo", Alpha2Code: "eo", English: "Esperanto"},
|
|
||||||
{Alpha3bCode: "est", Alpha2Code: "et", English: "Estonian"},
|
|
||||||
{Alpha3bCode: "ewe", Alpha2Code: "ee", English: "Ewe"},
|
|
||||||
{Alpha3bCode: "fao", Alpha2Code: "fo", English: "Faroese"},
|
|
||||||
{Alpha3bCode: "fij", Alpha2Code: "fj", English: "Fijian"},
|
|
||||||
{Alpha3bCode: "fin", Alpha2Code: "fi", English: "Finnish"},
|
|
||||||
{Alpha3bCode: "fre", Alpha2Code: "fr", English: "French"},
|
|
||||||
{Alpha3bCode: "fry", Alpha2Code: "fy", English: "Western Frisian"},
|
|
||||||
{Alpha3bCode: "ful", Alpha2Code: "ff", English: "Fulah"},
|
|
||||||
{Alpha3bCode: "geo", Alpha2Code: "ka", English: "Georgian"},
|
|
||||||
{Alpha3bCode: "ger", Alpha2Code: "de", English: "German"},
|
|
||||||
{Alpha3bCode: "gla", Alpha2Code: "gd", English: "Gaelic; Scottish Gaelic"},
|
|
||||||
{Alpha3bCode: "gle", Alpha2Code: "ga", English: "Irish"},
|
|
||||||
{Alpha3bCode: "glg", Alpha2Code: "gl", English: "Galician"},
|
|
||||||
{Alpha3bCode: "glv", Alpha2Code: "gv", English: "Manx"},
|
|
||||||
{Alpha3bCode: "gre", Alpha2Code: "el", English: "Greek, Modern (1453-)"},
|
|
||||||
{Alpha3bCode: "grn", Alpha2Code: "gn", English: "Guarani"},
|
|
||||||
{Alpha3bCode: "guj", Alpha2Code: "gu", English: "Gujarati"},
|
|
||||||
{Alpha3bCode: "hat", Alpha2Code: "ht", English: "Haitian; Haitian Creole"},
|
|
||||||
{Alpha3bCode: "hau", Alpha2Code: "ha", English: "Hausa"},
|
|
||||||
{Alpha3bCode: "heb", Alpha2Code: "he", English: "Hebrew"},
|
|
||||||
{Alpha3bCode: "her", Alpha2Code: "hz", English: "Herero"},
|
|
||||||
{Alpha3bCode: "hin", Alpha2Code: "hi", English: "Hindi"},
|
|
||||||
{Alpha3bCode: "hmo", Alpha2Code: "ho", English: "Hiri Motu"},
|
|
||||||
{Alpha3bCode: "hrv", Alpha2Code: "hr", English: "Croatian"},
|
|
||||||
{Alpha3bCode: "hun", Alpha2Code: "hu", English: "Hungarian"},
|
|
||||||
{Alpha3bCode: "ibo", Alpha2Code: "ig", English: "Igbo"},
|
|
||||||
{Alpha3bCode: "ice", Alpha2Code: "is", English: "Icelandic"},
|
|
||||||
{Alpha3bCode: "ido", Alpha2Code: "io", English: "Ido"},
|
|
||||||
{Alpha3bCode: "iii", Alpha2Code: "ii", English: "Sichuan Yi; Nuosu"},
|
|
||||||
{Alpha3bCode: "iku", Alpha2Code: "iu", English: "Inuktitut"},
|
|
||||||
{Alpha3bCode: "ile", Alpha2Code: "ie", English: "Interlingue; Occidental"},
|
|
||||||
{Alpha3bCode: "ina", Alpha2Code: "ia", English: "Interlingua (International Auxiliary Language Association)"},
|
|
||||||
{Alpha3bCode: "ind", Alpha2Code: "id", English: "Indonesian"},
|
|
||||||
{Alpha3bCode: "ipk", Alpha2Code: "ik", English: "Inupiaq"},
|
|
||||||
{Alpha3bCode: "ita", Alpha2Code: "it", English: "Italian"},
|
|
||||||
{Alpha3bCode: "jav", Alpha2Code: "jv", English: "Javanese"},
|
|
||||||
{Alpha3bCode: "jpn", Alpha2Code: "ja", English: "Japanese"},
|
|
||||||
{Alpha3bCode: "kal", Alpha2Code: "kl", English: "Kalaallisut; Greenlandic"},
|
|
||||||
{Alpha3bCode: "kan", Alpha2Code: "kn", English: "Kannada"},
|
|
||||||
{Alpha3bCode: "kas", Alpha2Code: "ks", English: "Kashmiri"},
|
|
||||||
{Alpha3bCode: "kau", Alpha2Code: "kr", English: "Kanuri"},
|
|
||||||
{Alpha3bCode: "kaz", Alpha2Code: "kk", English: "Kazakh"},
|
|
||||||
{Alpha3bCode: "khm", Alpha2Code: "km", English: "Central Khmer"},
|
|
||||||
{Alpha3bCode: "kik", Alpha2Code: "ki", English: "Kikuyu; Gikuyu"},
|
|
||||||
{Alpha3bCode: "kin", Alpha2Code: "rw", English: "Kinyarwanda"},
|
|
||||||
{Alpha3bCode: "kir", Alpha2Code: "ky", English: "Kirghiz; Kyrgyz"},
|
|
||||||
{Alpha3bCode: "kom", Alpha2Code: "kv", English: "Komi"},
|
|
||||||
{Alpha3bCode: "kon", Alpha2Code: "kg", English: "Kongo"},
|
|
||||||
{Alpha3bCode: "kor", Alpha2Code: "ko", English: "Korean"},
|
|
||||||
{Alpha3bCode: "kua", Alpha2Code: "kj", English: "Kuanyama; Kwanyama"},
|
|
||||||
{Alpha3bCode: "kur", Alpha2Code: "ku", English: "Kurdish"},
|
|
||||||
{Alpha3bCode: "lao", Alpha2Code: "lo", English: "Lao"},
|
|
||||||
{Alpha3bCode: "lat", Alpha2Code: "la", English: "Latin"},
|
|
||||||
{Alpha3bCode: "lav", Alpha2Code: "lv", English: "Latvian"},
|
|
||||||
{Alpha3bCode: "lim", Alpha2Code: "li", English: "Limburgan; Limburger; Limburgish"},
|
|
||||||
{Alpha3bCode: "lin", Alpha2Code: "ln", English: "Lingala"},
|
|
||||||
{Alpha3bCode: "lit", Alpha2Code: "lt", English: "Lithuanian"},
|
|
||||||
{Alpha3bCode: "ltz", Alpha2Code: "lb", English: "Luxembourgish; Letzeburgesch"},
|
|
||||||
{Alpha3bCode: "lub", Alpha2Code: "lu", English: "Luba-Katanga"},
|
|
||||||
{Alpha3bCode: "lug", Alpha2Code: "lg", English: "Ganda"},
|
|
||||||
{Alpha3bCode: "mac", Alpha2Code: "mk", English: "Macedonian"},
|
|
||||||
{Alpha3bCode: "mah", Alpha2Code: "mh", English: "Marshallese"},
|
|
||||||
{Alpha3bCode: "mal", Alpha2Code: "ml", English: "Malayalam"},
|
|
||||||
{Alpha3bCode: "mao", Alpha2Code: "mi", English: "Maori"},
|
|
||||||
{Alpha3bCode: "mar", Alpha2Code: "mr", English: "Marathi"},
|
|
||||||
{Alpha3bCode: "may", Alpha2Code: "ms", English: "Malay"},
|
|
||||||
{Alpha3bCode: "mlg", Alpha2Code: "mg", English: "Malagasy"},
|
|
||||||
{Alpha3bCode: "mlt", Alpha2Code: "mt", English: "Maltese"},
|
|
||||||
{Alpha3bCode: "mon", Alpha2Code: "mn", English: "Mongolian"},
|
|
||||||
{Alpha3bCode: "nau", Alpha2Code: "na", English: "Nauru"},
|
|
||||||
{Alpha3bCode: "nav", Alpha2Code: "nv", English: "Navajo; Navaho"},
|
|
||||||
{Alpha3bCode: "nbl", Alpha2Code: "nr", English: "Ndebele, South; South Ndebele"},
|
|
||||||
{Alpha3bCode: "nde", Alpha2Code: "nd", English: "Ndebele, North; North Ndebele"},
|
|
||||||
{Alpha3bCode: "ndo", Alpha2Code: "ng", English: "Ndonga"},
|
|
||||||
{Alpha3bCode: "nep", Alpha2Code: "ne", English: "Nepali"},
|
|
||||||
{Alpha3bCode: "nno", Alpha2Code: "nn", English: "Norwegian Nynorsk; Nynorsk, Norwegian"},
|
|
||||||
{Alpha3bCode: "nob", Alpha2Code: "nb", English: "Bokmål, Norwegian; Norwegian Bokmål"},
|
|
||||||
{Alpha3bCode: "nor", Alpha2Code: "no", English: "Norwegian"},
|
|
||||||
{Alpha3bCode: "nya", Alpha2Code: "ny", English: "Chichewa; Chewa; Nyanja"},
|
|
||||||
{Alpha3bCode: "oci", Alpha2Code: "oc", English: "Occitan (post 1500); Provençal"},
|
|
||||||
{Alpha3bCode: "oji", Alpha2Code: "oj", English: "Ojibwa"},
|
|
||||||
{Alpha3bCode: "ori", Alpha2Code: "or", English: "Oriya"},
|
|
||||||
{Alpha3bCode: "orm", Alpha2Code: "om", English: "Oromo"},
|
|
||||||
{Alpha3bCode: "oss", Alpha2Code: "os", English: "Ossetian; Ossetic"},
|
|
||||||
{Alpha3bCode: "pan", Alpha2Code: "pa", English: "Panjabi; Punjabi"},
|
|
||||||
{Alpha3bCode: "per", Alpha2Code: "fa", English: "Persian"},
|
|
||||||
{Alpha3bCode: "pli", Alpha2Code: "pi", English: "Pali"},
|
|
||||||
{Alpha3bCode: "pol", Alpha2Code: "pl", English: "Polish"},
|
|
||||||
{Alpha3bCode: "por", Alpha2Code: "pt", English: "Portuguese"},
|
|
||||||
{Alpha3bCode: "pus", Alpha2Code: "ps", English: "Pushto; Pashto"},
|
|
||||||
{Alpha3bCode: "que", Alpha2Code: "qu", English: "Quechua"},
|
|
||||||
{Alpha3bCode: "roh", Alpha2Code: "rm", English: "Romansh"},
|
|
||||||
{Alpha3bCode: "rum", Alpha2Code: "ro", English: "Romanian; Moldavian; Moldovan"},
|
|
||||||
{Alpha3bCode: "run", Alpha2Code: "rn", English: "Rundi"},
|
|
||||||
{Alpha3bCode: "rus", Alpha2Code: "ru", English: "Russian"},
|
|
||||||
{Alpha3bCode: "sag", Alpha2Code: "sg", English: "Sango"},
|
|
||||||
{Alpha3bCode: "san", Alpha2Code: "sa", English: "Sanskrit"},
|
|
||||||
{Alpha3bCode: "sin", Alpha2Code: "si", English: "Sinhala; Sinhalese"},
|
|
||||||
{Alpha3bCode: "slo", Alpha2Code: "sk", English: "Slovak"},
|
|
||||||
{Alpha3bCode: "slv", Alpha2Code: "sl", English: "Slovenian"},
|
|
||||||
{Alpha3bCode: "sme", Alpha2Code: "se", English: "Northern Sami"},
|
|
||||||
{Alpha3bCode: "smo", Alpha2Code: "sm", English: "Samoan"},
|
|
||||||
{Alpha3bCode: "sna", Alpha2Code: "sn", English: "Shona"},
|
|
||||||
{Alpha3bCode: "snd", Alpha2Code: "sd", English: "Sindhi"},
|
|
||||||
{Alpha3bCode: "som", Alpha2Code: "so", English: "Somali"},
|
|
||||||
{Alpha3bCode: "sot", Alpha2Code: "st", English: "Sotho, Southern"},
|
|
||||||
{Alpha3bCode: "spa", Alpha2Code: "es", English: "Spanish; Castilian"},
|
|
||||||
{Alpha3bCode: "srd", Alpha2Code: "sc", English: "Sardinian"},
|
|
||||||
{Alpha3bCode: "srp", Alpha2Code: "sr", English: "Serbian"},
|
|
||||||
{Alpha3bCode: "ssw", Alpha2Code: "ss", English: "Swati"},
|
|
||||||
{Alpha3bCode: "sun", Alpha2Code: "su", English: "Sundanese"},
|
|
||||||
{Alpha3bCode: "swa", Alpha2Code: "sw", English: "Swahili"},
|
|
||||||
{Alpha3bCode: "swe", Alpha2Code: "sv", English: "Swedish"},
|
|
||||||
{Alpha3bCode: "tah", Alpha2Code: "ty", English: "Tahitian"},
|
|
||||||
{Alpha3bCode: "tam", Alpha2Code: "ta", English: "Tamil"},
|
|
||||||
{Alpha3bCode: "tat", Alpha2Code: "tt", English: "Tatar"},
|
|
||||||
{Alpha3bCode: "tel", Alpha2Code: "te", English: "Telugu"},
|
|
||||||
{Alpha3bCode: "tgk", Alpha2Code: "tg", English: "Tajik"},
|
|
||||||
{Alpha3bCode: "tgl", Alpha2Code: "tl", English: "Tagalog"},
|
|
||||||
{Alpha3bCode: "tha", Alpha2Code: "th", English: "Thai"},
|
|
||||||
{Alpha3bCode: "tib", Alpha2Code: "bo", English: "Tibetan"},
|
|
||||||
{Alpha3bCode: "tir", Alpha2Code: "ti", English: "Tigrinya"},
|
|
||||||
{Alpha3bCode: "ton", Alpha2Code: "to", English: "Tonga (Tonga Islands)"},
|
|
||||||
{Alpha3bCode: "tsn", Alpha2Code: "tn", English: "Tswana"},
|
|
||||||
{Alpha3bCode: "tso", Alpha2Code: "ts", English: "Tsonga"},
|
|
||||||
{Alpha3bCode: "tuk", Alpha2Code: "tk", English: "Turkmen"},
|
|
||||||
{Alpha3bCode: "tur", Alpha2Code: "tr", English: "Turkish"},
|
|
||||||
{Alpha3bCode: "twi", Alpha2Code: "tw", English: "Twi"},
|
|
||||||
{Alpha3bCode: "uig", Alpha2Code: "ug", English: "Uighur; Uyghur"},
|
|
||||||
{Alpha3bCode: "ukr", Alpha2Code: "uk", English: "Ukrainian"},
|
|
||||||
{Alpha3bCode: "urd", Alpha2Code: "ur", English: "Urdu"},
|
|
||||||
{Alpha3bCode: "uzb", Alpha2Code: "uz", English: "Uzbek"},
|
|
||||||
{Alpha3bCode: "ven", Alpha2Code: "ve", English: "Venda"},
|
|
||||||
{Alpha3bCode: "vie", Alpha2Code: "vi", English: "Vietnamese"},
|
|
||||||
{Alpha3bCode: "vol", Alpha2Code: "vo", English: "Volapük"},
|
|
||||||
{Alpha3bCode: "wel", Alpha2Code: "cy", English: "Welsh"},
|
|
||||||
{Alpha3bCode: "wln", Alpha2Code: "wa", English: "Walloon"},
|
|
||||||
{Alpha3bCode: "wol", Alpha2Code: "wo", English: "Wolof"},
|
|
||||||
{Alpha3bCode: "xho", Alpha2Code: "xh", English: "Xhosa"},
|
|
||||||
{Alpha3bCode: "yid", Alpha2Code: "yi", English: "Yiddish"},
|
|
||||||
{Alpha3bCode: "yor", Alpha2Code: "yo", English: "Yoruba"},
|
|
||||||
{Alpha3bCode: "zha", Alpha2Code: "za", English: "Zhuang; Chuang"},
|
|
||||||
{Alpha3bCode: "zul", Alpha2Code: "zu", English: "Zulu"},
|
|
||||||
}
|
|
270
vendor/github.com/asaskevich/govalidator/utils.go
generated
vendored
@ -1,270 +0,0 @@
package govalidator
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"html"
|
|
||||||
"math"
|
|
||||||
"path"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Contains checks if the string contains the substring.
|
|
||||||
func Contains(str, substring string) bool {
|
|
||||||
return strings.Contains(str, substring)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Matches checks if the string matches the pattern (the pattern is a regular expression).
|
|
||||||
// In case of error return false
|
|
||||||
func Matches(str, pattern string) bool {
|
|
||||||
match, _ := regexp.MatchString(pattern, str)
|
|
||||||
return match
|
|
||||||
}
|
|
||||||
|
|
||||||
// LeftTrim trims characters from the left side of the input.
// If the second argument is empty, leading whitespace is removed.
|
|
||||||
func LeftTrim(str, chars string) string {
|
|
||||||
if chars == "" {
|
|
||||||
return strings.TrimLeftFunc(str, unicode.IsSpace)
|
|
||||||
}
|
|
||||||
r, _ := regexp.Compile("^[" + chars + "]+")
|
|
||||||
return r.ReplaceAllString(str, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// RightTrim trims characters from the right side of the input.
// If the second argument is empty, trailing whitespace is removed.
|
|
||||||
func RightTrim(str, chars string) string {
|
|
||||||
if chars == "" {
|
|
||||||
return strings.TrimRightFunc(str, unicode.IsSpace)
|
|
||||||
}
|
|
||||||
r, _ := regexp.Compile("[" + chars + "]+$")
|
|
||||||
return r.ReplaceAllString(str, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Trim trims characters from both sides of the input.
// If the second argument is empty, surrounding whitespace is removed.
|
|
||||||
func Trim(str, chars string) string {
|
|
||||||
return LeftTrim(RightTrim(str, chars), chars)
|
|
||||||
}
|
|
||||||
|
|
||||||
// WhiteList removes characters that do not appear in the whitelist.
|
|
||||||
func WhiteList(str, chars string) string {
|
|
||||||
pattern := "[^" + chars + "]+"
|
|
||||||
r, _ := regexp.Compile(pattern)
|
|
||||||
return r.ReplaceAllString(str, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// BlackList removes characters that appear in the blacklist.
|
|
||||||
func BlackList(str, chars string) string {
|
|
||||||
pattern := "[" + chars + "]+"
|
|
||||||
r, _ := regexp.Compile(pattern)
|
|
||||||
return r.ReplaceAllString(str, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// StripLow removes characters with a numerical value < 32 and 127, mostly control characters.
|
|
||||||
// If keep_new_lines is true, newline characters are preserved (\n and \r, hex 0xA and 0xD).
|
|
||||||
func StripLow(str string, keepNewLines bool) string {
|
|
||||||
chars := ""
|
|
||||||
if keepNewLines {
|
|
||||||
chars = "\x00-\x09\x0B\x0C\x0E-\x1F\x7F"
|
|
||||||
} else {
|
|
||||||
chars = "\x00-\x1F\x7F"
|
|
||||||
}
|
|
||||||
return BlackList(str, chars)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ReplacePattern replaces a regular expression pattern in a string.
|
|
||||||
func ReplacePattern(str, pattern, replace string) string {
|
|
||||||
r, _ := regexp.Compile(pattern)
|
|
||||||
return r.ReplaceAllString(str, replace)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Escape replace <, >, & and " with HTML entities.
|
|
||||||
var Escape = html.EscapeString
|
|
||||||
|
|
||||||
func addSegment(inrune, segment []rune) []rune {
|
|
||||||
if len(segment) == 0 {
|
|
||||||
return inrune
|
|
||||||
}
|
|
||||||
if len(inrune) != 0 {
|
|
||||||
inrune = append(inrune, '_')
|
|
||||||
}
|
|
||||||
inrune = append(inrune, segment...)
|
|
||||||
return inrune
|
|
||||||
}
|
|
||||||
|
|
||||||
// UnderscoreToCamelCase converts from underscore separated form to camel case form.
|
|
||||||
// Ex.: my_func => MyFunc
|
|
||||||
func UnderscoreToCamelCase(s string) string {
|
|
||||||
return strings.Replace(strings.Title(strings.Replace(strings.ToLower(s), "_", " ", -1)), " ", "", -1)
|
|
||||||
}
|
|
||||||
|
|
||||||
// CamelCaseToUnderscore converts from camel case form to underscore separated form.
|
|
||||||
// Ex.: MyFunc => my_func
|
|
||||||
func CamelCaseToUnderscore(str string) string {
|
|
||||||
var output []rune
|
|
||||||
var segment []rune
|
|
||||||
for _, r := range str {
|
|
||||||
|
|
||||||
// do not treat numbers as a separate segment
|
|
||||||
if !unicode.IsLower(r) && string(r) != "_" && !unicode.IsNumber(r) {
|
|
||||||
output = addSegment(output, segment)
|
|
||||||
segment = nil
|
|
||||||
}
|
|
||||||
segment = append(segment, unicode.ToLower(r))
|
|
||||||
}
|
|
||||||
output = addSegment(output, segment)
|
|
||||||
return string(output)
|
|
||||||
}
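For reference, a small usage sketch of the two conversion helpers above, based only on their doc comments (my_func => MyFunc and back):

```go
package main

import (
	"fmt"

	"github.com/asaskevich/govalidator"
)

func main() {
	// Underscore-separated names become camel case, and vice versa.
	fmt.Println(govalidator.UnderscoreToCamelCase("my_func")) // MyFunc
	fmt.Println(govalidator.CamelCaseToUnderscore("MyFunc"))  // my_func
}
```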
|
|
||||||
|
|
||||||
// Reverse return reversed string
|
|
||||||
func Reverse(s string) string {
|
|
||||||
r := []rune(s)
|
|
||||||
for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 {
|
|
||||||
r[i], r[j] = r[j], r[i]
|
|
||||||
}
|
|
||||||
return string(r)
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetLines split string by "\n" and return array of lines
|
|
||||||
func GetLines(s string) []string {
|
|
||||||
return strings.Split(s, "\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetLine return specified line of multiline string
|
|
||||||
func GetLine(s string, index int) (string, error) {
|
|
||||||
lines := GetLines(s)
|
|
||||||
if index < 0 || index >= len(lines) {
|
|
||||||
return "", errors.New("line index out of bounds")
|
|
||||||
}
|
|
||||||
return lines[index], nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RemoveTags remove all tags from HTML string
|
|
||||||
func RemoveTags(s string) string {
|
|
||||||
return ReplacePattern(s, "<[^>]*>", "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// SafeFileName return safe string that can be used in file names
|
|
||||||
func SafeFileName(str string) string {
|
|
||||||
name := strings.ToLower(str)
|
|
||||||
name = path.Clean(path.Base(name))
|
|
||||||
name = strings.Trim(name, " ")
|
|
||||||
separators, err := regexp.Compile(`[ &_=+:]`)
|
|
||||||
if err == nil {
|
|
||||||
name = separators.ReplaceAllString(name, "-")
|
|
||||||
}
|
|
||||||
legal, err := regexp.Compile(`[^[:alnum:]-.]`)
|
|
||||||
if err == nil {
|
|
||||||
name = legal.ReplaceAllString(name, "")
|
|
||||||
}
|
|
||||||
for strings.Contains(name, "--") {
|
|
||||||
name = strings.Replace(name, "--", "-", -1)
|
|
||||||
}
|
|
||||||
return name
|
|
||||||
}
|
|
||||||
|
|
||||||
// NormalizeEmail canonicalize an email address.
|
|
||||||
// The local part of the email address is lowercased for all domains; the hostname is always lowercased and
|
|
||||||
// the local part of the email address is always lowercased for hosts that are known to be case-insensitive (currently only GMail).
|
|
||||||
// Normalization follows special rules for known providers: currently, GMail addresses have dots removed in the local part and
|
|
||||||
// are stripped of tags (e.g. some.one+tag@gmail.com becomes someone@gmail.com) and all @googlemail.com addresses are
|
|
||||||
// normalized to @gmail.com.
|
|
||||||
func NormalizeEmail(str string) (string, error) {
|
|
||||||
if !IsEmail(str) {
|
|
||||||
return "", fmt.Errorf("%s is not an email", str)
|
|
||||||
}
|
|
||||||
parts := strings.Split(str, "@")
|
|
||||||
parts[0] = strings.ToLower(parts[0])
|
|
||||||
parts[1] = strings.ToLower(parts[1])
|
|
||||||
if parts[1] == "gmail.com" || parts[1] == "googlemail.com" {
|
|
||||||
parts[1] = "gmail.com"
|
|
||||||
parts[0] = strings.Split(ReplacePattern(parts[0], `\.`, ""), "+")[0]
|
|
||||||
}
|
|
||||||
return strings.Join(parts, "@"), nil
|
|
||||||
}
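A brief usage sketch of NormalizeEmail, assuming only the behaviour documented above (Gmail local parts lose dots and +tags, and googlemail.com is folded into gmail.com):

```go
package main

import (
	"fmt"

	"github.com/asaskevich/govalidator"
)

func main() {
	// Dots and +tags are stripped from the Gmail local part, and the
	// googlemail.com host is normalized to gmail.com.
	normalized, err := govalidator.NormalizeEmail("Some.One+news@googlemail.com")
	if err != nil {
		panic(err)
	}
	fmt.Println(normalized) // someone@gmail.com
}
```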
|
|
||||||
|
|
||||||
// Truncate a string to the closest length without breaking words.
|
|
||||||
func Truncate(str string, length int, ending string) string {
|
|
||||||
var aftstr, befstr string
|
|
||||||
if len(str) > length {
|
|
||||||
words := strings.Fields(str)
|
|
||||||
before, present := 0, 0
|
|
||||||
for i := range words {
|
|
||||||
befstr = aftstr
|
|
||||||
before = present
|
|
||||||
aftstr = aftstr + words[i] + " "
|
|
||||||
present = len(aftstr)
|
|
||||||
if present > length && i != 0 {
|
|
||||||
if (length - before) < (present - length) {
|
|
||||||
return Trim(befstr, " /\\.,\"'#!?&@+-") + ending
|
|
||||||
}
|
|
||||||
return Trim(aftstr, " /\\.,\"'#!?&@+-") + ending
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return str
|
|
||||||
}
|
|
||||||
|
|
||||||
// PadLeft pads the left side of the string if its length is less than the indicated pad length.
|
|
||||||
func PadLeft(str string, padStr string, padLen int) string {
|
|
||||||
return buildPadStr(str, padStr, padLen, true, false)
|
|
||||||
}
|
|
||||||
|
|
||||||
// PadRight pads the right side of the string if its length is less than the indicated pad length.
|
|
||||||
func PadRight(str string, padStr string, padLen int) string {
|
|
||||||
return buildPadStr(str, padStr, padLen, false, true)
|
|
||||||
}
|
|
||||||
|
|
||||||
// PadBoth pads both sides of the string if its length is less than the indicated pad length.
|
|
||||||
func PadBoth(str string, padStr string, padLen int) string {
|
|
||||||
return buildPadStr(str, padStr, padLen, true, true)
|
|
||||||
}
|
|
||||||
|
|
||||||
// buildPadStr pads the string on the left, the right or both sides; note that the padding string can be unicode and longer than one
|
|
||||||
// character
|
|
||||||
func buildPadStr(str string, padStr string, padLen int, padLeft bool, padRight bool) string {
|
|
||||||
|
|
||||||
// When the padded length is less than the current string size
|
|
||||||
if padLen < utf8.RuneCountInString(str) {
|
|
||||||
return str
|
|
||||||
}
|
|
||||||
|
|
||||||
padLen -= utf8.RuneCountInString(str)
|
|
||||||
|
|
||||||
targetLen := padLen
|
|
||||||
|
|
||||||
targetLenLeft := targetLen
|
|
||||||
targetLenRight := targetLen
|
|
||||||
if padLeft && padRight {
|
|
||||||
targetLenLeft = padLen / 2
|
|
||||||
targetLenRight = padLen - targetLenLeft
|
|
||||||
}
|
|
||||||
|
|
||||||
strToRepeatLen := utf8.RuneCountInString(padStr)
|
|
||||||
|
|
||||||
repeatTimes := int(math.Ceil(float64(targetLen) / float64(strToRepeatLen)))
|
|
||||||
repeatedString := strings.Repeat(padStr, repeatTimes)
|
|
||||||
|
|
||||||
leftSide := ""
|
|
||||||
if padLeft {
|
|
||||||
leftSide = repeatedString[0:targetLenLeft]
|
|
||||||
}
|
|
||||||
|
|
||||||
rightSide := ""
|
|
||||||
if padRight {
|
|
||||||
rightSide = repeatedString[0:targetLenRight]
|
|
||||||
}
|
|
||||||
|
|
||||||
return leftSide + str + rightSide
|
|
||||||
}
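A short sketch of the exported padding helpers built on buildPadStr; the expected outputs follow from the rune-counting logic above:

```go
package main

import (
	"fmt"

	"github.com/asaskevich/govalidator"
)

func main() {
	// Pad lengths count runes, not bytes, per buildPadStr above.
	fmt.Println(govalidator.PadLeft("abc", "*", 5))  // **abc
	fmt.Println(govalidator.PadRight("abc", "*", 5)) // abc**
	fmt.Println(govalidator.PadBoth("abc", "*", 7))  // **abc**
}
```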
|
|
||||||
|
|
||||||
// TruncatingErrorf removes extra args from fmt.Errorf if not formatted in the str object
|
|
||||||
func TruncatingErrorf(str string, args ...interface{}) error {
|
|
||||||
n := strings.Count(str, "%s")
|
|
||||||
return fmt.Errorf(str, args[:n]...)
|
|
||||||
}
|
|
1278
vendor/github.com/asaskevich/govalidator/validator.go
generated
vendored
File diff suppressed because it is too large
15
vendor/github.com/asaskevich/govalidator/wercker.yml
generated
vendored
@ -1,15 +0,0 @@
box: golang
build:
  steps:
    - setup-go-workspace

    - script:
        name: go get
        code: |
          go version
          go get -t ./...

    - script:
        name: go test
        code: |
          go test -race ./...
14
vendor/github.com/beevik/etree/.travis.yml
generated
vendored
@ -1,14 +0,0 @@
language: go
sudo: false

go:
  - 1.11.x
  - tip

matrix:
  allow_failures:
    - go: tip

script:
  - go vet ./...
  - go test -v ./...
10
vendor/github.com/beevik/etree/CONTRIBUTORS
generated
vendored
@ -1,10 +0,0 @@
Brett Vickers (beevik)
Felix Geisendörfer (felixge)
Kamil Kisiel (kisielk)
Graham King (grahamking)
Matt Smith (ma314smith)
Michal Jemala (michaljemala)
Nicolas Piganeau (npiganeau)
Chris Brown (ccbrown)
Earncef Sequeira (earncef)
Gabriel de Labachelerie (wuzuf)
24
vendor/github.com/beevik/etree/LICENSE
generated
vendored
@ -1,24 +0,0 @@
Copyright 2015-2019 Brett Vickers. All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions
|
|
||||||
are met:
|
|
||||||
|
|
||||||
1. Redistributions of source code must retain the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
2. Redistributions in binary form must reproduce the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer in the
|
|
||||||
documentation and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDER ``AS IS'' AND ANY
|
|
||||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
||||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDER OR
|
|
||||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
|
||||||
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
||||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
|
||||||
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
205
vendor/github.com/beevik/etree/README.md
generated
vendored
@ -1,205 +0,0 @@
[](https://travis-ci.org/beevik/etree)
|
|
||||||
[](https://godoc.org/github.com/beevik/etree)
|
|
||||||
|
|
||||||
etree
|
|
||||||
=====
|
|
||||||
|
|
||||||
The etree package is a lightweight, pure go package that expresses XML in
|
|
||||||
the form of an element tree. Its design was inspired by the Python
|
|
||||||
[ElementTree](http://docs.python.org/2/library/xml.etree.elementtree.html)
|
|
||||||
module.
|
|
||||||
|
|
||||||
Some of the package's capabilities and features:
|
|
||||||
|
|
||||||
* Represents XML documents as trees of elements for easy traversal.
|
|
||||||
* Imports, serializes, modifies or creates XML documents from scratch.
|
|
||||||
* Writes and reads XML to/from files, byte slices, strings and io interfaces.
|
|
||||||
* Performs simple or complex searches with lightweight XPath-like query APIs.
|
|
||||||
* Auto-indents XML using spaces or tabs for better readability.
|
|
||||||
* Implemented in pure go; depends only on standard go libraries.
|
|
||||||
* Built on top of the go [encoding/xml](http://golang.org/pkg/encoding/xml)
|
|
||||||
package.
|
|
||||||
|
|
||||||
### Creating an XML document
|
|
||||||
|
|
||||||
The following example creates an XML document from scratch using the etree
|
|
||||||
package and outputs its indented contents to stdout.
|
|
||||||
```go
|
|
||||||
doc := etree.NewDocument()
|
|
||||||
doc.CreateProcInst("xml", `version="1.0" encoding="UTF-8"`)
|
|
||||||
doc.CreateProcInst("xml-stylesheet", `type="text/xsl" href="style.xsl"`)
|
|
||||||
|
|
||||||
people := doc.CreateElement("People")
|
|
||||||
people.CreateComment("These are all known people")
|
|
||||||
|
|
||||||
jon := people.CreateElement("Person")
|
|
||||||
jon.CreateAttr("name", "Jon")
|
|
||||||
|
|
||||||
sally := people.CreateElement("Person")
|
|
||||||
sally.CreateAttr("name", "Sally")
|
|
||||||
|
|
||||||
doc.Indent(2)
|
|
||||||
doc.WriteTo(os.Stdout)
|
|
||||||
```
|
|
||||||
|
|
||||||
Output:
|
|
||||||
```xml
|
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<?xml-stylesheet type="text/xsl" href="style.xsl"?>
|
|
||||||
<People>
|
|
||||||
<!--These are all known people-->
|
|
||||||
<Person name="Jon"/>
|
|
||||||
<Person name="Sally"/>
|
|
||||||
</People>
|
|
||||||
```
|
|
||||||
|
|
||||||
### Reading an XML file
|
|
||||||
|
|
||||||
Suppose you have a file on disk called `bookstore.xml` containing the
|
|
||||||
following data:
|
|
||||||
|
|
||||||
```xml
|
|
||||||
<bookstore xmlns:p="urn:schemas-books-com:prices">
|
|
||||||
|
|
||||||
<book category="COOKING">
|
|
||||||
<title lang="en">Everyday Italian</title>
|
|
||||||
<author>Giada De Laurentiis</author>
|
|
||||||
<year>2005</year>
|
|
||||||
<p:price>30.00</p:price>
|
|
||||||
</book>
|
|
||||||
|
|
||||||
<book category="CHILDREN">
|
|
||||||
<title lang="en">Harry Potter</title>
|
|
||||||
<author>J K. Rowling</author>
|
|
||||||
<year>2005</year>
|
|
||||||
<p:price>29.99</p:price>
|
|
||||||
</book>
|
|
||||||
|
|
||||||
<book category="WEB">
|
|
||||||
<title lang="en">XQuery Kick Start</title>
|
|
||||||
<author>James McGovern</author>
|
|
||||||
<author>Per Bothner</author>
|
|
||||||
<author>Kurt Cagle</author>
|
|
||||||
<author>James Linn</author>
|
|
||||||
<author>Vaidyanathan Nagarajan</author>
|
|
||||||
<year>2003</year>
|
|
||||||
<p:price>49.99</p:price>
|
|
||||||
</book>
|
|
||||||
|
|
||||||
<book category="WEB">
|
|
||||||
<title lang="en">Learning XML</title>
|
|
||||||
<author>Erik T. Ray</author>
|
|
||||||
<year>2003</year>
|
|
||||||
<p:price>39.95</p:price>
|
|
||||||
</book>
|
|
||||||
|
|
||||||
</bookstore>
|
|
||||||
```
|
|
||||||
|
|
||||||
This code reads the file's contents into an etree document.
|
|
||||||
```go
|
|
||||||
doc := etree.NewDocument()
|
|
||||||
if err := doc.ReadFromFile("bookstore.xml"); err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
You can also read XML from a string, a byte slice, or an `io.Reader`.
|
|
||||||
|
|
||||||
### Processing elements and attributes
|
|
||||||
|
|
||||||
This example illustrates several ways to access elements and attributes using
|
|
||||||
etree selection queries.
|
|
||||||
```go
|
|
||||||
root := doc.SelectElement("bookstore")
|
|
||||||
fmt.Println("ROOT element:", root.Tag)
|
|
||||||
|
|
||||||
for _, book := range root.SelectElements("book") {
|
|
||||||
fmt.Println("CHILD element:", book.Tag)
|
|
||||||
if title := book.SelectElement("title"); title != nil {
|
|
||||||
lang := title.SelectAttrValue("lang", "unknown")
|
|
||||||
fmt.Printf(" TITLE: %s (%s)\n", title.Text(), lang)
|
|
||||||
}
|
|
||||||
for _, attr := range book.Attr {
|
|
||||||
fmt.Printf(" ATTR: %s=%s\n", attr.Key, attr.Value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
Output:
|
|
||||||
```
|
|
||||||
ROOT element: bookstore
|
|
||||||
CHILD element: book
|
|
||||||
TITLE: Everyday Italian (en)
|
|
||||||
ATTR: category=COOKING
|
|
||||||
CHILD element: book
|
|
||||||
TITLE: Harry Potter (en)
|
|
||||||
ATTR: category=CHILDREN
|
|
||||||
CHILD element: book
|
|
||||||
TITLE: XQuery Kick Start (en)
|
|
||||||
ATTR: category=WEB
|
|
||||||
CHILD element: book
|
|
||||||
TITLE: Learning XML (en)
|
|
||||||
ATTR: category=WEB
|
|
||||||
```
|
|
||||||
|
|
||||||
### Path queries
|
|
||||||
|
|
||||||
This example uses etree's path functions to select all book titles that fall
|
|
||||||
into the category of 'WEB'. The double-slash prefix in the path causes the
|
|
||||||
search for book elements to occur recursively; book elements may appear at any
|
|
||||||
level of the XML hierarchy.
|
|
||||||
```go
|
|
||||||
for _, t := range doc.FindElements("//book[@category='WEB']/title") {
|
|
||||||
fmt.Println("Title:", t.Text())
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Output:
|
|
||||||
```
|
|
||||||
Title: XQuery Kick Start
|
|
||||||
Title: Learning XML
|
|
||||||
```
|
|
||||||
|
|
||||||
This example finds the first book element under the root bookstore element and
|
|
||||||
outputs the tag and text of each of its child elements.
|
|
||||||
```go
|
|
||||||
for _, e := range doc.FindElements("./bookstore/book[1]/*") {
|
|
||||||
fmt.Printf("%s: %s\n", e.Tag, e.Text())
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Output:
|
|
||||||
```
|
|
||||||
title: Everyday Italian
|
|
||||||
author: Giada De Laurentiis
|
|
||||||
year: 2005
|
|
||||||
price: 30.00
|
|
||||||
```
|
|
||||||
|
|
||||||
This example finds all books with a price of 49.99 and outputs their titles.
|
|
||||||
```go
|
|
||||||
path := etree.MustCompilePath("./bookstore/book[p:price='49.99']/title")
|
|
||||||
for _, e := range doc.FindElementsPath(path) {
|
|
||||||
fmt.Println(e.Text())
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Output:
|
|
||||||
```
|
|
||||||
XQuery Kick Start
|
|
||||||
```
|
|
||||||
|
|
||||||
Note that this example uses the FindElementsPath function, which takes as an
|
|
||||||
argument a pre-compiled path object. Use precompiled paths when you plan to
|
|
||||||
search with the same path more than once.
|
|
||||||
|
|
||||||
### Other features
|
|
||||||
|
|
||||||
These are just a few examples of the things the etree package can do. See the
|
|
||||||
[documentation](http://godoc.org/github.com/beevik/etree) for a complete
|
|
||||||
description of its capabilities.
|
|
||||||
|
|
||||||
### Contributing
|
|
||||||
|
|
||||||
This project accepts contributions. Just fork the repo and submit a pull
|
|
||||||
request!
|
|
109
vendor/github.com/beevik/etree/RELEASE_NOTES.md
generated
vendored
@ -1,109 +0,0 @@
Release v1.1.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
**New Features**
|
|
||||||
|
|
||||||
* New attribute helpers.
|
|
||||||
* Added the `Element.SortAttrs` method, which lexicographically sorts an
|
|
||||||
element's attributes by key.
|
|
||||||
* New `ReadSettings` properties.
|
|
||||||
* Added `Entity` for the support of custom entity maps.
|
|
||||||
* New `WriteSettings` properties.
|
|
||||||
* Added `UseCRLF` to allow the output of CR-LF newlines instead of the
|
|
||||||
default LF newlines. This is useful on Windows systems.
|
|
||||||
* Additional support for text and CDATA sections.
|
|
||||||
* The `Element.Text` method now returns the concatenation of all consecutive
|
|
||||||
character data tokens immediately following an element's opening tag.
|
|
||||||
* Added `Element.SetCData` to replace the character data immediately
|
|
||||||
following an element's opening tag with a CDATA section.
|
|
||||||
* Added `Element.CreateCData` to create and add a CDATA section child
|
|
||||||
`CharData` token to an element.
|
|
||||||
* Added `Element.CreateText` to create and add a child text `CharData` token
|
|
||||||
to an element.
|
|
||||||
* Added `NewCData` to create a parentless CDATA section `CharData` token.
|
|
||||||
* Added `NewText` to create a parentless text `CharData`
|
|
||||||
token.
|
|
||||||
* Added `CharData.IsCData` to detect if the token contains a CDATA section.
|
|
||||||
* Added `CharData.IsWhitespace` to detect if the token contains whitespace
|
|
||||||
inserted by one of the document Indent functions.
|
|
||||||
* Modified `Element.SetText` so that it replaces a run of consecutive
|
|
||||||
character data tokens following the element's opening tag (instead of just
|
|
||||||
the first one).
|
|
||||||
* New "tail text" support.
|
|
||||||
* Added the `Element.Tail` method, which returns the text immediately
|
|
||||||
following an element's closing tag.
|
|
||||||
* Added the `Element.SetTail` method, which modifies the text immediately
|
|
||||||
following an element's closing tag.
|
|
||||||
* New element child insertion and removal methods.
|
|
||||||
* Added the `Element.InsertChildAt` method, which inserts a new child token
|
|
||||||
before the specified child token index.
|
|
||||||
* Added the `Element.RemoveChildAt` method, which removes the child token at
|
|
||||||
the specified child token index.
|
|
||||||
* New element and attribute queries.
|
|
||||||
* Added the `Element.Index` method, which returns the element's index within
|
|
||||||
its parent element's child token list.
|
|
||||||
* Added the `Element.NamespaceURI` method to return the namespace URI
|
|
||||||
associated with an element.
|
|
||||||
* Added the `Attr.NamespaceURI` method to return the namespace URI
|
|
||||||
associated with an element.
|
|
||||||
* Added the `Attr.Element` method to return the element that an attribute
|
|
||||||
belongs to.
|
|
||||||
* New Path filter functions.
|
|
||||||
* Added `[local-name()='val']` to keep elements whose unprefixed tag matches
|
|
||||||
the desired value.
|
|
||||||
* Added `[name()='val']` to keep elements whose full tag matches the desired
|
|
||||||
value.
|
|
||||||
* Added `[namespace-prefix()='val']` to keep elements whose namespace prefix
|
|
||||||
matches the desired value.
|
|
||||||
* Added `[namespace-uri()='val']` to keep elements whose namespace URI
|
|
||||||
matches the desired value.
|
|
||||||
|
|
||||||
**Bug Fixes**
|
|
||||||
|
|
||||||
* A default XML `CharSetReader` is now used to prevent failed parsing of XML
|
|
||||||
documents using certain encodings.
|
|
||||||
([Issue](https://github.com/beevik/etree/issues/53)).
|
|
||||||
* All characters are now properly escaped according to XML parsing rules.
|
|
||||||
([Issue](https://github.com/beevik/etree/issues/55)).
|
|
||||||
* The `Document.Indent` and `Document.IndentTabs` functions no longer insert
|
|
||||||
empty string `CharData` tokens.
|
|
||||||
|
|
||||||
**Deprecated**
|
|
||||||
|
|
||||||
* `Element`
|
|
||||||
* The `InsertChild` method is deprecated. Use `InsertChildAt` instead.
|
|
||||||
* The `CreateCharData` method is deprecated. Use `CreateText` instead.
|
|
||||||
* `CharData`
|
|
||||||
* The `NewCharData` method is deprecated. Use `NewText` instead.
|
|
||||||
|
|
||||||
|
|
||||||
Release v1.0.1
|
|
||||||
==============
|
|
||||||
|
|
||||||
**Changes**
|
|
||||||
|
|
||||||
* Added support for absolute etree Path queries. An absolute path begins with
|
|
||||||
`/` or `//` and begins its search from the element's document root.
|
|
||||||
* Added [`GetPath`](https://godoc.org/github.com/beevik/etree#Element.GetPath)
|
|
||||||
and [`GetRelativePath`](https://godoc.org/github.com/beevik/etree#Element.GetRelativePath)
|
|
||||||
functions to the [`Element`](https://godoc.org/github.com/beevik/etree#Element)
|
|
||||||
type.
|
|
||||||
|
|
||||||
**Breaking changes**
|
|
||||||
|
|
||||||
* A path starting with `//` is now interpreted as an absolute path.
|
|
||||||
Previously, it was interpreted as a relative path starting from the element
|
|
||||||
whose
|
|
||||||
[`FindElement`](https://godoc.org/github.com/beevik/etree#Element.FindElement)
|
|
||||||
method was called. To remain compatible with this release, all paths
|
|
||||||
prefixed with `//` should be prefixed with `.//` when called from any
|
|
||||||
element other than the document's root.
|
|
||||||
* [**edit 2/1/2019**]: Minor releases should not contain breaking changes.
|
|
||||||
Even though this breaking change was very minor, it was a mistake to include
|
|
||||||
it in this minor release. In the future, all breaking changes will be
|
|
||||||
limited to major releases (e.g., version 2.0.0).
|
|
||||||
|
|
||||||
Release v1.0.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
Initial release.
|
|
1453
vendor/github.com/beevik/etree/etree.go
generated
vendored
File diff suppressed because it is too large
276
vendor/github.com/beevik/etree/helpers.go
generated
vendored
@ -1,276 +0,0 @@
// Copyright 2015-2019 Brett Vickers.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package etree
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"io"
|
|
||||||
"strings"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
// A simple stack
|
|
||||||
type stack struct {
|
|
||||||
data []interface{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stack) empty() bool {
|
|
||||||
return len(s.data) == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stack) push(value interface{}) {
|
|
||||||
s.data = append(s.data, value)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stack) pop() interface{} {
|
|
||||||
value := s.data[len(s.data)-1]
|
|
||||||
s.data[len(s.data)-1] = nil
|
|
||||||
s.data = s.data[:len(s.data)-1]
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stack) peek() interface{} {
|
|
||||||
return s.data[len(s.data)-1]
|
|
||||||
}
|
|
||||||
|
|
||||||
// A fifo is a simple first-in-first-out queue.
|
|
||||||
type fifo struct {
|
|
||||||
data []interface{}
|
|
||||||
head, tail int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *fifo) add(value interface{}) {
|
|
||||||
if f.len()+1 >= len(f.data) {
|
|
||||||
f.grow()
|
|
||||||
}
|
|
||||||
f.data[f.tail] = value
|
|
||||||
if f.tail++; f.tail == len(f.data) {
|
|
||||||
f.tail = 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *fifo) remove() interface{} {
|
|
||||||
value := f.data[f.head]
|
|
||||||
f.data[f.head] = nil
|
|
||||||
if f.head++; f.head == len(f.data) {
|
|
||||||
f.head = 0
|
|
||||||
}
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *fifo) len() int {
|
|
||||||
if f.tail >= f.head {
|
|
||||||
return f.tail - f.head
|
|
||||||
}
|
|
||||||
return len(f.data) - f.head + f.tail
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *fifo) grow() {
|
|
||||||
c := len(f.data) * 2
|
|
||||||
if c == 0 {
|
|
||||||
c = 4
|
|
||||||
}
|
|
||||||
buf, count := make([]interface{}, c), f.len()
|
|
||||||
if f.tail >= f.head {
|
|
||||||
copy(buf[0:count], f.data[f.head:f.tail])
|
|
||||||
} else {
|
|
||||||
hindex := len(f.data) - f.head
|
|
||||||
copy(buf[0:hindex], f.data[f.head:])
|
|
||||||
copy(buf[hindex:count], f.data[:f.tail])
|
|
||||||
}
|
|
||||||
f.data, f.head, f.tail = buf, 0, count
|
|
||||||
}
|
|
||||||
|
|
||||||
// countReader implements a proxy reader that counts the number of
|
|
||||||
// bytes read from its encapsulated reader.
|
|
||||||
type countReader struct {
|
|
||||||
r io.Reader
|
|
||||||
bytes int64
|
|
||||||
}
|
|
||||||
|
|
||||||
func newCountReader(r io.Reader) *countReader {
|
|
||||||
return &countReader{r: r}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cr *countReader) Read(p []byte) (n int, err error) {
|
|
||||||
b, err := cr.r.Read(p)
|
|
||||||
cr.bytes += int64(b)
|
|
||||||
return b, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// countWriter implements a proxy writer that counts the number of
|
|
||||||
// bytes written by its encapsulated writer.
|
|
||||||
type countWriter struct {
|
|
||||||
w io.Writer
|
|
||||||
bytes int64
|
|
||||||
}
|
|
||||||
|
|
||||||
func newCountWriter(w io.Writer) *countWriter {
|
|
||||||
return &countWriter{w: w}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cw *countWriter) Write(p []byte) (n int, err error) {
|
|
||||||
b, err := cw.w.Write(p)
|
|
||||||
cw.bytes += int64(b)
|
|
||||||
return b, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// isWhitespace returns true if the byte slice contains only
|
|
||||||
// whitespace characters.
|
|
||||||
func isWhitespace(s string) bool {
|
|
||||||
for i := 0; i < len(s); i++ {
|
|
||||||
if c := s[i]; c != ' ' && c != '\t' && c != '\n' && c != '\r' {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// spaceMatch returns true if namespace a is the empty string
|
|
||||||
// or if namespace a equals namespace b.
|
|
||||||
func spaceMatch(a, b string) bool {
|
|
||||||
switch {
|
|
||||||
case a == "":
|
|
||||||
return true
|
|
||||||
default:
|
|
||||||
return a == b
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// spaceDecompose breaks a namespace:tag identifier at the ':'
|
|
||||||
// and returns the two parts.
|
|
||||||
func spaceDecompose(str string) (space, key string) {
|
|
||||||
colon := strings.IndexByte(str, ':')
|
|
||||||
if colon == -1 {
|
|
||||||
return "", str
|
|
||||||
}
|
|
||||||
return str[:colon], str[colon+1:]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strings used by indentCRLF and indentLF
|
|
||||||
const (
|
|
||||||
indentSpaces = "\r\n "
|
|
||||||
indentTabs = "\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t"
|
|
||||||
)
|
|
||||||
|
|
||||||
// indentCRLF returns a CRLF newline followed by n copies of the first
|
|
||||||
// non-CRLF character in the source string.
|
|
||||||
func indentCRLF(n int, source string) string {
|
|
||||||
switch {
|
|
||||||
case n < 0:
|
|
||||||
return source[:2]
|
|
||||||
case n < len(source)-1:
|
|
||||||
return source[:n+2]
|
|
||||||
default:
|
|
||||||
return source + strings.Repeat(source[2:3], n-len(source)+2)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// indentLF returns a LF newline followed by n copies of the first non-LF
|
|
||||||
// character in the source string.
|
|
||||||
func indentLF(n int, source string) string {
|
|
||||||
switch {
|
|
||||||
case n < 0:
|
|
||||||
return source[1:2]
|
|
||||||
case n < len(source)-1:
|
|
||||||
return source[1 : n+2]
|
|
||||||
default:
|
|
||||||
return source[1:] + strings.Repeat(source[2:3], n-len(source)+2)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// nextIndex returns the index of the next occurrence of sep in s,
|
|
||||||
// starting from offset. It returns -1 if the sep string is not found.
|
|
||||||
func nextIndex(s, sep string, offset int) int {
|
|
||||||
switch i := strings.Index(s[offset:], sep); i {
|
|
||||||
case -1:
|
|
||||||
return -1
|
|
||||||
default:
|
|
||||||
return offset + i
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// isInteger returns true if the string s contains an integer.
|
|
||||||
func isInteger(s string) bool {
|
|
||||||
for i := 0; i < len(s); i++ {
|
|
||||||
if (s[i] < '0' || s[i] > '9') && !(i == 0 && s[i] == '-') {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
type escapeMode byte
|
|
||||||
|
|
||||||
const (
|
|
||||||
escapeNormal escapeMode = iota
|
|
||||||
escapeCanonicalText
|
|
||||||
escapeCanonicalAttr
|
|
||||||
)
|
|
||||||
|
|
||||||
// escapeString writes an escaped version of a string to the writer.
|
|
||||||
func escapeString(w *bufio.Writer, s string, m escapeMode) {
|
|
||||||
var esc []byte
|
|
||||||
last := 0
|
|
||||||
for i := 0; i < len(s); {
|
|
||||||
r, width := utf8.DecodeRuneInString(s[i:])
|
|
||||||
i += width
|
|
||||||
switch r {
|
|
||||||
case '&':
|
|
||||||
esc = []byte("&")
|
|
||||||
case '<':
|
|
||||||
esc = []byte("<")
|
|
||||||
case '>':
|
|
||||||
if m == escapeCanonicalAttr {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
esc = []byte(">")
|
|
||||||
case '\'':
|
|
||||||
if m != escapeNormal {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
esc = []byte("'")
|
|
||||||
case '"':
|
|
||||||
if m == escapeCanonicalText {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
esc = []byte(""")
|
|
||||||
case '\t':
|
|
||||||
if m != escapeCanonicalAttr {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
esc = []byte("	")
|
|
||||||
case '\n':
|
|
||||||
if m != escapeCanonicalAttr {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
esc = []byte("
")
|
|
||||||
case '\r':
|
|
||||||
if m == escapeNormal {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
esc = []byte("
")
|
|
||||||
default:
|
|
||||||
if !isInCharacterRange(r) || (r == 0xFFFD && width == 1) {
|
|
||||||
esc = []byte("\uFFFD")
|
|
||||||
break
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
w.WriteString(s[last : i-width])
|
|
||||||
w.Write(esc)
|
|
||||||
last = i
|
|
||||||
}
|
|
||||||
w.WriteString(s[last:])
|
|
||||||
}
|
|
||||||
|
|
||||||
func isInCharacterRange(r rune) bool {
|
|
||||||
return r == 0x09 ||
|
|
||||||
r == 0x0A ||
|
|
||||||
r == 0x0D ||
|
|
||||||
r >= 0x20 && r <= 0xD7FF ||
|
|
||||||
r >= 0xE000 && r <= 0xFFFD ||
|
|
||||||
r >= 0x10000 && r <= 0x10FFFF
|
|
||||||
}
|
|
582
vendor/github.com/beevik/etree/path.go
generated
vendored
@ -1,582 +0,0 @@
|
|||||||
// Copyright 2015-2019 Brett Vickers.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package etree
|
|
||||||
|
|
||||||
import (
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
/*
|
|
||||||
A Path is a string that represents a search path through an etree starting
|
|
||||||
from the document root or an arbitrary element. Paths are used with the
|
|
||||||
Element object's Find* methods to locate and return desired elements.
|
|
||||||
|
|
||||||
A Path consists of a series of slash-separated "selectors", each of which may
|
|
||||||
be modified by one or more bracket-enclosed "filters". Selectors are used to
|
|
||||||
traverse the etree from element to element, while filters are used to narrow
|
|
||||||
the list of candidate elements at each node.
|
|
||||||
|
|
||||||
Although etree Path strings are similar to XPath strings
|
|
||||||
(https://www.w3.org/TR/1999/REC-xpath-19991116/), they have a more limited set
|
|
||||||
of selectors and filtering options.
|
|
||||||
|
|
||||||
The following selectors are supported by etree Path strings:
|
|
||||||
|
|
||||||
. Select the current element.
|
|
||||||
.. Select the parent of the current element.
|
|
||||||
* Select all child elements of the current element.
|
|
||||||
/ Select the root element when used at the start of a path.
|
|
||||||
// Select all descendants of the current element.
|
|
||||||
tag Select all child elements with a name matching the tag.
|
|
||||||
|
|
||||||
The following basic filters are supported by etree Path strings:
|
|
||||||
|
|
||||||
[@attrib] Keep elements with an attribute named attrib.
|
|
||||||
[@attrib='val'] Keep elements with an attribute named attrib and value matching val.
|
|
||||||
[tag] Keep elements with a child element named tag.
|
|
||||||
[tag='val'] Keep elements with a child element named tag and text matching val.
|
|
||||||
[n] Keep the n-th element, where n is a numeric index starting from 1.
|
|
||||||
|
|
||||||
The following function filters are also supported:
|
|
||||||
|
|
||||||
[text()] Keep elements with non-empty text.
|
|
||||||
[text()='val'] Keep elements whose text matches val.
|
|
||||||
[local-name()='val'] Keep elements whose un-prefixed tag matches val.
|
|
||||||
[name()='val'] Keep elements whose full tag exactly matches val.
|
|
||||||
[namespace-prefix()='val'] Keep elements whose namespace prefix matches val.
|
|
||||||
[namespace-uri()='val'] Keep elements whose namespace URI matches val.
|
|
||||||
|
|
||||||
Here are some examples of Path strings:
|
|
||||||
|
|
||||||
- Select the bookstore child element of the root element:
|
|
||||||
/bookstore
|
|
||||||
|
|
||||||
- Beginning from the root element, select the title elements of all
|
|
||||||
descendant book elements having a 'category' attribute of 'WEB':
|
|
||||||
//book[@category='WEB']/title
|
|
||||||
|
|
||||||
- Beginning from the current element, select the first descendant
|
|
||||||
book element with a title child element containing the text 'Great
|
|
||||||
Expectations':
|
|
||||||
.//book[title='Great Expectations'][1]
|
|
||||||
|
|
||||||
- Beginning from the current element, select all child elements of
|
|
||||||
book elements with an attribute 'language' set to 'english':
|
|
||||||
./book/*[@language='english']
|
|
||||||
|
|
||||||
- Beginning from the current element, select all child elements of
|
|
||||||
book elements containing the text 'special':
|
|
||||||
./book/*[text()='special']
|
|
||||||
|
|
||||||
- Beginning from the current element, select all descendant book
|
|
||||||
elements whose title child element has a 'language' attribute of 'french':
|
|
||||||
.//book/title[@language='french']/..
|
|
||||||
|
|
||||||
- Beginning from the current element, select all book elements
|
|
||||||
belonging to the http://www.w3.org/TR/html4/ namespace:
|
|
||||||
.//book[namespace-uri()='http://www.w3.org/TR/html4/']
|
|
||||||
|
|
||||||
*/
|
|
||||||
type Path struct {
|
|
||||||
segments []segment
|
|
||||||
}
|
|
||||||
|
|
||||||
// ErrPath is returned by path functions when an invalid etree path is provided.
|
|
||||||
type ErrPath string
|
|
||||||
|
|
||||||
// Error returns the string describing a path error.
|
|
||||||
func (err ErrPath) Error() string {
|
|
||||||
return "etree: " + string(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// CompilePath creates an optimized version of an XPath-like string that
|
|
||||||
// can be used to query elements in an element tree.
|
|
||||||
func CompilePath(path string) (Path, error) {
|
|
||||||
var comp compiler
|
|
||||||
segments := comp.parsePath(path)
|
|
||||||
if comp.err != ErrPath("") {
|
|
||||||
return Path{nil}, comp.err
|
|
||||||
}
|
|
||||||
return Path{segments}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// MustCompilePath creates an optimized version of an XPath-like string that
|
|
||||||
// can be used to query elements in an element tree. Panics if an error
|
|
||||||
// occurs. Use this function to create Paths when you know the path is
|
|
||||||
// valid (i.e., if it's hard-coded).
|
|
||||||
func MustCompilePath(path string) Path {
|
|
||||||
p, err := CompilePath(path)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
return p
|
|
||||||
}
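The doc comment above describes the path syntax only in prose, so a short usage sketch may help. It assumes the public etree API (NewDocument, ReadFromString, FindElementsPath) as published by github.com/beevik/etree; the XML snippet and the chosen path are made up for illustration.

```go
package main

import (
	"fmt"

	"github.com/beevik/etree"
)

func main() {
	doc := etree.NewDocument()
	// A tiny, made-up document to query against.
	if err := doc.ReadFromString(`<bookstore>
  <book category="WEB"><title>Go in Practice</title></book>
  <book category="COOKING"><title>Pasta</title></book>
</bookstore>`); err != nil {
		panic(err)
	}

	// Compile the path once, then reuse it for queries.
	path := etree.MustCompilePath("//book[@category='WEB']/title")
	for _, e := range doc.FindElementsPath(path) {
		fmt.Println(e.Text()) // Go in Practice
	}
}
```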
|
|
||||||
|
|
||||||
// A segment is a portion of a path between "/" characters.
|
|
||||||
// It contains one selector and zero or more [filters].
|
|
||||||
type segment struct {
|
|
||||||
sel selector
|
|
||||||
filters []filter
|
|
||||||
}
|
|
||||||
|
|
||||||
func (seg *segment) apply(e *Element, p *pather) {
|
|
||||||
seg.sel.apply(e, p)
|
|
||||||
for _, f := range seg.filters {
|
|
||||||
f.apply(p)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// A selector selects XML elements for consideration by the
|
|
||||||
// path traversal.
|
|
||||||
type selector interface {
|
|
||||||
apply(e *Element, p *pather)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A filter pares down a list of candidate XML elements based
|
|
||||||
// on a path filter in [brackets].
|
|
||||||
type filter interface {
|
|
||||||
apply(p *pather)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A pather is a helper object that traverses an element tree using
|
|
||||||
// a Path object. It collects and deduplicates all elements matching
|
|
||||||
// the path query.
|
|
||||||
type pather struct {
|
|
||||||
queue fifo
|
|
||||||
results []*Element
|
|
||||||
inResults map[*Element]bool
|
|
||||||
candidates []*Element
|
|
||||||
scratch []*Element // used by filters
|
|
||||||
}
|
|
||||||
|
|
||||||
// A node represents an element and the remaining path segments that
|
|
||||||
// should be applied against it by the pather.
|
|
||||||
type node struct {
|
|
||||||
e *Element
|
|
||||||
segments []segment
|
|
||||||
}
|
|
||||||
|
|
||||||
func newPather() *pather {
|
|
||||||
return &pather{
|
|
||||||
results: make([]*Element, 0),
|
|
||||||
inResults: make(map[*Element]bool),
|
|
||||||
candidates: make([]*Element, 0),
|
|
||||||
scratch: make([]*Element, 0),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// traverse follows the path from the element e, collecting
|
|
||||||
// and then returning all elements that match the path's selectors
|
|
||||||
// and filters.
|
|
||||||
func (p *pather) traverse(e *Element, path Path) []*Element {
|
|
||||||
for p.queue.add(node{e, path.segments}); p.queue.len() > 0; {
|
|
||||||
p.eval(p.queue.remove().(node))
|
|
||||||
}
|
|
||||||
return p.results
|
|
||||||
}
|
|
||||||
|
|
||||||
// eval evaluates the current path node by applying the remaining
|
|
||||||
// path's selector rules against the node's element.
|
|
||||||
func (p *pather) eval(n node) {
|
|
||||||
p.candidates = p.candidates[0:0]
|
|
||||||
seg, remain := n.segments[0], n.segments[1:]
|
|
||||||
seg.apply(n.e, p)
|
|
||||||
|
|
||||||
if len(remain) == 0 {
|
|
||||||
for _, c := range p.candidates {
|
|
||||||
if in := p.inResults[c]; !in {
|
|
||||||
p.inResults[c] = true
|
|
||||||
p.results = append(p.results, c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
for _, c := range p.candidates {
|
|
||||||
p.queue.add(node{c, remain})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// A compiler generates a compiled path from a path string.
|
|
||||||
type compiler struct {
|
|
||||||
err ErrPath
|
|
||||||
}
|
|
||||||
|
|
||||||
// parsePath parses an XPath-like string describing a path
|
|
||||||
// through an element tree and returns a slice of segment
|
|
||||||
// descriptors.
|
|
||||||
func (c *compiler) parsePath(path string) []segment {
|
|
||||||
// If path ends with //, fix it
|
|
||||||
if strings.HasSuffix(path, "//") {
|
|
||||||
path = path + "*"
|
|
||||||
}
|
|
||||||
|
|
||||||
var segments []segment
|
|
||||||
|
|
||||||
// Check for an absolute path
|
|
||||||
if strings.HasPrefix(path, "/") {
|
|
||||||
segments = append(segments, segment{new(selectRoot), []filter{}})
|
|
||||||
path = path[1:]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Split path into segments
|
|
||||||
for _, s := range splitPath(path) {
|
|
||||||
segments = append(segments, c.parseSegment(s))
|
|
||||||
if c.err != ErrPath("") {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return segments
|
|
||||||
}
|
|
||||||
|
|
||||||
func splitPath(path string) []string {
|
|
||||||
pieces := make([]string, 0)
|
|
||||||
start := 0
|
|
||||||
inquote := false
|
|
||||||
for i := 0; i+1 <= len(path); i++ {
|
|
||||||
if path[i] == '\'' {
|
|
||||||
inquote = !inquote
|
|
||||||
} else if path[i] == '/' && !inquote {
|
|
||||||
pieces = append(pieces, path[start:i])
|
|
||||||
start = i + 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return append(pieces, path[start:])
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseSegment parses a path segment between / characters.
|
|
||||||
func (c *compiler) parseSegment(path string) segment {
|
|
||||||
pieces := strings.Split(path, "[")
|
|
||||||
seg := segment{
|
|
||||||
sel: c.parseSelector(pieces[0]),
|
|
||||||
filters: []filter{},
|
|
||||||
}
|
|
||||||
for i := 1; i < len(pieces); i++ {
|
|
||||||
fpath := pieces[i]
|
|
||||||
if fpath[len(fpath)-1] != ']' {
|
|
||||||
c.err = ErrPath("path has invalid filter [brackets].")
|
|
||||||
break
|
|
||||||
}
|
|
||||||
seg.filters = append(seg.filters, c.parseFilter(fpath[:len(fpath)-1]))
|
|
||||||
}
|
|
||||||
return seg
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseSelector parses a selector at the start of a path segment.
|
|
||||||
func (c *compiler) parseSelector(path string) selector {
|
|
||||||
switch path {
|
|
||||||
case ".":
|
|
||||||
return new(selectSelf)
|
|
||||||
case "..":
|
|
||||||
return new(selectParent)
|
|
||||||
case "*":
|
|
||||||
return new(selectChildren)
|
|
||||||
case "":
|
|
||||||
return new(selectDescendants)
|
|
||||||
default:
|
|
||||||
return newSelectChildrenByTag(path)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var fnTable = map[string]struct {
|
|
||||||
hasFn func(e *Element) bool
|
|
||||||
getValFn func(e *Element) string
|
|
||||||
}{
|
|
||||||
"local-name": {nil, (*Element).name},
|
|
||||||
"name": {nil, (*Element).FullTag},
|
|
||||||
"namespace-prefix": {nil, (*Element).namespacePrefix},
|
|
||||||
"namespace-uri": {nil, (*Element).NamespaceURI},
|
|
||||||
"text": {(*Element).hasText, (*Element).Text},
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseFilter parses a path filter contained within [brackets].
|
|
||||||
func (c *compiler) parseFilter(path string) filter {
|
|
||||||
if len(path) == 0 {
|
|
||||||
c.err = ErrPath("path contains an empty filter expression.")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Filter contains [@attr='val'], [fn()='val'], or [tag='val']?
|
|
||||||
eqindex := strings.Index(path, "='")
|
|
||||||
if eqindex >= 0 {
|
|
||||||
rindex := nextIndex(path, "'", eqindex+2)
|
|
||||||
if rindex != len(path)-1 {
|
|
||||||
c.err = ErrPath("path has mismatched filter quotes.")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
key := path[:eqindex]
|
|
||||||
value := path[eqindex+2 : rindex]
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case key[0] == '@':
|
|
||||||
return newFilterAttrVal(key[1:], value)
|
|
||||||
case strings.HasSuffix(key, "()"):
|
|
||||||
fn := key[:len(key)-2]
|
|
||||||
if t, ok := fnTable[fn]; ok && t.getValFn != nil {
|
|
||||||
return newFilterFuncVal(t.getValFn, value)
|
|
||||||
}
|
|
||||||
c.err = ErrPath("path has unknown function " + fn)
|
|
||||||
return nil
|
|
||||||
default:
|
|
||||||
return newFilterChildText(key, value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Filter contains [@attr], [N], [tag] or [fn()]
|
|
||||||
switch {
|
|
||||||
case path[0] == '@':
|
|
||||||
return newFilterAttr(path[1:])
|
|
||||||
case strings.HasSuffix(path, "()"):
|
|
||||||
fn := path[:len(path)-2]
|
|
||||||
if t, ok := fnTable[fn]; ok && t.hasFn != nil {
|
|
||||||
return newFilterFunc(t.hasFn)
|
|
||||||
}
|
|
||||||
c.err = ErrPath("path has unknown function " + fn)
|
|
||||||
return nil
|
|
||||||
case isInteger(path):
|
|
||||||
pos, _ := strconv.Atoi(path)
|
|
||||||
switch {
|
|
||||||
case pos > 0:
|
|
||||||
return newFilterPos(pos - 1)
|
|
||||||
default:
|
|
||||||
return newFilterPos(pos)
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return newFilterChild(path)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// selectSelf selects the current element into the candidate list.
|
|
||||||
type selectSelf struct{}
|
|
||||||
|
|
||||||
func (s *selectSelf) apply(e *Element, p *pather) {
|
|
||||||
p.candidates = append(p.candidates, e)
|
|
||||||
}
|
|
||||||
|
|
||||||
// selectRoot selects the element's root node.
|
|
||||||
type selectRoot struct{}
|
|
||||||
|
|
||||||
func (s *selectRoot) apply(e *Element, p *pather) {
|
|
||||||
root := e
|
|
||||||
for root.parent != nil {
|
|
||||||
root = root.parent
|
|
||||||
}
|
|
||||||
p.candidates = append(p.candidates, root)
|
|
||||||
}
|
|
||||||
|
|
||||||
// selectParent selects the element's parent into the candidate list.
|
|
||||||
type selectParent struct{}
|
|
||||||
|
|
||||||
func (s *selectParent) apply(e *Element, p *pather) {
|
|
||||||
if e.parent != nil {
|
|
||||||
p.candidates = append(p.candidates, e.parent)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// selectChildren selects the element's child elements into the
|
|
||||||
// candidate list.
|
|
||||||
type selectChildren struct{}
|
|
||||||
|
|
||||||
func (s *selectChildren) apply(e *Element, p *pather) {
|
|
||||||
for _, c := range e.Child {
|
|
||||||
if c, ok := c.(*Element); ok {
|
|
||||||
p.candidates = append(p.candidates, c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// selectDescendants selects all descendant child elements
|
|
||||||
// of the element into the candidate list.
|
|
||||||
type selectDescendants struct{}
|
|
||||||
|
|
||||||
func (s *selectDescendants) apply(e *Element, p *pather) {
|
|
||||||
var queue fifo
|
|
||||||
for queue.add(e); queue.len() > 0; {
|
|
||||||
e := queue.remove().(*Element)
|
|
||||||
p.candidates = append(p.candidates, e)
|
|
||||||
for _, c := range e.Child {
|
|
||||||
if c, ok := c.(*Element); ok {
|
|
||||||
queue.add(c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// selectChildrenByTag selects into the candidate list all child
|
|
||||||
// elements of the element having the specified tag.
|
|
||||||
type selectChildrenByTag struct {
|
|
||||||
space, tag string
|
|
||||||
}
|
|
||||||
|
|
||||||
func newSelectChildrenByTag(path string) *selectChildrenByTag {
|
|
||||||
s, l := spaceDecompose(path)
|
|
||||||
return &selectChildrenByTag{s, l}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *selectChildrenByTag) apply(e *Element, p *pather) {
|
|
||||||
for _, c := range e.Child {
|
|
||||||
if c, ok := c.(*Element); ok && spaceMatch(s.space, c.Space) && s.tag == c.Tag {
|
|
||||||
p.candidates = append(p.candidates, c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// filterPos filters the candidate list, keeping only the
|
|
||||||
// candidate at the specified index.
|
|
||||||
type filterPos struct {
|
|
||||||
index int
|
|
||||||
}
|
|
||||||
|
|
||||||
func newFilterPos(pos int) *filterPos {
|
|
||||||
return &filterPos{pos}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *filterPos) apply(p *pather) {
|
|
||||||
if f.index >= 0 {
|
|
||||||
if f.index < len(p.candidates) {
|
|
||||||
p.scratch = append(p.scratch, p.candidates[f.index])
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if -f.index <= len(p.candidates) {
|
|
||||||
p.scratch = append(p.scratch, p.candidates[len(p.candidates)+f.index])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// filterAttr filters the candidate list for elements having
|
|
||||||
// the specified attribute.
|
|
||||||
type filterAttr struct {
|
|
||||||
space, key string
|
|
||||||
}
|
|
||||||
|
|
||||||
func newFilterAttr(str string) *filterAttr {
|
|
||||||
s, l := spaceDecompose(str)
|
|
||||||
return &filterAttr{s, l}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *filterAttr) apply(p *pather) {
|
|
||||||
for _, c := range p.candidates {
|
|
||||||
for _, a := range c.Attr {
|
|
||||||
if spaceMatch(f.space, a.Space) && f.key == a.Key {
|
|
||||||
p.scratch = append(p.scratch, c)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// filterAttrVal filters the candidate list for elements having
|
|
||||||
// the specified attribute with the specified value.
|
|
||||||
type filterAttrVal struct {
|
|
||||||
space, key, val string
|
|
||||||
}
|
|
||||||
|
|
||||||
func newFilterAttrVal(str, value string) *filterAttrVal {
|
|
||||||
s, l := spaceDecompose(str)
|
|
||||||
return &filterAttrVal{s, l, value}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *filterAttrVal) apply(p *pather) {
|
|
||||||
for _, c := range p.candidates {
|
|
||||||
for _, a := range c.Attr {
|
|
||||||
if spaceMatch(f.space, a.Space) && f.key == a.Key && f.val == a.Value {
|
|
||||||
p.scratch = append(p.scratch, c)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// filterFunc filters the candidate list for elements satisfying a custom
|
|
||||||
// boolean function.
|
|
||||||
type filterFunc struct {
|
|
||||||
fn func(e *Element) bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func newFilterFunc(fn func(e *Element) bool) *filterFunc {
|
|
||||||
return &filterFunc{fn}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *filterFunc) apply(p *pather) {
|
|
||||||
for _, c := range p.candidates {
|
|
||||||
if f.fn(c) {
|
|
||||||
p.scratch = append(p.scratch, c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// filterFuncVal filters the candidate list for elements containing a value
|
|
||||||
// matching the result of a custom function.
|
|
||||||
type filterFuncVal struct {
|
|
||||||
fn func(e *Element) string
|
|
||||||
val string
|
|
||||||
}
|
|
||||||
|
|
||||||
func newFilterFuncVal(fn func(e *Element) string, value string) *filterFuncVal {
|
|
||||||
return &filterFuncVal{fn, value}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *filterFuncVal) apply(p *pather) {
|
|
||||||
for _, c := range p.candidates {
|
|
||||||
if f.fn(c) == f.val {
|
|
||||||
p.scratch = append(p.scratch, c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// filterChild filters the candidate list for elements having
|
|
||||||
// a child element with the specified tag.
|
|
||||||
type filterChild struct {
|
|
||||||
space, tag string
|
|
||||||
}
|
|
||||||
|
|
||||||
func newFilterChild(str string) *filterChild {
|
|
||||||
s, l := spaceDecompose(str)
|
|
||||||
return &filterChild{s, l}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *filterChild) apply(p *pather) {
|
|
||||||
for _, c := range p.candidates {
|
|
||||||
for _, cc := range c.Child {
|
|
||||||
if cc, ok := cc.(*Element); ok &&
|
|
||||||
spaceMatch(f.space, cc.Space) &&
|
|
||||||
f.tag == cc.Tag {
|
|
||||||
p.scratch = append(p.scratch, c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// filterChildText filters the candidate list for elements having
|
|
||||||
// a child element with the specified tag and text.
|
|
||||||
type filterChildText struct {
|
|
||||||
space, tag, text string
|
|
||||||
}
|
|
||||||
|
|
||||||
func newFilterChildText(str, text string) *filterChildText {
|
|
||||||
s, l := spaceDecompose(str)
|
|
||||||
return &filterChildText{s, l, text}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *filterChildText) apply(p *pather) {
|
|
||||||
for _, c := range p.candidates {
|
|
||||||
for _, cc := range c.Child {
|
|
||||||
if cc, ok := cc.(*Element); ok &&
|
|
||||||
spaceMatch(f.space, cc.Space) &&
|
|
||||||
f.tag == cc.Tag &&
|
|
||||||
f.text == cc.Text() {
|
|
||||||
p.scratch = append(p.scratch, c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
|
||||||
}
|
|
20
vendor/github.com/beorn7/perks/LICENSE
generated
vendored
@ -1,20 +0,0 @@
|
|||||||
Copyright (C) 2013 Blake Mizerany
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining
|
|
||||||
a copy of this software and associated documentation files (the
|
|
||||||
"Software"), to deal in the Software without restriction, including
|
|
||||||
without limitation the rights to use, copy, modify, merge, publish,
|
|
||||||
distribute, sublicense, and/or sell copies of the Software, and to
|
|
||||||
permit persons to whom the Software is furnished to do so, subject to
|
|
||||||
the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be
|
|
||||||
included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
||||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
|
||||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
|
||||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
|
||||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
2388
vendor/github.com/beorn7/perks/quantile/exampledata.txt
generated
vendored
File diff suppressed because it is too large
Load Diff
316
vendor/github.com/beorn7/perks/quantile/stream.go
generated
vendored
@ -1,316 +0,0 @@
|
|||||||
// Package quantile computes approximate quantiles over an unbounded data
|
|
||||||
// stream within low memory and CPU bounds.
|
|
||||||
//
|
|
||||||
// A small amount of accuracy is traded to achieve the above properties.
|
|
||||||
//
|
|
||||||
// Multiple streams can be merged before calling Query to generate a single set
|
|
||||||
// of results. This is meaningful when the streams represent the same type of
|
|
||||||
// data. See Merge and Samples.
|
|
||||||
//
|
|
||||||
// For more detailed information about the algorithm used, see:
|
|
||||||
//
|
|
||||||
// Effective Computation of Biased Quantiles over Data Streams
|
|
||||||
//
|
|
||||||
// http://www.cs.rutgers.edu/~muthu/bquant.pdf
|
|
||||||
package quantile
|
|
||||||
|
|
||||||
import (
|
|
||||||
"math"
|
|
||||||
"sort"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Sample holds an observed value and meta information for compression. JSON
|
|
||||||
// tags have been added for convenience.
|
|
||||||
type Sample struct {
|
|
||||||
Value float64 `json:",string"`
|
|
||||||
Width float64 `json:",string"`
|
|
||||||
Delta float64 `json:",string"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Samples represents a slice of samples. It implements sort.Interface.
|
|
||||||
type Samples []Sample
|
|
||||||
|
|
||||||
func (a Samples) Len() int { return len(a) }
|
|
||||||
func (a Samples) Less(i, j int) bool { return a[i].Value < a[j].Value }
|
|
||||||
func (a Samples) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
|
||||||
|
|
||||||
type invariant func(s *stream, r float64) float64
|
|
||||||
|
|
||||||
// NewLowBiased returns an initialized Stream for low-biased quantiles
|
|
||||||
// (e.g. 0.01, 0.1, 0.5) where the needed quantiles are not known a priori, but
|
|
||||||
// error guarantees can still be given even for the lower ranks of the data
|
|
||||||
// distribution.
|
|
||||||
//
|
|
||||||
// The provided epsilon is a relative error, i.e. the true quantile of a value
|
|
||||||
// returned by a query is guaranteed to be within (1±Epsilon)*Quantile.
|
|
||||||
//
|
|
||||||
// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error
|
|
||||||
// properties.
|
|
||||||
func NewLowBiased(epsilon float64) *Stream {
|
|
||||||
ƒ := func(s *stream, r float64) float64 {
|
|
||||||
return 2 * epsilon * r
|
|
||||||
}
|
|
||||||
return newStream(ƒ)
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewHighBiased returns an initialized Stream for high-biased quantiles
|
|
||||||
// (e.g. 0.01, 0.1, 0.5) where the needed quantiles are not known a priori, but
|
|
||||||
// error guarantees can still be given even for the higher ranks of the data
|
|
||||||
// distribution.
|
|
||||||
//
|
|
||||||
// The provided epsilon is a relative error, i.e. the true quantile of a value
|
|
||||||
// returned by a query is guaranteed to be within 1-(1±Epsilon)*(1-Quantile).
|
|
||||||
//
|
|
||||||
// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error
|
|
||||||
// properties.
|
|
||||||
func NewHighBiased(epsilon float64) *Stream {
|
|
||||||
ƒ := func(s *stream, r float64) float64 {
|
|
||||||
return 2 * epsilon * (s.n - r)
|
|
||||||
}
|
|
||||||
return newStream(ƒ)
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewTargeted returns an initialized Stream concerned with a particular set of
|
|
||||||
// quantile values that are supplied a priori. Knowing these a priori reduces
|
|
||||||
// space and computation time. The targets map maps the desired quantiles to
|
|
||||||
// their absolute errors, i.e. the true quantile of a value returned by a query
|
|
||||||
// is guaranteed to be within (Quantile±Epsilon).
|
|
||||||
//
|
|
||||||
// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error properties.
|
|
||||||
func NewTargeted(targetMap map[float64]float64) *Stream {
|
|
||||||
// Convert map to slice to avoid slow iterations on a map.
|
|
||||||
// ƒ is called on the hot path, so converting the map to a slice
|
|
||||||
// beforehand results in significant CPU savings.
|
|
||||||
targets := targetMapToSlice(targetMap)
|
|
||||||
|
|
||||||
ƒ := func(s *stream, r float64) float64 {
|
|
||||||
var m = math.MaxFloat64
|
|
||||||
var f float64
|
|
||||||
for _, t := range targets {
|
|
||||||
if t.quantile*s.n <= r {
|
|
||||||
f = (2 * t.epsilon * r) / t.quantile
|
|
||||||
} else {
|
|
||||||
f = (2 * t.epsilon * (s.n - r)) / (1 - t.quantile)
|
|
||||||
}
|
|
||||||
if f < m {
|
|
||||||
m = f
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return m
|
|
||||||
}
|
|
||||||
return newStream(ƒ)
|
|
||||||
}
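As a rough illustration of how this stream is meant to be driven, here is a hedged sketch using only the exported API shown in this file (NewTargeted, Insert, Query, Count). The target quantiles, error bounds, and random sample data are illustrative assumptions, not values taken from the vendored code.

```go
package main

import (
	"fmt"
	"math/rand"

	"github.com/beorn7/perks/quantile"
)

func main() {
	// Track the median and the 99th percentile with per-quantile error bounds.
	q := quantile.NewTargeted(map[float64]float64{
		0.50: 0.005,
		0.99: 0.001,
	})

	// Feed the stream some synthetic observations.
	for i := 0; i < 10000; i++ {
		q.Insert(rand.Float64() * 100)
	}

	fmt.Printf("count=%d p50=%.2f p99=%.2f\n", q.Count(), q.Query(0.50), q.Query(0.99))
}
```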
|
|
||||||
|
|
||||||
type target struct {
|
|
||||||
quantile float64
|
|
||||||
epsilon float64
|
|
||||||
}
|
|
||||||
|
|
||||||
func targetMapToSlice(targetMap map[float64]float64) []target {
|
|
||||||
targets := make([]target, 0, len(targetMap))
|
|
||||||
|
|
||||||
for quantile, epsilon := range targetMap {
|
|
||||||
t := target{
|
|
||||||
quantile: quantile,
|
|
||||||
epsilon: epsilon,
|
|
||||||
}
|
|
||||||
targets = append(targets, t)
|
|
||||||
}
|
|
||||||
|
|
||||||
return targets
|
|
||||||
}
|
|
||||||
|
|
||||||
// Stream computes quantiles for a stream of float64s. It is not thread-safe by
|
|
||||||
// design. Take care when using across multiple goroutines.
|
|
||||||
type Stream struct {
|
|
||||||
*stream
|
|
||||||
b Samples
|
|
||||||
sorted bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func newStream(ƒ invariant) *Stream {
|
|
||||||
x := &stream{ƒ: ƒ}
|
|
||||||
return &Stream{x, make(Samples, 0, 500), true}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Insert inserts v into the stream.
|
|
||||||
func (s *Stream) Insert(v float64) {
|
|
||||||
s.insert(Sample{Value: v, Width: 1})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Stream) insert(sample Sample) {
|
|
||||||
s.b = append(s.b, sample)
|
|
||||||
s.sorted = false
|
|
||||||
if len(s.b) == cap(s.b) {
|
|
||||||
s.flush()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Query returns the computed qth percentile value. If s was created with
|
|
||||||
// NewTargeted, and q is not in the set of quantiles provided a priori, Query
|
|
||||||
// will return an unspecified result.
|
|
||||||
func (s *Stream) Query(q float64) float64 {
|
|
||||||
if !s.flushed() {
|
|
||||||
// Fast path when there hasn't been enough data for a flush;
|
|
||||||
// this also yields better accuracy for small sets of data.
|
|
||||||
l := len(s.b)
|
|
||||||
if l == 0 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
i := int(math.Ceil(float64(l) * q))
|
|
||||||
if i > 0 {
|
|
||||||
i -= 1
|
|
||||||
}
|
|
||||||
s.maybeSort()
|
|
||||||
return s.b[i].Value
|
|
||||||
}
|
|
||||||
s.flush()
|
|
||||||
return s.stream.query(q)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Merge merges samples into the underlying stream's samples. This is handy when
|
|
||||||
// merging multiple streams from separate threads, database shards, etc.
|
|
||||||
//
|
|
||||||
// ATTENTION: This method is broken and does not yield correct results. The
|
|
||||||
// underlying algorithm is not capable of merging streams correctly.
|
|
||||||
func (s *Stream) Merge(samples Samples) {
|
|
||||||
sort.Sort(samples)
|
|
||||||
s.stream.merge(samples)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reset reinitializes and clears the list reusing the samples buffer memory.
|
|
||||||
func (s *Stream) Reset() {
|
|
||||||
s.stream.reset()
|
|
||||||
s.b = s.b[:0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Samples returns stream samples held by s.
|
|
||||||
func (s *Stream) Samples() Samples {
|
|
||||||
if !s.flushed() {
|
|
||||||
return s.b
|
|
||||||
}
|
|
||||||
s.flush()
|
|
||||||
return s.stream.samples()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Count returns the total number of samples observed in the stream
|
|
||||||
// since initialization.
|
|
||||||
func (s *Stream) Count() int {
|
|
||||||
return len(s.b) + s.stream.count()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Stream) flush() {
|
|
||||||
s.maybeSort()
|
|
||||||
s.stream.merge(s.b)
|
|
||||||
s.b = s.b[:0]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Stream) maybeSort() {
|
|
||||||
if !s.sorted {
|
|
||||||
s.sorted = true
|
|
||||||
sort.Sort(s.b)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Stream) flushed() bool {
|
|
||||||
return len(s.stream.l) > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
type stream struct {
|
|
||||||
n float64
|
|
||||||
l []Sample
|
|
||||||
ƒ invariant
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stream) reset() {
|
|
||||||
s.l = s.l[:0]
|
|
||||||
s.n = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stream) insert(v float64) {
|
|
||||||
s.merge(Samples{{v, 1, 0}})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stream) merge(samples Samples) {
|
|
||||||
// TODO(beorn7): This tries to merge not only individual samples, but
|
|
||||||
// whole summaries. The paper doesn't mention merging summaries at
|
|
||||||
// all. Unittests show that the merging is inaccurate. Find out how to
|
|
||||||
// do merges properly.
|
|
||||||
var r float64
|
|
||||||
i := 0
|
|
||||||
for _, sample := range samples {
|
|
||||||
for ; i < len(s.l); i++ {
|
|
||||||
c := s.l[i]
|
|
||||||
if c.Value > sample.Value {
|
|
||||||
// Insert at position i.
|
|
||||||
s.l = append(s.l, Sample{})
|
|
||||||
copy(s.l[i+1:], s.l[i:])
|
|
||||||
s.l[i] = Sample{
|
|
||||||
sample.Value,
|
|
||||||
sample.Width,
|
|
||||||
math.Max(sample.Delta, math.Floor(s.ƒ(s, r))-1),
|
|
||||||
// TODO(beorn7): How to calculate delta correctly?
|
|
||||||
}
|
|
||||||
i++
|
|
||||||
goto inserted
|
|
||||||
}
|
|
||||||
r += c.Width
|
|
||||||
}
|
|
||||||
s.l = append(s.l, Sample{sample.Value, sample.Width, 0})
|
|
||||||
i++
|
|
||||||
inserted:
|
|
||||||
s.n += sample.Width
|
|
||||||
r += sample.Width
|
|
||||||
}
|
|
||||||
s.compress()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stream) count() int {
|
|
||||||
return int(s.n)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stream) query(q float64) float64 {
|
|
||||||
t := math.Ceil(q * s.n)
|
|
||||||
t += math.Ceil(s.ƒ(s, t) / 2)
|
|
||||||
p := s.l[0]
|
|
||||||
var r float64
|
|
||||||
for _, c := range s.l[1:] {
|
|
||||||
r += p.Width
|
|
||||||
if r+c.Width+c.Delta > t {
|
|
||||||
return p.Value
|
|
||||||
}
|
|
||||||
p = c
|
|
||||||
}
|
|
||||||
return p.Value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stream) compress() {
|
|
||||||
if len(s.l) < 2 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
x := s.l[len(s.l)-1]
|
|
||||||
xi := len(s.l) - 1
|
|
||||||
r := s.n - 1 - x.Width
|
|
||||||
|
|
||||||
for i := len(s.l) - 2; i >= 0; i-- {
|
|
||||||
c := s.l[i]
|
|
||||||
if c.Width+x.Width+x.Delta <= s.ƒ(s, r) {
|
|
||||||
x.Width += c.Width
|
|
||||||
s.l[xi] = x
|
|
||||||
// Remove element at i.
|
|
||||||
copy(s.l[i:], s.l[i+1:])
|
|
||||||
s.l = s.l[:len(s.l)-1]
|
|
||||||
xi -= 1
|
|
||||||
} else {
|
|
||||||
x = c
|
|
||||||
xi = i
|
|
||||||
}
|
|
||||||
r -= c.Width
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stream) samples() Samples {
|
|
||||||
samples := make(Samples, len(s.l))
|
|
||||||
copy(samples, s.l)
|
|
||||||
return samples
|
|
||||||
}
|
|
1
vendor/github.com/boombuler/barcode/.gitignore
generated
vendored
@ -1 +0,0 @@
|
|||||||
.vscode/
|
|
21
vendor/github.com/boombuler/barcode/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
|||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) 2014 Florian Sundermann
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
53
vendor/github.com/boombuler/barcode/README.md
generated
vendored
@ -1,53 +0,0 @@
|
|||||||
[Join the chat at https://gitter.im/golang-barcode/Lobby](https://gitter.im/golang-barcode/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
|
||||||
|
|
||||||
## Introduction ##
|
|
||||||
|
|
||||||
This is a package for GO which can be used to create different types of barcodes.
|
|
||||||
|
|
||||||
## Supported Barcode Types ##
|
|
||||||
* 2 of 5
|
|
||||||
* Aztec Code
|
|
||||||
* Codabar
|
|
||||||
* Code 128
|
|
||||||
* Code 39
|
|
||||||
* Code 93
|
|
||||||
* Datamatrix
|
|
||||||
* EAN 13
|
|
||||||
* EAN 8
|
|
||||||
* PDF 417
|
|
||||||
* QR Code
|
|
||||||
|
|
||||||
## Example ##
|
|
||||||
|
|
||||||
This is a simple example of how to create a QR code and write it to a PNG file
|
|
||||||
```go
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"image/png"
|
|
||||||
"os"
|
|
||||||
|
|
||||||
"github.com/boombuler/barcode"
|
|
||||||
"github.com/boombuler/barcode/qr"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
// Create the barcode
|
|
||||||
qrCode, _ := qr.Encode("Hello World", qr.M, qr.Auto)
|
|
||||||
|
|
||||||
// Scale the barcode to 200x200 pixels
|
|
||||||
qrCode, _ = barcode.Scale(qrCode, 200, 200)
|
|
||||||
|
|
||||||
// create the output file
|
|
||||||
file, _ := os.Create("qrcode.png")
|
|
||||||
defer file.Close()
|
|
||||||
|
|
||||||
// encode the barcode as png
|
|
||||||
png.Encode(file, qrCode)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Documentation ##
|
|
||||||
See [GoDoc](https://godoc.org/github.com/boombuler/barcode)
|
|
||||||
|
|
||||||
To create a barcode use the Encode function from one of the subpackages.
|
|
42
vendor/github.com/boombuler/barcode/barcode.go
generated
vendored
@ -1,42 +0,0 @@
|
|||||||
package barcode
|
|
||||||
|
|
||||||
import "image"
|
|
||||||
|
|
||||||
const (
|
|
||||||
TypeAztec = "Aztec"
|
|
||||||
TypeCodabar = "Codabar"
|
|
||||||
TypeCode128 = "Code 128"
|
|
||||||
TypeCode39 = "Code 39"
|
|
||||||
TypeCode93 = "Code 93"
|
|
||||||
TypeDataMatrix = "DataMatrix"
|
|
||||||
TypeEAN8 = "EAN 8"
|
|
||||||
TypeEAN13 = "EAN 13"
|
|
||||||
TypePDF = "PDF417"
|
|
||||||
TypeQR = "QR Code"
|
|
||||||
Type2of5 = "2 of 5"
|
|
||||||
Type2of5Interleaved = "2 of 5 (interleaved)"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Contains some meta information about a barcode
|
|
||||||
type Metadata struct {
|
|
||||||
// the name of the barcode kind
|
|
||||||
CodeKind string
|
|
||||||
// contains 1 for 1D barcodes or 2 for 2D barcodes
|
|
||||||
Dimensions byte
|
|
||||||
}
|
|
||||||
|
|
||||||
// a rendered and encoded barcode
|
|
||||||
type Barcode interface {
|
|
||||||
image.Image
|
|
||||||
// returns some meta information about the barcode
|
|
||||||
Metadata() Metadata
|
|
||||||
// the data that was encoded in this barcode
|
|
||||||
Content() string
|
|
||||||
}
|
|
||||||
|
|
||||||
// Additional interface that some barcodes might implement to provide
|
|
||||||
// the value of its checksum.
|
|
||||||
type BarcodeIntCS interface {
|
|
||||||
Barcode
|
|
||||||
CheckSum() int
|
|
||||||
}
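To make the role of the Barcode interface concrete, here is a hedged consumer sketch. It assumes the qr subpackage's Encode signature as used in the README above; the describe helper and its output format are made up for illustration.

```go
package main

import (
	"fmt"

	"github.com/boombuler/barcode"
	"github.com/boombuler/barcode/qr"
)

// describe works with any value satisfying the Barcode interface above.
func describe(b barcode.Barcode) {
	m := b.Metadata()
	fmt.Printf("kind=%s dimensions=%dD content=%q bounds=%v\n",
		m.CodeKind, m.Dimensions, b.Content(), b.Bounds())
}

func main() {
	// qr.Encode returns a value that satisfies barcode.Barcode.
	code, err := qr.Encode("Hello World", qr.M, qr.Auto)
	if err != nil {
		panic(err)
	}
	describe(code)
}
```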
|
|
1
vendor/github.com/boombuler/barcode/go.mod
generated
vendored
@ -1 +0,0 @@
|
|||||||
module github.com/boombuler/barcode
|
|
66
vendor/github.com/boombuler/barcode/qr/alphanumeric.go
generated
vendored
@ -1,66 +0,0 @@
|
|||||||
package qr
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/boombuler/barcode/utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
const charSet string = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:"
|
|
||||||
|
|
||||||
func stringToAlphaIdx(content string) <-chan int {
|
|
||||||
result := make(chan int)
|
|
||||||
go func() {
|
|
||||||
for _, r := range content {
|
|
||||||
idx := strings.IndexRune(charSet, r)
|
|
||||||
result <- idx
|
|
||||||
if idx < 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
close(result)
|
|
||||||
}()
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
func encodeAlphaNumeric(content string, ecl ErrorCorrectionLevel) (*utils.BitList, *versionInfo, error) {
|
|
||||||
|
|
||||||
contentLenIsOdd := len(content)%2 == 1
|
|
||||||
contentBitCount := (len(content) / 2) * 11
|
|
||||||
if contentLenIsOdd {
|
|
||||||
contentBitCount += 6
|
|
||||||
}
|
|
||||||
vi := findSmallestVersionInfo(ecl, alphaNumericMode, contentBitCount)
|
|
||||||
if vi == nil {
|
|
||||||
return nil, nil, errors.New("To much data to encode")
|
|
||||||
}
|
|
||||||
|
|
||||||
res := new(utils.BitList)
|
|
||||||
res.AddBits(int(alphaNumericMode), 4)
|
|
||||||
res.AddBits(len(content), vi.charCountBits(alphaNumericMode))
|
|
||||||
|
|
||||||
encoder := stringToAlphaIdx(content)
|
|
||||||
|
|
||||||
for idx := 0; idx < len(content)/2; idx++ {
|
|
||||||
c1 := <-encoder
|
|
||||||
c2 := <-encoder
|
|
||||||
if c1 < 0 || c2 < 0 {
|
|
||||||
return nil, nil, fmt.Errorf("\"%s\" can not be encoded as %s", content, AlphaNumeric)
|
|
||||||
}
|
|
||||||
res.AddBits(c1*45+c2, 11)
|
|
||||||
}
|
|
||||||
if contentLenIsOdd {
|
|
||||||
c := <-encoder
|
|
||||||
if c < 0 {
|
|
||||||
return nil, nil, fmt.Errorf("\"%s\" can not be encoded as %s", content, AlphaNumeric)
|
|
||||||
}
|
|
||||||
res.AddBits(c, 6)
|
|
||||||
}
|
|
||||||
|
|
||||||
addPaddingAndTerminator(res, vi)
|
|
||||||
|
|
||||||
return res, vi, nil
|
|
||||||
}
|
|
23
vendor/github.com/boombuler/barcode/qr/automatic.go
generated
vendored
@ -1,23 +0,0 @@
|
|||||||
package qr
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/boombuler/barcode/utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
func encodeAuto(content string, ecl ErrorCorrectionLevel) (*utils.BitList, *versionInfo, error) {
|
|
||||||
bits, vi, _ := Numeric.getEncoder()(content, ecl)
|
|
||||||
if bits != nil && vi != nil {
|
|
||||||
return bits, vi, nil
|
|
||||||
}
|
|
||||||
bits, vi, _ = AlphaNumeric.getEncoder()(content, ecl)
|
|
||||||
if bits != nil && vi != nil {
|
|
||||||
return bits, vi, nil
|
|
||||||
}
|
|
||||||
bits, vi, _ = Unicode.getEncoder()(content, ecl)
|
|
||||||
if bits != nil && vi != nil {
|
|
||||||
return bits, vi, nil
|
|
||||||
}
|
|
||||||
return nil, nil, fmt.Errorf("No encoding found to encode \"%s\"", content)
|
|
||||||
}
|
|
Some files were not shown because too many files have changed in this diff.