diff --git a/acceptance.bats b/acceptance.bats
index a8525ad..2a6d94c 100755
--- a/acceptance.bats
+++ b/acceptance.bats
@@ -154,7 +154,12 @@ resetCacheFolder() {
}
@test "Pass when using a valid HTTP -schema-location" {
- run bin/kubeconform -schema-location 'https://kubernetesjsonschema.dev/{{ .NormalizedKubernetesVersion }}-standalone{{ .StrictSuffix }}/{{ .ResourceKind }}{{ .KindSuffix }}.json' fixtures/valid.yaml
+ run bin/kubeconform -schema-location 'https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/{{ .NormalizedKubernetesVersion }}-standalone{{ .StrictSuffix }}/{{ .ResourceKind }}{{ .KindSuffix }}.json' fixtures/valid.yaml
+ [ "$status" -eq 0 ]
+}
+
+@test "Pass when using schemas with HTTP references" {
+ run bin/kubeconform -summary -schema-location 'https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/{{ .NormalizedKubernetesVersion }}{{ .StrictSuffix }}/{{ .ResourceKind }}{{ .KindSuffix }}.json' fixtures/valid.yaml
[ "$status" -eq 0 ]
}
@@ -252,7 +257,7 @@ resetCacheFolder() {
@test "Fail when no schema found, ensure 404 is not cached on disk" {
resetCacheFolder
- run bin/kubeconform -cache cache -schema-location 'https://raw.githubusercontent.com/garethr/openshift-json-schema/master/doesnotexist.json' fixtures/valid.yaml
+ run bin/kubeconform -cache cache -schema-location 'https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/doesnotexist.json' fixtures/valid.yaml
[ "$status" -eq 1 ]
[ "$output" == 'fixtures/valid.yaml - ReplicationController bob failed validation: could not find schema for ReplicationController' ]
[ "`ls cache/ | wc -l`" -eq 0 ]
@@ -287,14 +292,14 @@ resetCacheFolder() {
@test "Fail when parsing a List that contains an invalid resource" {
run bin/kubeconform -summary fixtures/list_invalid.yaml
[ "$status" -eq 1 ]
- [ "${lines[0]}" == 'fixtures/list_invalid.yaml - ReplicationController bob is invalid: For field spec.replicas: Invalid type. Expected: [integer,null], given: string' ]
+ [ "${lines[0]}" == 'fixtures/list_invalid.yaml - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema: '\''/spec/replicas'\'' does not validate with https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#/properties/spec/properties/replicas/type: expected integer or null, but got string' ]
[ "${lines[1]}" == 'Summary: 2 resources found in 1 file - Valid: 1, Invalid: 1, Errors: 0, Skipped: 0' ]
}
@test "Fail when parsing a List that contains an invalid resource from stdin" {
run bash -c "cat fixtures/list_invalid.yaml | bin/kubeconform -summary -"
[ "$status" -eq 1 ]
- [ "${lines[0]}" == 'stdin - ReplicationController bob is invalid: For field spec.replicas: Invalid type. Expected: [integer,null], given: string' ]
+ [ "${lines[0]}" == 'stdin - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema: '\''/spec/replicas'\'' does not validate with https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#/properties/spec/properties/replicas/type: expected integer or null, but got string' ]
[ "${lines[1]}" == 'Summary: 2 resources found parsing stdin - Valid: 1, Invalid: 1, Errors: 0, Skipped: 0' ]
}
diff --git a/go.mod b/go.mod
index 84635b4..46a7e83 100644
--- a/go.mod
+++ b/go.mod
@@ -3,10 +3,8 @@ module github.com/yannh/kubeconform
go 1.17
require (
- github.com/beevik/etree v1.1.0
- github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
- github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
- github.com/xeipuuv/gojsonschema v1.2.0
- gopkg.in/yaml.v2 v2.4.0 // indirect
+ github.com/santhosh-tekuri/jsonschema/v5 v5.1.1
sigs.k8s.io/yaml v1.2.0
)
+
+require gopkg.in/yaml.v2 v2.4.0 // indirect
diff --git a/go.sum b/go.sum
index 456e485..98e235c 100644
--- a/go.sum
+++ b/go.sum
@@ -1,20 +1,7 @@
-github.com/beevik/etree v1.1.0 h1:T0xke/WvNtMoCqgzPhkX2r4rjY3GDZFi+FjpRZY2Jbs=
-github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
-github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
-github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
-github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
-github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
-github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
-github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
-github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
-github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
+github.com/santhosh-tekuri/jsonschema/v5 v5.1.1 h1:lEOLY2vyGIqKWUI9nzsOJRV3mb3WC9dXYORsLEUcoeY=
+github.com/santhosh-tekuri/jsonschema/v5 v5.1.1/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/pkg/registry/http.go b/pkg/registry/http.go
index db49f96..2af0c6c 100644
--- a/pkg/registry/http.go
+++ b/pkg/registry/http.go
@@ -61,15 +61,15 @@ func newHTTPRegistry(schemaPathTemplate string, cacheFolder string, strict bool,
}
// DownloadSchema downloads the schema for a particular resource from an HTTP server
-func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) ([]byte, error) {
+func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
url, err := schemaPath(r.schemaPathTemplate, resourceKind, resourceAPIVersion, k8sVersion, r.strict)
if err != nil {
- return nil, err
+ return "", nil, err
}
if r.cache != nil {
if b, err := r.cache.Get(resourceKind, resourceAPIVersion, k8sVersion); err == nil {
- return b.([]byte), nil
+ return url, b.([]byte), nil
}
}
@@ -79,7 +79,7 @@ func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVers
if r.debug {
log.Println(msg)
}
- return nil, errors.New(msg)
+ return url, nil, errors.New(msg)
}
defer resp.Body.Close()
@@ -88,7 +88,7 @@ func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVers
if r.debug {
log.Print(msg)
}
- return nil, newNotFoundError(errors.New(msg))
+ return url, nil, newNotFoundError(errors.New(msg))
}
if resp.StatusCode != http.StatusOK {
@@ -96,7 +96,7 @@ func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVers
if r.debug {
log.Print(msg)
}
- return nil, fmt.Errorf(msg)
+ return url, nil, fmt.Errorf(msg)
}
body, err := ioutil.ReadAll(resp.Body)
@@ -105,7 +105,7 @@ func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVers
if r.debug {
log.Print(msg)
}
- return nil, errors.New(msg)
+ return url, nil, errors.New(msg)
}
if r.debug {
@@ -114,9 +114,9 @@ func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVers
if r.cache != nil {
if err := r.cache.Set(resourceKind, resourceAPIVersion, k8sVersion, body); err != nil {
- return nil, fmt.Errorf("failed writing schema to cache: %s", err)
+ return url, nil, fmt.Errorf("failed writing schema to cache: %s", err)
}
}
- return body, nil
+ return url, body, nil
}
diff --git a/pkg/registry/http_test.go b/pkg/registry/http_test.go
index f759a1e..1ab4791 100644
--- a/pkg/registry/http_test.go
+++ b/pkg/registry/http_test.go
@@ -100,7 +100,7 @@ func TestDownloadSchema(t *testing.T) {
strict: testCase.strict,
}
- res, err := reg.DownloadSchema(testCase.resourceKind, testCase.resourceAPIVersion, testCase.k8sversion)
+ _, res, err := reg.DownloadSchema(testCase.resourceKind, testCase.resourceAPIVersion, testCase.k8sversion)
if err == nil || testCase.expectErr == nil {
if err != testCase.expectErr {
t.Errorf("during test '%s': expected error, got:\n%s\n%s\n", testCase.name, testCase.expectErr, err)
diff --git a/pkg/registry/local.go b/pkg/registry/local.go
index 81d69e2..ec9e047 100644
--- a/pkg/registry/local.go
+++ b/pkg/registry/local.go
@@ -24,10 +24,10 @@ func newLocalRegistry(pathTemplate string, strict bool, debug bool) (*LocalRegis
}
// DownloadSchema retrieves the schema from a file for the resource
-func (r LocalRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) ([]byte, error) {
+func (r LocalRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
schemaFile, err := schemaPath(r.pathTemplate, resourceKind, resourceAPIVersion, k8sVersion, r.strict)
if err != nil {
- return []byte{}, nil
+ return schemaFile, []byte{}, nil
}
f, err := os.Open(schemaFile)
if err != nil {
@@ -36,14 +36,14 @@ func (r LocalRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersi
if r.debug {
log.Print(msg)
}
- return nil, newNotFoundError(errors.New(msg))
+ return schemaFile, nil, newNotFoundError(errors.New(msg))
}
msg := fmt.Sprintf("failed to open schema at %s: %s", schemaFile, err)
if r.debug {
log.Print(msg)
}
- return nil, errors.New(msg)
+ return schemaFile, nil, errors.New(msg)
}
defer f.Close()
@@ -53,11 +53,11 @@ func (r LocalRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersi
if r.debug {
log.Print(msg)
}
- return nil, err
+ return schemaFile, nil, err
}
if r.debug {
log.Printf("using schema found at %s", schemaFile)
}
- return content, nil
+ return schemaFile, content, nil
}
diff --git a/pkg/registry/registry.go b/pkg/registry/registry.go
index afde801..154f40c 100644
--- a/pkg/registry/registry.go
+++ b/pkg/registry/registry.go
@@ -13,7 +13,7 @@ type Manifest struct {
// Registry is an interface that should be implemented by any source of Kubernetes schemas
type Registry interface {
- DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) ([]byte, error)
+ DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error)
}
// Retryable indicates whether an error is a temporary or a permanent failure
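# Note: the Registry interface now returns the schema's location alongside its raw bytes, and the
# validator uses that location as the compile URL. A rough sketch of a conforming implementation
# under the new signature (the staticRegistry type and its memory:// URLs are illustrative only,
# not part of this change; newNotFoundError is the package's existing helper):

```go
package registry

import "fmt"

// staticRegistry is a hypothetical in-memory Registry, shown only to
// illustrate the new (location, bytes, error) return shape.
type staticRegistry struct {
	schemas map[string][]byte // schema documents keyed by resource kind
}

func (r staticRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
	b, ok := r.schemas[resourceKind]
	if !ok {
		// Missing schemas are reported with the same not-found error type
		// used by the HTTP and local registries.
		return "", nil, newNotFoundError(fmt.Errorf("no schema for %s", resourceKind))
	}
	// The first return value identifies where the schema came from; the
	// validator passes it to the JSON schema compiler as the document URL.
	return "memory://" + resourceKind + ".json", b, nil
}
```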
diff --git a/pkg/validator/validator.go b/pkg/validator/validator.go
index 09eeb53..34b9036 100644
--- a/pkg/validator/validator.go
+++ b/pkg/validator/validator.go
@@ -6,11 +6,11 @@ import (
"fmt"
"io"
+ jsonschema "github.com/santhosh-tekuri/jsonschema/v5"
+ _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
"github.com/yannh/kubeconform/pkg/cache"
"github.com/yannh/kubeconform/pkg/registry"
"github.com/yannh/kubeconform/pkg/resource"
-
- "github.com/xeipuuv/gojsonschema"
"sigs.k8s.io/yaml"
)
@@ -91,7 +91,7 @@ func New(schemaLocations []string, opts Opts) (Validator, error) {
type v struct {
opts Opts
schemaCache cache.Cache
- schemaDownload func(registries []registry.Registry, kind, version, k8sVersion string) (*gojsonschema.Schema, error)
+ schemaDownload func(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error)
regs []registry.Registry
}
@@ -151,13 +151,13 @@ func (val *v) ValidateResource(res resource.Resource) Result {
}
cached := false
- var schema *gojsonschema.Schema
+ var schema *jsonschema.Schema
if val.schemaCache != nil {
s, err := val.schemaCache.Get(sig.Kind, sig.Version, val.opts.KubernetesVersion)
if err == nil {
cached = true
- schema = s.(*gojsonschema.Schema)
+ schema = s.(*jsonschema.Schema)
}
}
@@ -179,28 +179,12 @@ func (val *v) ValidateResource(res resource.Resource) Result {
return Result{Resource: res, Err: fmt.Errorf("could not find schema for %s", sig.Kind), Status: Error}
}
- resourceLoader := gojsonschema.NewGoLoader(r)
-
- results, err := schema.Validate(resourceLoader)
+ err = schema.Validate(r)
if err != nil {
- // This error can only happen if the Object to validate is poorly formed. There's no hope of saving this one
- return Result{Resource: res, Status: Error, Err: fmt.Errorf("problem validating schema. Check JSON formatting: %s", err)}
- }
-
- if results.Valid() {
- return Result{Resource: res, Status: Valid}
+ return Result{Resource: res, Status: Invalid, Err: fmt.Errorf("problem validating schema. Check JSON formatting: %s", err)}
}
- msg := ""
- for _, errMsg := range results.Errors() {
- if msg != "" {
- msg += " - "
- }
- details := errMsg.Details()
- msg += fmt.Sprintf("For field %s: %s", details["field"].(string), errMsg.Description())
- }
-
- return Result{Resource: res, Status: Invalid, Err: fmt.Errorf("%s", msg)}
+ return Result{Resource: res, Status: Valid}
}
// ValidateWithContext validates resources found in r
@@ -235,15 +219,15 @@ func (val *v) Validate(filename string, r io.ReadCloser) []Result {
return val.ValidateWithContext(context.Background(), filename, r)
}
-func downloadSchema(registries []registry.Registry, kind, version, k8sVersion string) (*gojsonschema.Schema, error) {
+func downloadSchema(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error) {
var err error
var schemaBytes []byte
+ var path string
for _, reg := range registries {
- schemaBytes, err = reg.DownloadSchema(kind, version, k8sVersion)
+ path, schemaBytes, err = reg.DownloadSchema(kind, version, k8sVersion)
if err == nil {
- schema, err := gojsonschema.NewSchema(gojsonschema.NewBytesLoader(schemaBytes))
-
+ schema, err := jsonschema.CompileString(path, string(schemaBytes))
// If we got a non-parseable response, we try the next registry
if err != nil {
continue
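# Note: santhosh-tekuri/jsonschema/v5 compiles a schema from a string plus a URL (hence the extra
# path returned by the registries) and validates an already-decoded JSON value rather than raw
# bytes. A minimal, self-contained sketch of that flow outside kubeconform; the inline schema and
# document below are made up for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"

	jsonschema "github.com/santhosh-tekuri/jsonschema/v5"
	_ "github.com/santhosh-tekuri/jsonschema/v5/httploader" // registers an HTTP(S) loader for remote $refs
)

func main() {
	// The first argument names the schema; it also serves as the base URL for
	// resolving relative $refs, which is why DownloadSchema now returns a path.
	schema, err := jsonschema.CompileString("replicationcontroller.json",
		`{"type": "object", "properties": {"replicas": {"type": ["integer", "null"]}}}`)
	if err != nil {
		panic(err)
	}

	// Validate expects a decoded value (interface{}), not raw JSON bytes.
	var doc interface{}
	if err := json.Unmarshal([]byte(`{"replicas": "three"}`), &doc); err != nil {
		panic(err)
	}
	if err := schema.Validate(doc); err != nil {
		fmt.Println(err) // validation failures come back as a single error value
	}
}
```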
diff --git a/pkg/validator/validator_test.go b/pkg/validator/validator_test.go
index f6fcd15..16f2c6a 100644
--- a/pkg/validator/validator_test.go
+++ b/pkg/validator/validator_test.go
@@ -9,16 +9,16 @@ import (
)
type mockRegistry struct {
- SchemaDownloader func() ([]byte, error)
+ SchemaDownloader func() (string, []byte, error)
}
-func newMockRegistry(f func() ([]byte, error)) *mockRegistry {
+func newMockRegistry(f func() (string, []byte, error)) *mockRegistry {
return &mockRegistry{
SchemaDownloader: f,
}
}
-func (m mockRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) ([]byte, error) {
+func (m mockRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
return m.SchemaDownloader()
}
@@ -362,17 +362,17 @@ lastName: bar
schemaCache: nil,
schemaDownload: downloadSchema,
regs: []registry.Registry{
- newMockRegistry(func() ([]byte, error) {
- return testCase.schemaRegistry1, nil
+ newMockRegistry(func() (string, []byte, error) {
+ return "", testCase.schemaRegistry1, nil
}),
- newMockRegistry(func() ([]byte, error) {
- return testCase.schemaRegistry2, nil
+ newMockRegistry(func() (string, []byte, error) {
+ return "", testCase.schemaRegistry2, nil
}),
},
}
if got := val.ValidateResource(resource.Resource{Bytes: testCase.rawResource}); got.Status != testCase.expect {
if got.Err != nil {
- t.Errorf("%d - expected %d, got %d: %s", i, testCase.expect, got.Status, got.Err.Error())
+ t.Errorf("Test '%s' - expected %d, got %d: %s", testCase.name, testCase.expect, got.Status, got.Err.Error())
} else {
t.Errorf("%d - expected %d, got %d", i, testCase.expect, got.Status)
}
diff --git a/vendor/github.com/beevik/etree/.travis.yml b/vendor/github.com/beevik/etree/.travis.yml
deleted file mode 100644
index f4cb25d..0000000
--- a/vendor/github.com/beevik/etree/.travis.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-language: go
-sudo: false
-
-go:
- - 1.11.x
- - tip
-
-matrix:
- allow_failures:
- - go: tip
-
-script:
- - go vet ./...
- - go test -v ./...
diff --git a/vendor/github.com/beevik/etree/CONTRIBUTORS b/vendor/github.com/beevik/etree/CONTRIBUTORS
deleted file mode 100644
index 03211a8..0000000
--- a/vendor/github.com/beevik/etree/CONTRIBUTORS
+++ /dev/null
@@ -1,10 +0,0 @@
-Brett Vickers (beevik)
-Felix Geisendörfer (felixge)
-Kamil Kisiel (kisielk)
-Graham King (grahamking)
-Matt Smith (ma314smith)
-Michal Jemala (michaljemala)
-Nicolas Piganeau (npiganeau)
-Chris Brown (ccbrown)
-Earncef Sequeira (earncef)
-Gabriel de Labachelerie (wuzuf)
diff --git a/vendor/github.com/beevik/etree/LICENSE b/vendor/github.com/beevik/etree/LICENSE
deleted file mode 100644
index 26f1f77..0000000
--- a/vendor/github.com/beevik/etree/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright 2015-2019 Brett Vickers. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
- 1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
- 2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDER ``AS IS'' AND ANY
-EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDER OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
-OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/beevik/etree/README.md b/vendor/github.com/beevik/etree/README.md
deleted file mode 100644
index 08ec26b..0000000
--- a/vendor/github.com/beevik/etree/README.md
+++ /dev/null
@@ -1,205 +0,0 @@
-[![Build Status](https://travis-ci.org/beevik/etree.svg?branch=master)](https://travis-ci.org/beevik/etree)
-[![GoDoc](https://godoc.org/github.com/beevik/etree?status.svg)](https://godoc.org/github.com/beevik/etree)
-
-etree
-=====
-
-The etree package is a lightweight, pure go package that expresses XML in
-the form of an element tree. Its design was inspired by the Python
-[ElementTree](http://docs.python.org/2/library/xml.etree.elementtree.html)
-module.
-
-Some of the package's capabilities and features:
-
-* Represents XML documents as trees of elements for easy traversal.
-* Imports, serializes, modifies or creates XML documents from scratch.
-* Writes and reads XML to/from files, byte slices, strings and io interfaces.
-* Performs simple or complex searches with lightweight XPath-like query APIs.
-* Auto-indents XML using spaces or tabs for better readability.
-* Implemented in pure go; depends only on standard go libraries.
-* Built on top of the go [encoding/xml](http://golang.org/pkg/encoding/xml)
- package.
-
-### Creating an XML document
-
-The following example creates an XML document from scratch using the etree
-package and outputs its indented contents to stdout.
-```go
-doc := etree.NewDocument()
-doc.CreateProcInst("xml", `version="1.0" encoding="UTF-8"`)
-doc.CreateProcInst("xml-stylesheet", `type="text/xsl" href="style.xsl"`)
-
-people := doc.CreateElement("People")
-people.CreateComment("These are all known people")
-
-jon := people.CreateElement("Person")
-jon.CreateAttr("name", "Jon")
-
-sally := people.CreateElement("Person")
-sally.CreateAttr("name", "Sally")
-
-doc.Indent(2)
-doc.WriteTo(os.Stdout)
-```
-
-Output:
-```xml
-<?xml version="1.0" encoding="UTF-8"?>
-<?xml-stylesheet type="text/xsl" href="style.xsl"?>
-<People>
-  <!--These are all known people-->
-  <Person name="Jon"/>
-  <Person name="Sally"/>
-</People>
-```
-
-### Reading an XML file
-
-Suppose you have a file on disk called `bookstore.xml` containing the
-following data:
-
-```xml
-<?xml version="1.0" encoding="UTF-8"?>
-<bookstore xmlns:p="urn:schemas-books-com:prices">
-  <book category="COOKING">
-    <title lang="en">Everyday Italian</title>
-    <author>Giada De Laurentiis</author>
-    <year>2005</year>
-    <p:price>30.00</p:price>
-  </book>
-
-  <book category="CHILDREN">
-    <title lang="en">Harry Potter</title>
-    <author>J K. Rowling</author>
-    <year>2005</year>
-    <p:price>29.99</p:price>
-  </book>
-
-  <book category="WEB">
-    <title lang="en">XQuery Kick Start</title>
-    <author>James McGovern</author>
-    <author>Per Bothner</author>
-    <author>Kurt Cagle</author>
-    <author>James Linn</author>
-    <author>Vaidyanathan Nagarajan</author>
-    <year>2003</year>
-    <p:price>49.99</p:price>
-  </book>
-
-  <book category="WEB">
-    <title lang="en">Learning XML</title>
-    <author>Erik T. Ray</author>
-    <year>2003</year>
-    <p:price>39.95</p:price>
-  </book>
-
-</bookstore>
-```
-
-This code reads the file's contents into an etree document.
-```go
-doc := etree.NewDocument()
-if err := doc.ReadFromFile("bookstore.xml"); err != nil {
- panic(err)
-}
-```
-
-You can also read XML from a string, a byte slice, or an `io.Reader`.
-
-### Processing elements and attributes
-
-This example illustrates several ways to access elements and attributes using
-etree selection queries.
-```go
-root := doc.SelectElement("bookstore")
-fmt.Println("ROOT element:", root.Tag)
-
-for _, book := range root.SelectElements("book") {
- fmt.Println("CHILD element:", book.Tag)
- if title := book.SelectElement("title"); title != nil {
- lang := title.SelectAttrValue("lang", "unknown")
- fmt.Printf(" TITLE: %s (%s)\n", title.Text(), lang)
- }
- for _, attr := range book.Attr {
- fmt.Printf(" ATTR: %s=%s\n", attr.Key, attr.Value)
- }
-}
-```
-Output:
-```
-ROOT element: bookstore
-CHILD element: book
- TITLE: Everyday Italian (en)
- ATTR: category=COOKING
-CHILD element: book
- TITLE: Harry Potter (en)
- ATTR: category=CHILDREN
-CHILD element: book
- TITLE: XQuery Kick Start (en)
- ATTR: category=WEB
-CHILD element: book
- TITLE: Learning XML (en)
- ATTR: category=WEB
-```
-
-### Path queries
-
-This example uses etree's path functions to select all book titles that fall
-into the category of 'WEB'. The double-slash prefix in the path causes the
-search for book elements to occur recursively; book elements may appear at any
-level of the XML hierarchy.
-```go
-for _, t := range doc.FindElements("//book[@category='WEB']/title") {
- fmt.Println("Title:", t.Text())
-}
-```
-
-Output:
-```
-Title: XQuery Kick Start
-Title: Learning XML
-```
-
-This example finds the first book element under the root bookstore element and
-outputs the tag and text of each of its child elements.
-```go
-for _, e := range doc.FindElements("./bookstore/book[1]/*") {
- fmt.Printf("%s: %s\n", e.Tag, e.Text())
-}
-```
-
-Output:
-```
-title: Everyday Italian
-author: Giada De Laurentiis
-year: 2005
-price: 30.00
-```
-
-This example finds all books with a price of 49.99 and outputs their titles.
-```go
-path := etree.MustCompilePath("./bookstore/book[p:price='49.99']/title")
-for _, e := range doc.FindElementsPath(path) {
- fmt.Println(e.Text())
-}
-```
-
-Output:
-```
-XQuery Kick Start
-```
-
-Note that this example uses the FindElementsPath function, which takes as an
-argument a pre-compiled path object. Use precompiled paths when you plan to
-search with the same path more than once.
-
-### Other features
-
-These are just a few examples of the things the etree package can do. See the
-[documentation](http://godoc.org/github.com/beevik/etree) for a complete
-description of its capabilities.
-
-### Contributing
-
-This project accepts contributions. Just fork the repo and submit a pull
-request!
diff --git a/vendor/github.com/beevik/etree/RELEASE_NOTES.md b/vendor/github.com/beevik/etree/RELEASE_NOTES.md
deleted file mode 100644
index ee59d7a..0000000
--- a/vendor/github.com/beevik/etree/RELEASE_NOTES.md
+++ /dev/null
@@ -1,109 +0,0 @@
-Release v1.1.0
-==============
-
-**New Features**
-
-* New attribute helpers.
- * Added the `Element.SortAttrs` method, which lexicographically sorts an
- element's attributes by key.
-* New `ReadSettings` properties.
- * Added `Entity` for the support of custom entity maps.
-* New `WriteSettings` properties.
- * Added `UseCRLF` to allow the output of CR-LF newlines instead of the
- default LF newlines. This is useful on Windows systems.
-* Additional support for text and CDATA sections.
- * The `Element.Text` method now returns the concatenation of all consecutive
- character data tokens immediately following an element's opening tag.
- * Added `Element.SetCData` to replace the character data immediately
- following an element's opening tag with a CDATA section.
- * Added `Element.CreateCData` to create and add a CDATA section child
- `CharData` token to an element.
- * Added `Element.CreateText` to create and add a child text `CharData` token
- to an element.
- * Added `NewCData` to create a parentless CDATA section `CharData` token.
- * Added `NewText` to create a parentless text `CharData`
- token.
- * Added `CharData.IsCData` to detect if the token contains a CDATA section.
- * Added `CharData.IsWhitespace` to detect if the token contains whitespace
- inserted by one of the document Indent functions.
- * Modified `Element.SetText` so that it replaces a run of consecutive
- character data tokens following the element's opening tag (instead of just
- the first one).
-* New "tail text" support.
- * Added the `Element.Tail` method, which returns the text immediately
- following an element's closing tag.
- * Added the `Element.SetTail` method, which modifies the text immediately
- following an element's closing tag.
-* New element child insertion and removal methods.
- * Added the `Element.InsertChildAt` method, which inserts a new child token
- before the specified child token index.
- * Added the `Element.RemoveChildAt` method, which removes the child token at
- the specified child token index.
-* New element and attribute queries.
- * Added the `Element.Index` method, which returns the element's index within
- its parent element's child token list.
- * Added the `Element.NamespaceURI` method to return the namespace URI
- associated with an element.
- * Added the `Attr.NamespaceURI` method to return the namespace URI
- associated with an element.
- * Added the `Attr.Element` method to return the element that an attribute
- belongs to.
-* New Path filter functions.
- * Added `[local-name()='val']` to keep elements whose unprefixed tag matches
- the desired value.
- * Added `[name()='val']` to keep elements whose full tag matches the desired
- value.
- * Added `[namespace-prefix()='val']` to keep elements whose namespace prefix
- matches the desired value.
- * Added `[namespace-uri()='val']` to keep elements whose namespace URI
- matches the desired value.
-
-**Bug Fixes**
-
-* A default XML `CharSetReader` is now used to prevent failed parsing of XML
- documents using certain encodings.
- ([Issue](/~https://github.com/beevik/etree/issues/53)).
-* All characters are now properly escaped according to XML parsing rules.
- ([Issue](/~https://github.com/beevik/etree/issues/55)).
-* The `Document.Indent` and `Document.IndentTabs` functions no longer insert
- empty string `CharData` tokens.
-
-**Deprecated**
-
-* `Element`
- * The `InsertChild` method is deprecated. Use `InsertChildAt` instead.
- * The `CreateCharData` method is deprecated. Use `CreateText` instead.
-* `CharData`
- * The `NewCharData` method is deprecated. Use `NewText` instead.
-
-
-Release v1.0.1
-==============
-
-**Changes**
-
-* Added support for absolute etree Path queries. An absolute path begins with
- `/` or `//` and begins its search from the element's document root.
-* Added [`GetPath`](https://godoc.org/github.com/beevik/etree#Element.GetPath)
- and [`GetRelativePath`](https://godoc.org/github.com/beevik/etree#Element.GetRelativePath)
- functions to the [`Element`](https://godoc.org/github.com/beevik/etree#Element)
- type.
-
-**Breaking changes**
-
-* A path starting with `//` is now interpreted as an absolute path.
- Previously, it was interpreted as a relative path starting from the element
- whose
- [`FindElement`](https://godoc.org/github.com/beevik/etree#Element.FindElement)
- method was called. To remain compatible with this release, all paths
- prefixed with `//` should be prefixed with `.//` when called from any
- element other than the document's root.
-* [**edit 2/1/2019**]: Minor releases should not contain breaking changes.
- Even though this breaking change was very minor, it was a mistake to include
- it in this minor release. In the future, all breaking changes will be
- limited to major releases (e.g., version 2.0.0).
-
-Release v1.0.0
-==============
-
-Initial release.
diff --git a/vendor/github.com/beevik/etree/etree.go b/vendor/github.com/beevik/etree/etree.go
deleted file mode 100644
index 9e24f90..0000000
--- a/vendor/github.com/beevik/etree/etree.go
+++ /dev/null
@@ -1,1453 +0,0 @@
-// Copyright 2015-2019 Brett Vickers.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package etree provides XML services through an Element Tree
-// abstraction.
-package etree
-
-import (
- "bufio"
- "bytes"
- "encoding/xml"
- "errors"
- "io"
- "os"
- "sort"
- "strings"
-)
-
-const (
- // NoIndent is used with Indent to disable all indenting.
- NoIndent = -1
-)
-
-// ErrXML is returned when XML parsing fails due to incorrect formatting.
-var ErrXML = errors.New("etree: invalid XML format")
-
-// ReadSettings allow for changing the default behavior of the ReadFrom*
-// methods.
-type ReadSettings struct {
- // CharsetReader to be passed to standard xml.Decoder. Default: nil.
- CharsetReader func(charset string, input io.Reader) (io.Reader, error)
-
- // Permissive allows input containing common mistakes such as missing tags
- // or attribute values. Default: false.
- Permissive bool
-
- // Entity to be passed to standard xml.Decoder. Default: nil.
- Entity map[string]string
-}
-
-// newReadSettings creates a default ReadSettings record.
-func newReadSettings() ReadSettings {
- return ReadSettings{
- CharsetReader: func(label string, input io.Reader) (io.Reader, error) {
- return input, nil
- },
- Permissive: false,
- }
-}
-
-// WriteSettings allow for changing the serialization behavior of the WriteTo*
-// methods.
-type WriteSettings struct {
- // CanonicalEndTags forces the production of XML end tags, even for
- // elements that have no child elements. Default: false.
- CanonicalEndTags bool
-
- // CanonicalText forces the production of XML character references for
- // text data characters &, <, and >. If false, XML character references
- // are also produced for " and '. Default: false.
- CanonicalText bool
-
- // CanonicalAttrVal forces the production of XML character references for
- // attribute value characters &, < and ". If false, XML character
- // references are also produced for > and '. Default: false.
- CanonicalAttrVal bool
-
- // When outputting indented XML, use a carriage return and linefeed
- // ("\r\n") as a new-line delimiter instead of just a linefeed ("\n").
- // This is useful on Windows-based systems.
- UseCRLF bool
-}
-
-// newWriteSettings creates a default WriteSettings record.
-func newWriteSettings() WriteSettings {
- return WriteSettings{
- CanonicalEndTags: false,
- CanonicalText: false,
- CanonicalAttrVal: false,
- UseCRLF: false,
- }
-}
-
-// A Token is an empty interface that represents an Element, CharData,
-// Comment, Directive, or ProcInst.
-type Token interface {
- Parent() *Element
- Index() int
- dup(parent *Element) Token
- setParent(parent *Element)
- setIndex(index int)
- writeTo(w *bufio.Writer, s *WriteSettings)
-}
-
-// A Document is a container holding a complete XML hierarchy. Its embedded
-// element contains zero or more children, one of which is usually the root
-// element. The embedded element may include other children such as
-// processing instructions or BOM CharData tokens.
-type Document struct {
- Element
- ReadSettings ReadSettings
- WriteSettings WriteSettings
-}
-
-// An Element represents an XML element, its attributes, and its child tokens.
-type Element struct {
- Space, Tag string // namespace prefix and tag
- Attr []Attr // key-value attribute pairs
- Child []Token // child tokens (elements, comments, etc.)
- parent *Element // parent element
- index int // token index in parent's children
-}
-
-// An Attr represents a key-value attribute of an XML element.
-type Attr struct {
- Space, Key string // The attribute's namespace prefix and key
- Value string // The attribute value string
- element *Element // element containing the attribute
-}
-
-// charDataFlags are used with CharData tokens to store additional settings.
-type charDataFlags uint8
-
-const (
- // The CharData was created by an indent function as whitespace.
- whitespaceFlag charDataFlags = 1 << iota
-
- // The CharData contains a CDATA section.
- cdataFlag
-)
-
-// CharData can be used to represent character data or a CDATA section within
-// an XML document.
-type CharData struct {
- Data string
- parent *Element
- index int
- flags charDataFlags
-}
-
-// A Comment represents an XML comment.
-type Comment struct {
- Data string
- parent *Element
- index int
-}
-
-// A Directive represents an XML directive.
-type Directive struct {
- Data string
- parent *Element
- index int
-}
-
-// A ProcInst represents an XML processing instruction.
-type ProcInst struct {
- Target string
- Inst string
- parent *Element
- index int
-}
-
-// NewDocument creates an XML document without a root element.
-func NewDocument() *Document {
- return &Document{
- Element{Child: make([]Token, 0)},
- newReadSettings(),
- newWriteSettings(),
- }
-}
-
-// Copy returns a recursive, deep copy of the document.
-func (d *Document) Copy() *Document {
- return &Document{*(d.dup(nil).(*Element)), d.ReadSettings, d.WriteSettings}
-}
-
-// Root returns the root element of the document, or nil if there is no root
-// element.
-func (d *Document) Root() *Element {
- for _, t := range d.Child {
- if c, ok := t.(*Element); ok {
- return c
- }
- }
- return nil
-}
-
-// SetRoot replaces the document's root element with e. If the document
-// already has a root when this function is called, then the document's
-// original root is unbound first. If the element e is bound to another
-// document (or to another element within a document), then it is unbound
-// first.
-func (d *Document) SetRoot(e *Element) {
- if e.parent != nil {
- e.parent.RemoveChild(e)
- }
-
- p := &d.Element
- e.setParent(p)
-
- // If there is already a root element, replace it.
- for i, t := range p.Child {
- if _, ok := t.(*Element); ok {
- t.setParent(nil)
- t.setIndex(-1)
- p.Child[i] = e
- e.setIndex(i)
- return
- }
- }
-
- // No existing root element, so add it.
- p.addChild(e)
-}
-
-// ReadFrom reads XML from the reader r into the document d. It returns the
-// number of bytes read and any error encountered.
-func (d *Document) ReadFrom(r io.Reader) (n int64, err error) {
- return d.Element.readFrom(r, d.ReadSettings)
-}
-
-// ReadFromFile reads XML from the string s into the document d.
-func (d *Document) ReadFromFile(filename string) error {
- f, err := os.Open(filename)
- if err != nil {
- return err
- }
- defer f.Close()
- _, err = d.ReadFrom(f)
- return err
-}
-
-// ReadFromBytes reads XML from the byte slice b into the document d.
-func (d *Document) ReadFromBytes(b []byte) error {
- _, err := d.ReadFrom(bytes.NewReader(b))
- return err
-}
-
-// ReadFromString reads XML from the string s into the document d.
-func (d *Document) ReadFromString(s string) error {
- _, err := d.ReadFrom(strings.NewReader(s))
- return err
-}
-
-// WriteTo serializes an XML document into the writer w. It
-// returns the number of bytes written and any error encountered.
-func (d *Document) WriteTo(w io.Writer) (n int64, err error) {
- cw := newCountWriter(w)
- b := bufio.NewWriter(cw)
- for _, c := range d.Child {
- c.writeTo(b, &d.WriteSettings)
- }
- err, n = b.Flush(), cw.bytes
- return
-}
-
-// WriteToFile serializes an XML document into the file named
-// filename.
-func (d *Document) WriteToFile(filename string) error {
- f, err := os.Create(filename)
- if err != nil {
- return err
- }
- defer f.Close()
- _, err = d.WriteTo(f)
- return err
-}
-
-// WriteToBytes serializes the XML document into a slice of
-// bytes.
-func (d *Document) WriteToBytes() (b []byte, err error) {
- var buf bytes.Buffer
- if _, err = d.WriteTo(&buf); err != nil {
- return
- }
- return buf.Bytes(), nil
-}
-
-// WriteToString serializes the XML document into a string.
-func (d *Document) WriteToString() (s string, err error) {
- var b []byte
- if b, err = d.WriteToBytes(); err != nil {
- return
- }
- return string(b), nil
-}
-
-type indentFunc func(depth int) string
-
-// Indent modifies the document's element tree by inserting character data
-// tokens containing newlines and indentation. The amount of indentation per
-// depth level is given as spaces. Pass etree.NoIndent for spaces if you want
-// no indentation at all.
-func (d *Document) Indent(spaces int) {
- var indent indentFunc
- switch {
- case spaces < 0:
- indent = func(depth int) string { return "" }
- case d.WriteSettings.UseCRLF == true:
- indent = func(depth int) string { return indentCRLF(depth*spaces, indentSpaces) }
- default:
- indent = func(depth int) string { return indentLF(depth*spaces, indentSpaces) }
- }
- d.Element.indent(0, indent)
-}
-
-// IndentTabs modifies the document's element tree by inserting CharData
-// tokens containing newlines and tabs for indentation. One tab is used per
-// indentation level.
-func (d *Document) IndentTabs() {
- var indent indentFunc
- switch d.WriteSettings.UseCRLF {
- case true:
- indent = func(depth int) string { return indentCRLF(depth, indentTabs) }
- default:
- indent = func(depth int) string { return indentLF(depth, indentTabs) }
- }
- d.Element.indent(0, indent)
-}
-
-// NewElement creates an unparented element with the specified tag. The tag
-// may be prefixed by a namespace prefix and a colon.
-func NewElement(tag string) *Element {
- space, stag := spaceDecompose(tag)
- return newElement(space, stag, nil)
-}
-
-// newElement is a helper function that creates an element and binds it to
-// a parent element if possible.
-func newElement(space, tag string, parent *Element) *Element {
- e := &Element{
- Space: space,
- Tag: tag,
- Attr: make([]Attr, 0),
- Child: make([]Token, 0),
- parent: parent,
- index: -1,
- }
- if parent != nil {
- parent.addChild(e)
- }
- return e
-}
-
-// Copy creates a recursive, deep copy of the element and all its attributes
-// and children. The returned element has no parent but can be parented to a
-// another element using AddElement, or to a document using SetRoot.
-func (e *Element) Copy() *Element {
- return e.dup(nil).(*Element)
-}
-
-// FullTag returns the element e's complete tag, including namespace prefix if
-// present.
-func (e *Element) FullTag() string {
- if e.Space == "" {
- return e.Tag
- }
- return e.Space + ":" + e.Tag
-}
-
-// NamespaceURI returns the XML namespace URI associated with the element. If
-// the element is part of the XML default namespace, NamespaceURI returns the
-// empty string.
-func (e *Element) NamespaceURI() string {
- if e.Space == "" {
- return e.findDefaultNamespaceURI()
- }
- return e.findLocalNamespaceURI(e.Space)
-}
-
-// findLocalNamespaceURI finds the namespace URI corresponding to the
-// requested prefix.
-func (e *Element) findLocalNamespaceURI(prefix string) string {
- for _, a := range e.Attr {
- if a.Space == "xmlns" && a.Key == prefix {
- return a.Value
- }
- }
-
- if e.parent == nil {
- return ""
- }
-
- return e.parent.findLocalNamespaceURI(prefix)
-}
-
-// findDefaultNamespaceURI finds the default namespace URI of the element.
-func (e *Element) findDefaultNamespaceURI() string {
- for _, a := range e.Attr {
- if a.Space == "" && a.Key == "xmlns" {
- return a.Value
- }
- }
-
- if e.parent == nil {
- return ""
- }
-
- return e.parent.findDefaultNamespaceURI()
-}
-
-// hasText returns true if the element has character data immediately
-// folllowing the element's opening tag.
-func (e *Element) hasText() bool {
- if len(e.Child) == 0 {
- return false
- }
- _, ok := e.Child[0].(*CharData)
- return ok
-}
-
-// namespacePrefix returns the namespace prefix associated with the element.
-func (e *Element) namespacePrefix() string {
- return e.Space
-}
-
-// name returns the tag associated with the element.
-func (e *Element) name() string {
- return e.Tag
-}
-
-// Text returns all character data immediately following the element's opening
-// tag.
-func (e *Element) Text() string {
- if len(e.Child) == 0 {
- return ""
- }
-
- text := ""
- for _, ch := range e.Child {
- if cd, ok := ch.(*CharData); ok {
- if text == "" {
- text = cd.Data
- } else {
- text = text + cd.Data
- }
- } else {
- break
- }
- }
- return text
-}
-
-// SetText replaces all character data immediately following an element's
-// opening tag with the requested string.
-func (e *Element) SetText(text string) {
- e.replaceText(0, text, 0)
-}
-
-// SetCData replaces all character data immediately following an element's
-// opening tag with a CDATA section.
-func (e *Element) SetCData(text string) {
- e.replaceText(0, text, cdataFlag)
-}
-
-// Tail returns all character data immediately following the element's end
-// tag.
-func (e *Element) Tail() string {
- if e.Parent() == nil {
- return ""
- }
-
- p := e.Parent()
- i := e.Index()
-
- text := ""
- for _, ch := range p.Child[i+1:] {
- if cd, ok := ch.(*CharData); ok {
- if text == "" {
- text = cd.Data
- } else {
- text = text + cd.Data
- }
- } else {
- break
- }
- }
- return text
-}
-
-// SetTail replaces all character data immediately following the element's end
-// tag with the requested string.
-func (e *Element) SetTail(text string) {
- if e.Parent() == nil {
- return
- }
-
- p := e.Parent()
- p.replaceText(e.Index()+1, text, 0)
-}
-
-// replaceText is a helper function that replaces a series of chardata tokens
-// starting at index i with the requested text.
-func (e *Element) replaceText(i int, text string, flags charDataFlags) {
- end := e.findTermCharDataIndex(i)
-
- switch {
- case end == i:
- if text != "" {
- // insert a new chardata token at index i
- cd := newCharData(text, flags, nil)
- e.InsertChildAt(i, cd)
- }
-
- case end == i+1:
- if text == "" {
- // remove the chardata token at index i
- e.RemoveChildAt(i)
- } else {
- // replace the first and only character token at index i
- cd := e.Child[i].(*CharData)
- cd.Data, cd.flags = text, flags
- }
-
- default:
- if text == "" {
- // remove all chardata tokens starting from index i
- copy(e.Child[i:], e.Child[end:])
- removed := end - i
- e.Child = e.Child[:len(e.Child)-removed]
- for j := i; j < len(e.Child); j++ {
- e.Child[j].setIndex(j)
- }
- } else {
- // replace the first chardata token at index i and remove all
- // subsequent chardata tokens
- cd := e.Child[i].(*CharData)
- cd.Data, cd.flags = text, flags
- copy(e.Child[i+1:], e.Child[end:])
- removed := end - (i + 1)
- e.Child = e.Child[:len(e.Child)-removed]
- for j := i + 1; j < len(e.Child); j++ {
- e.Child[j].setIndex(j)
- }
- }
- }
-}
-
-// findTermCharDataIndex finds the index of the first child token that isn't
-// a CharData token. It starts from the requested start index.
-func (e *Element) findTermCharDataIndex(start int) int {
- for i := start; i < len(e.Child); i++ {
- if _, ok := e.Child[i].(*CharData); !ok {
- return i
- }
- }
- return len(e.Child)
-}
-
-// CreateElement creates an element with the specified tag and adds it as the
-// last child element of the element e. The tag may be prefixed by a namespace
-// prefix and a colon.
-func (e *Element) CreateElement(tag string) *Element {
- space, stag := spaceDecompose(tag)
- return newElement(space, stag, e)
-}
-
-// AddChild adds the token t as the last child of element e. If token t was
-// already the child of another element, it is first removed from its current
-// parent element.
-func (e *Element) AddChild(t Token) {
- if t.Parent() != nil {
- t.Parent().RemoveChild(t)
- }
-
- t.setParent(e)
- e.addChild(t)
-}
-
-// InsertChild inserts the token t before e's existing child token ex. If ex
-// is nil or ex is not a child of e, then t is added to the end of e's child
-// token list. If token t was already the child of another element, it is
-// first removed from its current parent element.
-//
-// Deprecated: InsertChild is deprecated. Use InsertChildAt instead.
-func (e *Element) InsertChild(ex Token, t Token) {
- if ex == nil || ex.Parent() != e {
- e.AddChild(t)
- return
- }
-
- if t.Parent() != nil {
- t.Parent().RemoveChild(t)
- }
-
- t.setParent(e)
-
- i := ex.Index()
- e.Child = append(e.Child, nil)
- copy(e.Child[i+1:], e.Child[i:])
- e.Child[i] = t
-
- for j := i; j < len(e.Child); j++ {
- e.Child[j].setIndex(j)
- }
-}
-
-// InsertChildAt inserts the token t into the element e's list of child tokens
-// just before the requested index. If the index is greater than or equal to
-// the length of the list of child tokens, the token t is added to the end of
-// the list.
-func (e *Element) InsertChildAt(index int, t Token) {
- if index >= len(e.Child) {
- e.AddChild(t)
- return
- }
-
- if t.Parent() != nil {
- if t.Parent() == e && t.Index() > index {
- index--
- }
- t.Parent().RemoveChild(t)
- }
-
- t.setParent(e)
-
- e.Child = append(e.Child, nil)
- copy(e.Child[index+1:], e.Child[index:])
- e.Child[index] = t
-
- for j := index; j < len(e.Child); j++ {
- e.Child[j].setIndex(j)
- }
-}
-
-// RemoveChild attempts to remove the token t from element e's list of
-// children. If the token t is a child of e, then it is returned. Otherwise,
-// nil is returned.
-func (e *Element) RemoveChild(t Token) Token {
- if t.Parent() != e {
- return nil
- }
- return e.RemoveChildAt(t.Index())
-}
-
-// RemoveChildAt removes the index-th child token from the element e. The
-// removed child token is returned. If the index is out of bounds, no child is
-// removed and nil is returned.
-func (e *Element) RemoveChildAt(index int) Token {
- if index >= len(e.Child) {
- return nil
- }
-
- t := e.Child[index]
- for j := index + 1; j < len(e.Child); j++ {
- e.Child[j].setIndex(j - 1)
- }
- e.Child = append(e.Child[:index], e.Child[index+1:]...)
- t.setIndex(-1)
- t.setParent(nil)
- return t
-}
-
-// ReadFrom reads XML from the reader r and stores the result as a new child
-// of element e.
-func (e *Element) readFrom(ri io.Reader, settings ReadSettings) (n int64, err error) {
- r := newCountReader(ri)
- dec := xml.NewDecoder(r)
- dec.CharsetReader = settings.CharsetReader
- dec.Strict = !settings.Permissive
- dec.Entity = settings.Entity
- var stack stack
- stack.push(e)
- for {
- t, err := dec.RawToken()
- switch {
- case err == io.EOF:
- return r.bytes, nil
- case err != nil:
- return r.bytes, err
- case stack.empty():
- return r.bytes, ErrXML
- }
-
- top := stack.peek().(*Element)
-
- switch t := t.(type) {
- case xml.StartElement:
- e := newElement(t.Name.Space, t.Name.Local, top)
- for _, a := range t.Attr {
- e.createAttr(a.Name.Space, a.Name.Local, a.Value, e)
- }
- stack.push(e)
- case xml.EndElement:
- stack.pop()
- case xml.CharData:
- data := string(t)
- var flags charDataFlags
- if isWhitespace(data) {
- flags = whitespaceFlag
- }
- newCharData(data, flags, top)
- case xml.Comment:
- newComment(string(t), top)
- case xml.Directive:
- newDirective(string(t), top)
- case xml.ProcInst:
- newProcInst(t.Target, string(t.Inst), top)
- }
- }
-}
-
-// SelectAttr finds an element attribute matching the requested key and
-// returns it if found. Returns nil if no matching attribute is found. The key
-// may be prefixed by a namespace prefix and a colon.
-func (e *Element) SelectAttr(key string) *Attr {
- space, skey := spaceDecompose(key)
- for i, a := range e.Attr {
- if spaceMatch(space, a.Space) && skey == a.Key {
- return &e.Attr[i]
- }
- }
- return nil
-}
-
-// SelectAttrValue finds an element attribute matching the requested key and
-// returns its value if found. The key may be prefixed by a namespace prefix
-// and a colon. If the key is not found, the dflt value is returned instead.
-func (e *Element) SelectAttrValue(key, dflt string) string {
- space, skey := spaceDecompose(key)
- for _, a := range e.Attr {
- if spaceMatch(space, a.Space) && skey == a.Key {
- return a.Value
- }
- }
- return dflt
-}
-
-// ChildElements returns all elements that are children of element e.
-func (e *Element) ChildElements() []*Element {
- var elements []*Element
- for _, t := range e.Child {
- if c, ok := t.(*Element); ok {
- elements = append(elements, c)
- }
- }
- return elements
-}
-
-// SelectElement returns the first child element with the given tag. The tag
-// may be prefixed by a namespace prefix and a colon. Returns nil if no
-// element with a matching tag was found.
-func (e *Element) SelectElement(tag string) *Element {
- space, stag := spaceDecompose(tag)
- for _, t := range e.Child {
- if c, ok := t.(*Element); ok && spaceMatch(space, c.Space) && stag == c.Tag {
- return c
- }
- }
- return nil
-}
-
-// SelectElements returns a slice of all child elements with the given tag.
-// The tag may be prefixed by a namespace prefix and a colon.
-func (e *Element) SelectElements(tag string) []*Element {
- space, stag := spaceDecompose(tag)
- var elements []*Element
- for _, t := range e.Child {
- if c, ok := t.(*Element); ok && spaceMatch(space, c.Space) && stag == c.Tag {
- elements = append(elements, c)
- }
- }
- return elements
-}
-
-// FindElement returns the first element matched by the XPath-like path
-// string. Returns nil if no element is found using the path. Panics if an
-// invalid path string is supplied.
-func (e *Element) FindElement(path string) *Element {
- return e.FindElementPath(MustCompilePath(path))
-}
-
-// FindElementPath returns the first element matched by the XPath-like path
-// string. Returns nil if no element is found using the path.
-func (e *Element) FindElementPath(path Path) *Element {
- p := newPather()
- elements := p.traverse(e, path)
- switch {
- case len(elements) > 0:
- return elements[0]
- default:
- return nil
- }
-}
-
-// FindElements returns a slice of elements matched by the XPath-like path
-// string. Panics if an invalid path string is supplied.
-func (e *Element) FindElements(path string) []*Element {
- return e.FindElementsPath(MustCompilePath(path))
-}
-
-// FindElementsPath returns a slice of elements matched by the Path object.
-func (e *Element) FindElementsPath(path Path) []*Element {
- p := newPather()
- return p.traverse(e, path)
-}
-
-// GetPath returns the absolute path of the element.
-func (e *Element) GetPath() string {
- path := []string{}
- for seg := e; seg != nil; seg = seg.Parent() {
- if seg.Tag != "" {
- path = append(path, seg.Tag)
- }
- }
-
- // Reverse the path.
- for i, j := 0, len(path)-1; i < j; i, j = i+1, j-1 {
- path[i], path[j] = path[j], path[i]
- }
-
- return "/" + strings.Join(path, "/")
-}
-
-// GetRelativePath returns the path of the element relative to the source
-// element. If the two elements are not part of the same element tree, then
-// GetRelativePath returns the empty string.
-func (e *Element) GetRelativePath(source *Element) string {
- var path []*Element
-
- if source == nil {
- return ""
- }
-
- // Build a reverse path from the element toward the root. Stop if the
- // source element is encountered.
- var seg *Element
- for seg = e; seg != nil && seg != source; seg = seg.Parent() {
- path = append(path, seg)
- }
-
- // If we found the source element, reverse the path and compose the
- // string.
- if seg == source {
- if len(path) == 0 {
- return "."
- }
- parts := []string{}
- for i := len(path) - 1; i >= 0; i-- {
- parts = append(parts, path[i].Tag)
- }
- return "./" + strings.Join(parts, "/")
- }
-
- // The source wasn't encountered, so climb from the source element toward
- // the root of the tree until an element in the reversed path is
- // encountered.
-
- findPathIndex := func(e *Element, path []*Element) int {
- for i, ee := range path {
- if e == ee {
- return i
- }
- }
- return -1
- }
-
- climb := 0
- for seg = source; seg != nil; seg = seg.Parent() {
- i := findPathIndex(seg, path)
- if i >= 0 {
- path = path[:i] // truncate at found segment
- break
- }
- climb++
- }
-
- // No element in the reversed path was encountered, so the two elements
- // must not be part of the same tree.
- if seg == nil {
- return ""
- }
-
- // Reverse the (possibly truncated) path and prepend ".." segments to
- // climb.
- parts := []string{}
- for i := 0; i < climb; i++ {
- parts = append(parts, "..")
- }
- for i := len(path) - 1; i >= 0; i-- {
- parts = append(parts, path[i].Tag)
- }
- return strings.Join(parts, "/")
-}
-
-// indent recursively inserts proper indentation between an
-// XML element's child tokens.
-func (e *Element) indent(depth int, indent indentFunc) {
- e.stripIndent()
- n := len(e.Child)
- if n == 0 {
- return
- }
-
- oldChild := e.Child
- e.Child = make([]Token, 0, n*2+1)
- isCharData, firstNonCharData := false, true
- for _, c := range oldChild {
- // Insert NL+indent before child if it's not character data.
- // Exceptions: when it's the first non-character-data child, or when
- // the child is at root depth.
- _, isCharData = c.(*CharData)
- if !isCharData {
- if !firstNonCharData || depth > 0 {
- s := indent(depth)
- if s != "" {
- newCharData(s, whitespaceFlag, e)
- }
- }
- firstNonCharData = false
- }
-
- e.addChild(c)
-
- // Recursively process child elements.
- if ce, ok := c.(*Element); ok {
- ce.indent(depth+1, indent)
- }
- }
-
- // Insert NL+indent before the last child.
- if !isCharData {
- if !firstNonCharData || depth > 0 {
- s := indent(depth - 1)
- if s != "" {
- newCharData(s, whitespaceFlag, e)
- }
- }
- }
-}
-
-// stripIndent removes any previously inserted indentation.
-func (e *Element) stripIndent() {
- // Count the number of non-indent child tokens
- n := len(e.Child)
- for _, c := range e.Child {
- if cd, ok := c.(*CharData); ok && cd.IsWhitespace() {
- n--
- }
- }
- if n == len(e.Child) {
- return
- }
-
- // Strip out indent CharData
- newChild := make([]Token, n)
- j := 0
- for _, c := range e.Child {
- if cd, ok := c.(*CharData); ok && cd.IsWhitespace() {
- continue
- }
- newChild[j] = c
- newChild[j].setIndex(j)
- j++
- }
- e.Child = newChild
-}
-
-// dup duplicates the element.
-func (e *Element) dup(parent *Element) Token {
- ne := &Element{
- Space: e.Space,
- Tag: e.Tag,
- Attr: make([]Attr, len(e.Attr)),
- Child: make([]Token, len(e.Child)),
- parent: parent,
- index: e.index,
- }
- for i, t := range e.Child {
- ne.Child[i] = t.dup(ne)
- }
- for i, a := range e.Attr {
- ne.Attr[i] = a
- }
- return ne
-}
-
-// Parent returns the element token's parent element, or nil if it has no
-// parent.
-func (e *Element) Parent() *Element {
- return e.parent
-}
-
-// Index returns the index of this element within its parent element's
-// list of child tokens. If this element has no parent element, the index
-// is -1.
-func (e *Element) Index() int {
- return e.index
-}
-
-// setParent replaces the element token's parent.
-func (e *Element) setParent(parent *Element) {
- e.parent = parent
-}
-
-// setIndex sets the element token's index within its parent's Child slice.
-func (e *Element) setIndex(index int) {
- e.index = index
-}
-
-// writeTo serializes the element to the writer w.
-func (e *Element) writeTo(w *bufio.Writer, s *WriteSettings) {
- w.WriteByte('<')
- w.WriteString(e.FullTag())
- for _, a := range e.Attr {
- w.WriteByte(' ')
- a.writeTo(w, s)
- }
- if len(e.Child) > 0 {
- w.WriteString(">")
- for _, c := range e.Child {
- c.writeTo(w, s)
- }
- w.Write([]byte{'<', '/'})
- w.WriteString(e.FullTag())
- w.WriteByte('>')
- } else {
- if s.CanonicalEndTags {
- w.Write([]byte{'>', '<', '/'})
- w.WriteString(e.FullTag())
- w.WriteByte('>')
- } else {
- w.Write([]byte{'/', '>'})
- }
- }
-}
-
-// addChild adds a child token to the element e.
-func (e *Element) addChild(t Token) {
- t.setIndex(len(e.Child))
- e.Child = append(e.Child, t)
-}
-
-// CreateAttr creates an attribute and adds it to element e. The key may be
-// prefixed by a namespace prefix and a colon. If an attribute with the key
-// already exists, its value is replaced.
-func (e *Element) CreateAttr(key, value string) *Attr {
- space, skey := spaceDecompose(key)
- return e.createAttr(space, skey, value, e)
-}
-
-// createAttr is a helper function that creates attributes.
-func (e *Element) createAttr(space, key, value string, parent *Element) *Attr {
- for i, a := range e.Attr {
- if space == a.Space && key == a.Key {
- e.Attr[i].Value = value
- return &e.Attr[i]
- }
- }
- a := Attr{
- Space: space,
- Key: key,
- Value: value,
- element: parent,
- }
- e.Attr = append(e.Attr, a)
- return &e.Attr[len(e.Attr)-1]
-}
-
-// RemoveAttr removes and returns a copy of the first attribute of the element
-// whose key matches the given key. The key may be prefixed by a namespace
-// prefix and a colon. If a matching attribute does not exist, nil is
-// returned.
-func (e *Element) RemoveAttr(key string) *Attr {
- space, skey := spaceDecompose(key)
- for i, a := range e.Attr {
- if space == a.Space && skey == a.Key {
- e.Attr = append(e.Attr[0:i], e.Attr[i+1:]...)
- return &Attr{
- Space: a.Space,
- Key: a.Key,
- Value: a.Value,
- element: nil,
- }
- }
- }
- return nil
-}
-
-// SortAttrs sorts the element's attributes lexicographically by key.
-func (e *Element) SortAttrs() {
- sort.Sort(byAttr(e.Attr))
-}
-
-type byAttr []Attr
-
-func (a byAttr) Len() int {
- return len(a)
-}
-
-func (a byAttr) Swap(i, j int) {
- a[i], a[j] = a[j], a[i]
-}
-
-func (a byAttr) Less(i, j int) bool {
- sp := strings.Compare(a[i].Space, a[j].Space)
- if sp == 0 {
- return strings.Compare(a[i].Key, a[j].Key) < 0
- }
- return sp < 0
-}
-
-// FullKey returns the attribute a's complete key, including namespace prefix
-// if present.
-func (a *Attr) FullKey() string {
- if a.Space == "" {
- return a.Key
- }
- return a.Space + ":" + a.Key
-}
-
-// Element returns the element containing the attribute.
-func (a *Attr) Element() *Element {
- return a.element
-}
-
-// NamespaceURI returns the XML namespace URI associated with the attribute.
-// If the element is part of the XML default namespace, NamespaceURI returns
-// the empty string.
-func (a *Attr) NamespaceURI() string {
- return a.element.NamespaceURI()
-}
-
-// writeTo serializes the attribute to the writer.
-func (a *Attr) writeTo(w *bufio.Writer, s *WriteSettings) {
- w.WriteString(a.FullKey())
- w.WriteString(`="`)
- var m escapeMode
- if s.CanonicalAttrVal {
- m = escapeCanonicalAttr
- } else {
- m = escapeNormal
- }
- escapeString(w, a.Value, m)
- w.WriteByte('"')
-}
-
-// NewText creates a parentless CharData token containing character data.
-func NewText(text string) *CharData {
- return newCharData(text, 0, nil)
-}
-
-// NewCData creates a parentless XML character CDATA section.
-func NewCData(data string) *CharData {
- return newCharData(data, cdataFlag, nil)
-}
-
-// NewCharData creates a parentless CharData token containing character data.
-//
-// Deprecated: NewCharData is deprecated. Instead, use NewText, which does the
-// same thing.
-func NewCharData(data string) *CharData {
- return newCharData(data, 0, nil)
-}
-
-// newCharData creates a character data token and binds it to a parent
-// element. If parent is nil, the CharData token remains unbound.
-func newCharData(data string, flags charDataFlags, parent *Element) *CharData {
- c := &CharData{
- Data: data,
- parent: parent,
- index: -1,
- flags: flags,
- }
- if parent != nil {
- parent.addChild(c)
- }
- return c
-}
-
-// CreateText creates a CharData token containing character data and adds it
-// as a child of element e.
-func (e *Element) CreateText(text string) *CharData {
- return newCharData(text, 0, e)
-}
-
-// CreateCData creates a CharData token containing a CDATA section and adds it
-// as a child of element e.
-func (e *Element) CreateCData(data string) *CharData {
- return newCharData(data, cdataFlag, e)
-}
-
-// CreateCharData creates a CharData token containing character data and adds
-// it as a child of element e.
-//
-// Deprecated: CreateCharData is deprecated. Instead, use CreateText, which
-// does the same thing.
-func (e *Element) CreateCharData(data string) *CharData {
- return newCharData(data, 0, e)
-}
-
-// dup duplicates the character data.
-func (c *CharData) dup(parent *Element) Token {
- return &CharData{
- Data: c.Data,
- flags: c.flags,
- parent: parent,
- index: c.index,
- }
-}
-
-// IsCData returns true if the character data token is to be encoded as a
-// CDATA section.
-func (c *CharData) IsCData() bool {
- return (c.flags & cdataFlag) != 0
-}
-
-// IsWhitespace returns true if the character data token was created by one of
-// the document Indent methods to contain only whitespace.
-func (c *CharData) IsWhitespace() bool {
- return (c.flags & whitespaceFlag) != 0
-}
-
-// Parent returns the character data token's parent element, or nil if it has
-// no parent.
-func (c *CharData) Parent() *Element {
- return c.parent
-}
-
-// Index returns the index of this CharData token within its parent element's
-// list of child tokens. If this CharData token has no parent element, the
-// index is -1.
-func (c *CharData) Index() int {
- return c.index
-}
-
-// setParent replaces the character data token's parent.
-func (c *CharData) setParent(parent *Element) {
- c.parent = parent
-}
-
-// setIndex sets the CharData token's index within its parent element's Child
-// slice.
-func (c *CharData) setIndex(index int) {
- c.index = index
-}
-
-// writeTo serializes character data to the writer.
-func (c *CharData) writeTo(w *bufio.Writer, s *WriteSettings) {
- if c.IsCData() {
-		w.WriteString(`<![CDATA[`)
-		w.WriteString(c.Data)
-		w.WriteString(`]]>`)
- } else {
- var m escapeMode
- if s.CanonicalText {
- m = escapeCanonicalText
- } else {
- m = escapeNormal
- }
- escapeString(w, c.Data, m)
- }
-}
-
-// NewComment creates a parentless XML comment.
-func NewComment(comment string) *Comment {
- return newComment(comment, nil)
-}
-
-// newComment creates an XML comment and binds it to a parent element. If
-// parent is nil, the Comment remains unbound.
-func newComment(comment string, parent *Element) *Comment {
- c := &Comment{
- Data: comment,
- parent: parent,
- index: -1,
- }
- if parent != nil {
- parent.addChild(c)
- }
- return c
-}
-
-// CreateComment creates an XML comment and adds it as a child of element e.
-func (e *Element) CreateComment(comment string) *Comment {
- return newComment(comment, e)
-}
-
-// dup duplicates the comment.
-func (c *Comment) dup(parent *Element) Token {
- return &Comment{
- Data: c.Data,
- parent: parent,
- index: c.index,
- }
-}
-
-// Parent returns comment token's parent element, or nil if it has no parent.
-func (c *Comment) Parent() *Element {
- return c.parent
-}
-
-// Index returns the index of this Comment token within its parent element's
-// list of child tokens. If this Comment token has no parent element, the
-// index is -1.
-func (c *Comment) Index() int {
- return c.index
-}
-
-// setParent replaces the comment token's parent.
-func (c *Comment) setParent(parent *Element) {
- c.parent = parent
-}
-
-// setIndex sets the Comment token's index within its parent element's Child
-// slice.
-func (c *Comment) setIndex(index int) {
- c.index = index
-}
-
-// writeTo serializes the comment to the writer.
-func (c *Comment) writeTo(w *bufio.Writer, s *WriteSettings) {
-	w.WriteString("<!--")
-	w.WriteString(c.Data)
-	w.WriteString("-->")
-}
-
-// NewDirective creates a parentless XML directive.
-func NewDirective(data string) *Directive {
- return newDirective(data, nil)
-}
-
-// newDirective creates an XML directive and binds it to a parent element. If
-// parent is nil, the Directive remains unbound.
-func newDirective(data string, parent *Element) *Directive {
- d := &Directive{
- Data: data,
- parent: parent,
- index: -1,
- }
- if parent != nil {
- parent.addChild(d)
- }
- return d
-}
-
-// CreateDirective creates an XML directive and adds it as the last child of
-// element e.
-func (e *Element) CreateDirective(data string) *Directive {
- return newDirective(data, e)
-}
-
-// dup duplicates the directive.
-func (d *Directive) dup(parent *Element) Token {
- return &Directive{
- Data: d.Data,
- parent: parent,
- index: d.index,
- }
-}
-
-// Parent returns directive token's parent element, or nil if it has no
-// parent.
-func (d *Directive) Parent() *Element {
- return d.parent
-}
-
-// Index returns the index of this Directive token within its parent element's
-// list of child tokens. If this Directive token has no parent element, the
-// index is -1.
-func (d *Directive) Index() int {
- return d.index
-}
-
-// setParent replaces the directive token's parent.
-func (d *Directive) setParent(parent *Element) {
- d.parent = parent
-}
-
-// setIndex sets the Directive token's index within its parent element's Child
-// slice.
-func (d *Directive) setIndex(index int) {
- d.index = index
-}
-
-// writeTo serializes the XML directive to the writer.
-func (d *Directive) writeTo(w *bufio.Writer, s *WriteSettings) {
-	w.WriteString("<!")
-	w.WriteString(d.Data)
-	w.WriteString(">")
-}
-
-// NewProcInst creates a parentless XML processing instruction.
-func NewProcInst(target, inst string) *ProcInst {
- return newProcInst(target, inst, nil)
-}
-
-// newProcInst creates an XML processing instruction and binds it to a parent
-// element. If parent is nil, the ProcInst remains unbound.
-func newProcInst(target, inst string, parent *Element) *ProcInst {
- p := &ProcInst{
- Target: target,
- Inst: inst,
- parent: parent,
- index: -1,
- }
- if parent != nil {
- parent.addChild(p)
- }
- return p
-}
-
-// CreateProcInst creates a processing instruction and adds it as a child of
-// element e.
-func (e *Element) CreateProcInst(target, inst string) *ProcInst {
- return newProcInst(target, inst, e)
-}
-
-// dup duplicates the procinst.
-func (p *ProcInst) dup(parent *Element) Token {
- return &ProcInst{
- Target: p.Target,
- Inst: p.Inst,
- parent: parent,
- index: p.index,
- }
-}
-
-// Parent returns processing instruction token's parent element, or nil if it
-// has no parent.
-func (p *ProcInst) Parent() *Element {
- return p.parent
-}
-
-// Index returns the index of this ProcInst token within its parent element's
-// list of child tokens. If this ProcInst token has no parent element, the
-// index is -1.
-func (p *ProcInst) Index() int {
- return p.index
-}
-
-// setParent replaces the processing instruction token's parent.
-func (p *ProcInst) setParent(parent *Element) {
- p.parent = parent
-}
-
-// setIndex sets the processing instruction token's index within its parent
-// element's Child slice.
-func (p *ProcInst) setIndex(index int) {
- p.index = index
-}
-
-// writeTo serializes the processing instruction to the writer.
-func (p *ProcInst) writeTo(w *bufio.Writer, s *WriteSettings) {
-	w.WriteString("<?")
- w.WriteString(p.Target)
- if p.Inst != "" {
- w.WriteByte(' ')
- w.WriteString(p.Inst)
- }
- w.WriteString("?>")
-}
diff --git a/vendor/github.com/beevik/etree/helpers.go b/vendor/github.com/beevik/etree/helpers.go
deleted file mode 100644
index 825e14e..0000000
--- a/vendor/github.com/beevik/etree/helpers.go
+++ /dev/null
@@ -1,276 +0,0 @@
-// Copyright 2015-2019 Brett Vickers.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package etree
-
-import (
- "bufio"
- "io"
- "strings"
- "unicode/utf8"
-)
-
-// A simple stack
-type stack struct {
- data []interface{}
-}
-
-func (s *stack) empty() bool {
- return len(s.data) == 0
-}
-
-func (s *stack) push(value interface{}) {
- s.data = append(s.data, value)
-}
-
-func (s *stack) pop() interface{} {
- value := s.data[len(s.data)-1]
- s.data[len(s.data)-1] = nil
- s.data = s.data[:len(s.data)-1]
- return value
-}
-
-func (s *stack) peek() interface{} {
- return s.data[len(s.data)-1]
-}
-
-// A fifo is a simple first-in-first-out queue.
-type fifo struct {
- data []interface{}
- head, tail int
-}
-
-func (f *fifo) add(value interface{}) {
- if f.len()+1 >= len(f.data) {
- f.grow()
- }
- f.data[f.tail] = value
- if f.tail++; f.tail == len(f.data) {
- f.tail = 0
- }
-}
-
-func (f *fifo) remove() interface{} {
- value := f.data[f.head]
- f.data[f.head] = nil
- if f.head++; f.head == len(f.data) {
- f.head = 0
- }
- return value
-}
-
-func (f *fifo) len() int {
- if f.tail >= f.head {
- return f.tail - f.head
- }
- return len(f.data) - f.head + f.tail
-}
-
-func (f *fifo) grow() {
- c := len(f.data) * 2
- if c == 0 {
- c = 4
- }
- buf, count := make([]interface{}, c), f.len()
- if f.tail >= f.head {
- copy(buf[0:count], f.data[f.head:f.tail])
- } else {
- hindex := len(f.data) - f.head
- copy(buf[0:hindex], f.data[f.head:])
- copy(buf[hindex:count], f.data[:f.tail])
- }
- f.data, f.head, f.tail = buf, 0, count
-}
-
-// countReader implements a proxy reader that counts the number of
-// bytes read from its encapsulated reader.
-type countReader struct {
- r io.Reader
- bytes int64
-}
-
-func newCountReader(r io.Reader) *countReader {
- return &countReader{r: r}
-}
-
-func (cr *countReader) Read(p []byte) (n int, err error) {
- b, err := cr.r.Read(p)
- cr.bytes += int64(b)
- return b, err
-}
-
-// countWriter implements a proxy writer that counts the number of
-// bytes written by its encapsulated writer.
-type countWriter struct {
- w io.Writer
- bytes int64
-}
-
-func newCountWriter(w io.Writer) *countWriter {
- return &countWriter{w: w}
-}
-
-func (cw *countWriter) Write(p []byte) (n int, err error) {
- b, err := cw.w.Write(p)
- cw.bytes += int64(b)
- return b, err
-}
-
-// isWhitespace returns true if the string s contains only
-// whitespace characters.
-func isWhitespace(s string) bool {
- for i := 0; i < len(s); i++ {
- if c := s[i]; c != ' ' && c != '\t' && c != '\n' && c != '\r' {
- return false
- }
- }
- return true
-}
-
-// spaceMatch returns true if namespace a is the empty string
-// or if namespace a equals namespace b.
-func spaceMatch(a, b string) bool {
- switch {
- case a == "":
- return true
- default:
- return a == b
- }
-}
-
-// spaceDecompose breaks a namespace:tag identifier at the ':'
-// and returns the two parts.
-func spaceDecompose(str string) (space, key string) {
- colon := strings.IndexByte(str, ':')
- if colon == -1 {
- return "", str
- }
- return str[:colon], str[colon+1:]
-}
-
-// Strings used by indentCRLF and indentLF
-const (
- indentSpaces = "\r\n "
- indentTabs = "\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t"
-)
-
-// indentCRLF returns a CRLF newline followed by n copies of the first
-// non-CRLF character in the source string.
-func indentCRLF(n int, source string) string {
- switch {
- case n < 0:
- return source[:2]
- case n < len(source)-1:
- return source[:n+2]
- default:
- return source + strings.Repeat(source[2:3], n-len(source)+2)
- }
-}
-
-// indentLF returns a LF newline followed by n copies of the first non-LF
-// character in the source string.
-func indentLF(n int, source string) string {
- switch {
- case n < 0:
- return source[1:2]
- case n < len(source)-1:
- return source[1 : n+2]
- default:
- return source[1:] + strings.Repeat(source[2:3], n-len(source)+2)
- }
-}
-
-// nextIndex returns the index of the next occurrence of sep in s,
-// starting from offset. It returns -1 if the sep string is not found.
-func nextIndex(s, sep string, offset int) int {
- switch i := strings.Index(s[offset:], sep); i {
- case -1:
- return -1
- default:
- return offset + i
- }
-}
-
-// isInteger returns true if the string s contains an integer.
-func isInteger(s string) bool {
- for i := 0; i < len(s); i++ {
- if (s[i] < '0' || s[i] > '9') && !(i == 0 && s[i] == '-') {
- return false
- }
- }
- return true
-}
-
-type escapeMode byte
-
-const (
- escapeNormal escapeMode = iota
- escapeCanonicalText
- escapeCanonicalAttr
-)
-
-// escapeString writes an escaped version of a string to the writer.
-func escapeString(w *bufio.Writer, s string, m escapeMode) {
- var esc []byte
- last := 0
- for i := 0; i < len(s); {
- r, width := utf8.DecodeRuneInString(s[i:])
- i += width
- switch r {
- case '&':
-			esc = []byte("&amp;")
- case '<':
-			esc = []byte("&lt;")
- case '>':
- if m == escapeCanonicalAttr {
- continue
- }
-			esc = []byte("&gt;")
- case '\'':
- if m != escapeNormal {
- continue
- }
-			esc = []byte("&apos;")
- case '"':
- if m == escapeCanonicalText {
- continue
- }
-			esc = []byte("&quot;")
- case '\t':
- if m != escapeCanonicalAttr {
- continue
- }
-			esc = []byte("&#x9;")
- case '\n':
- if m != escapeCanonicalAttr {
- continue
- }
-			esc = []byte("&#xA;")
- case '\r':
- if m == escapeNormal {
- continue
- }
-			esc = []byte("&#xD;")
- default:
- if !isInCharacterRange(r) || (r == 0xFFFD && width == 1) {
- esc = []byte("\uFFFD")
- break
- }
- continue
- }
- w.WriteString(s[last : i-width])
- w.Write(esc)
- last = i
- }
- w.WriteString(s[last:])
-}
-
-func isInCharacterRange(r rune) bool {
- return r == 0x09 ||
- r == 0x0A ||
- r == 0x0D ||
- r >= 0x20 && r <= 0xD7FF ||
- r >= 0xE000 && r <= 0xFFFD ||
- r >= 0x10000 && r <= 0x10FFFF
-}
diff --git a/vendor/github.com/beevik/etree/path.go b/vendor/github.com/beevik/etree/path.go
deleted file mode 100644
index 82db0ac..0000000
--- a/vendor/github.com/beevik/etree/path.go
+++ /dev/null
@@ -1,582 +0,0 @@
-// Copyright 2015-2019 Brett Vickers.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package etree
-
-import (
- "strconv"
- "strings"
-)
-
-/*
-A Path is a string that represents a search path through an etree starting
-from the document root or an arbitrary element. Paths are used with the
-Element object's Find* methods to locate and return desired elements.
-
-A Path consists of a series of slash-separated "selectors", each of which may
-be modified by one or more bracket-enclosed "filters". Selectors are used to
-traverse the etree from element to element, while filters are used to narrow
-the list of candidate elements at each node.
-
-Although etree Path strings are similar to XPath strings
-(https://www.w3.org/TR/1999/REC-xpath-19991116/), they have a more limited set
-of selectors and filtering options.
-
-The following selectors are supported by etree Path strings:
-
- . Select the current element.
- .. Select the parent of the current element.
- * Select all child elements of the current element.
- / Select the root element when used at the start of a path.
- // Select all descendants of the current element.
- tag Select all child elements with a name matching the tag.
-
-The following basic filters are supported by etree Path strings:
-
- [@attrib] Keep elements with an attribute named attrib.
- [@attrib='val'] Keep elements with an attribute named attrib and value matching val.
- [tag] Keep elements with a child element named tag.
- [tag='val'] Keep elements with a child element named tag and text matching val.
- [n] Keep the n-th element, where n is a numeric index starting from 1.
-
-The following function filters are also supported:
-
- [text()] Keep elements with non-empty text.
- [text()='val'] Keep elements whose text matches val.
- [local-name()='val'] Keep elements whose un-prefixed tag matches val.
- [name()='val'] Keep elements whose full tag exactly matches val.
- [namespace-prefix()='val'] Keep elements whose namespace prefix matches val.
- [namespace-uri()='val'] Keep elements whose namespace URI matches val.
-
-Here are some examples of Path strings:
-
-- Select the bookstore child element of the root element:
- /bookstore
-
-- Beginning from the root element, select the title elements of all
-descendant book elements having a 'category' attribute of 'WEB':
- //book[@category='WEB']/title
-
-- Beginning from the current element, select the first descendant
-book element with a title child element containing the text 'Great
-Expectations':
- .//book[title='Great Expectations'][1]
-
-- Beginning from the current element, select all child elements of
-book elements with an attribute 'language' set to 'english':
- ./book/*[@language='english']
-
-- Beginning from the current element, select all child elements of
-book elements containing the text 'special':
- ./book/*[text()='special']
-
-- Beginning from the current element, select all descendant book
-elements whose title child element has a 'language' attribute of 'french':
- .//book/title[@language='french']/..
-
-- Beginning from the current element, select all book elements
-belonging to the http://www.w3.org/TR/html4/ namespace:
- .//book[namespace-uri()='http://www.w3.org/TR/html4/']
-
-*/
-type Path struct {
- segments []segment
-}
-
-// ErrPath is returned by path functions when an invalid etree path is provided.
-type ErrPath string
-
-// Error returns the string describing a path error.
-func (err ErrPath) Error() string {
- return "etree: " + string(err)
-}
-
-// CompilePath creates an optimized version of an XPath-like string that
-// can be used to query elements in an element tree.
-func CompilePath(path string) (Path, error) {
- var comp compiler
- segments := comp.parsePath(path)
- if comp.err != ErrPath("") {
- return Path{nil}, comp.err
- }
- return Path{segments}, nil
-}
-
-// MustCompilePath creates an optimized version of an XPath-like string that
-// can be used to query elements in an element tree. Panics if an error
-// occurs. Use this function to create Paths when you know the path is
-// valid (i.e., if it's hard-coded).
-func MustCompilePath(path string) Path {
- p, err := CompilePath(path)
- if err != nil {
- panic(err)
- }
- return p
-}
-
-// A segment is a portion of a path between "/" characters.
-// It contains one selector and zero or more [filters].
-type segment struct {
- sel selector
- filters []filter
-}
-
-func (seg *segment) apply(e *Element, p *pather) {
- seg.sel.apply(e, p)
- for _, f := range seg.filters {
- f.apply(p)
- }
-}
-
-// A selector selects XML elements for consideration by the
-// path traversal.
-type selector interface {
- apply(e *Element, p *pather)
-}
-
-// A filter pares down a list of candidate XML elements based
-// on a path filter in [brackets].
-type filter interface {
- apply(p *pather)
-}
-
-// A pather is a helper object that traverses an element tree using
-// a Path object. It collects and deduplicates all elements matching
-// the path query.
-type pather struct {
- queue fifo
- results []*Element
- inResults map[*Element]bool
- candidates []*Element
- scratch []*Element // used by filters
-}
-
-// A node represents an element and the remaining path segments that
-// should be applied against it by the pather.
-type node struct {
- e *Element
- segments []segment
-}
-
-func newPather() *pather {
- return &pather{
- results: make([]*Element, 0),
- inResults: make(map[*Element]bool),
- candidates: make([]*Element, 0),
- scratch: make([]*Element, 0),
- }
-}
-
-// traverse follows the path from the element e, collecting
-// and then returning all elements that match the path's selectors
-// and filters.
-func (p *pather) traverse(e *Element, path Path) []*Element {
- for p.queue.add(node{e, path.segments}); p.queue.len() > 0; {
- p.eval(p.queue.remove().(node))
- }
- return p.results
-}
-
-// eval evaluates the current path node by applying the remaining
-// path's selector rules against the node's element.
-func (p *pather) eval(n node) {
- p.candidates = p.candidates[0:0]
- seg, remain := n.segments[0], n.segments[1:]
- seg.apply(n.e, p)
-
- if len(remain) == 0 {
- for _, c := range p.candidates {
- if in := p.inResults[c]; !in {
- p.inResults[c] = true
- p.results = append(p.results, c)
- }
- }
- } else {
- for _, c := range p.candidates {
- p.queue.add(node{c, remain})
- }
- }
-}
-
-// A compiler generates a compiled path from a path string.
-type compiler struct {
- err ErrPath
-}
-
-// parsePath parses an XPath-like string describing a path
-// through an element tree and returns a slice of segment
-// descriptors.
-func (c *compiler) parsePath(path string) []segment {
- // If path ends with //, fix it
- if strings.HasSuffix(path, "//") {
- path = path + "*"
- }
-
- var segments []segment
-
- // Check for an absolute path
- if strings.HasPrefix(path, "/") {
- segments = append(segments, segment{new(selectRoot), []filter{}})
- path = path[1:]
- }
-
- // Split path into segments
- for _, s := range splitPath(path) {
- segments = append(segments, c.parseSegment(s))
- if c.err != ErrPath("") {
- break
- }
- }
- return segments
-}
-
-func splitPath(path string) []string {
- pieces := make([]string, 0)
- start := 0
- inquote := false
- for i := 0; i+1 <= len(path); i++ {
- if path[i] == '\'' {
- inquote = !inquote
- } else if path[i] == '/' && !inquote {
- pieces = append(pieces, path[start:i])
- start = i + 1
- }
- }
- return append(pieces, path[start:])
-}
-
-// parseSegment parses a path segment between / characters.
-func (c *compiler) parseSegment(path string) segment {
- pieces := strings.Split(path, "[")
- seg := segment{
- sel: c.parseSelector(pieces[0]),
- filters: []filter{},
- }
- for i := 1; i < len(pieces); i++ {
- fpath := pieces[i]
- if fpath[len(fpath)-1] != ']' {
- c.err = ErrPath("path has invalid filter [brackets].")
- break
- }
- seg.filters = append(seg.filters, c.parseFilter(fpath[:len(fpath)-1]))
- }
- return seg
-}
-
-// parseSelector parses a selector at the start of a path segment.
-func (c *compiler) parseSelector(path string) selector {
- switch path {
- case ".":
- return new(selectSelf)
- case "..":
- return new(selectParent)
- case "*":
- return new(selectChildren)
- case "":
- return new(selectDescendants)
- default:
- return newSelectChildrenByTag(path)
- }
-}
-
-var fnTable = map[string]struct {
- hasFn func(e *Element) bool
- getValFn func(e *Element) string
-}{
- "local-name": {nil, (*Element).name},
- "name": {nil, (*Element).FullTag},
- "namespace-prefix": {nil, (*Element).namespacePrefix},
- "namespace-uri": {nil, (*Element).NamespaceURI},
- "text": {(*Element).hasText, (*Element).Text},
-}
-
-// parseFilter parses a path filter contained within [brackets].
-func (c *compiler) parseFilter(path string) filter {
- if len(path) == 0 {
- c.err = ErrPath("path contains an empty filter expression.")
- return nil
- }
-
- // Filter contains [@attr='val'], [fn()='val'], or [tag='val']?
- eqindex := strings.Index(path, "='")
- if eqindex >= 0 {
- rindex := nextIndex(path, "'", eqindex+2)
- if rindex != len(path)-1 {
- c.err = ErrPath("path has mismatched filter quotes.")
- return nil
- }
-
- key := path[:eqindex]
- value := path[eqindex+2 : rindex]
-
- switch {
- case key[0] == '@':
- return newFilterAttrVal(key[1:], value)
- case strings.HasSuffix(key, "()"):
- fn := key[:len(key)-2]
- if t, ok := fnTable[fn]; ok && t.getValFn != nil {
- return newFilterFuncVal(t.getValFn, value)
- }
- c.err = ErrPath("path has unknown function " + fn)
- return nil
- default:
- return newFilterChildText(key, value)
- }
- }
-
- // Filter contains [@attr], [N], [tag] or [fn()]
- switch {
- case path[0] == '@':
- return newFilterAttr(path[1:])
- case strings.HasSuffix(path, "()"):
- fn := path[:len(path)-2]
- if t, ok := fnTable[fn]; ok && t.hasFn != nil {
- return newFilterFunc(t.hasFn)
- }
- c.err = ErrPath("path has unknown function " + fn)
- return nil
- case isInteger(path):
- pos, _ := strconv.Atoi(path)
- switch {
- case pos > 0:
- return newFilterPos(pos - 1)
- default:
- return newFilterPos(pos)
- }
- default:
- return newFilterChild(path)
- }
-}
-
-// selectSelf selects the current element into the candidate list.
-type selectSelf struct{}
-
-func (s *selectSelf) apply(e *Element, p *pather) {
- p.candidates = append(p.candidates, e)
-}
-
-// selectRoot selects the element's root node.
-type selectRoot struct{}
-
-func (s *selectRoot) apply(e *Element, p *pather) {
- root := e
- for root.parent != nil {
- root = root.parent
- }
- p.candidates = append(p.candidates, root)
-}
-
-// selectParent selects the element's parent into the candidate list.
-type selectParent struct{}
-
-func (s *selectParent) apply(e *Element, p *pather) {
- if e.parent != nil {
- p.candidates = append(p.candidates, e.parent)
- }
-}
-
-// selectChildren selects the element's child elements into the
-// candidate list.
-type selectChildren struct{}
-
-func (s *selectChildren) apply(e *Element, p *pather) {
- for _, c := range e.Child {
- if c, ok := c.(*Element); ok {
- p.candidates = append(p.candidates, c)
- }
- }
-}
-
-// selectDescendants selects all descendant child elements
-// of the element into the candidate list.
-type selectDescendants struct{}
-
-func (s *selectDescendants) apply(e *Element, p *pather) {
- var queue fifo
- for queue.add(e); queue.len() > 0; {
- e := queue.remove().(*Element)
- p.candidates = append(p.candidates, e)
- for _, c := range e.Child {
- if c, ok := c.(*Element); ok {
- queue.add(c)
- }
- }
- }
-}
-
-// selectChildrenByTag selects into the candidate list all child
-// elements of the element having the specified tag.
-type selectChildrenByTag struct {
- space, tag string
-}
-
-func newSelectChildrenByTag(path string) *selectChildrenByTag {
- s, l := spaceDecompose(path)
- return &selectChildrenByTag{s, l}
-}
-
-func (s *selectChildrenByTag) apply(e *Element, p *pather) {
- for _, c := range e.Child {
- if c, ok := c.(*Element); ok && spaceMatch(s.space, c.Space) && s.tag == c.Tag {
- p.candidates = append(p.candidates, c)
- }
- }
-}
-
-// filterPos filters the candidate list, keeping only the
-// candidate at the specified index.
-type filterPos struct {
- index int
-}
-
-func newFilterPos(pos int) *filterPos {
- return &filterPos{pos}
-}
-
-func (f *filterPos) apply(p *pather) {
- if f.index >= 0 {
- if f.index < len(p.candidates) {
- p.scratch = append(p.scratch, p.candidates[f.index])
- }
- } else {
- if -f.index <= len(p.candidates) {
- p.scratch = append(p.scratch, p.candidates[len(p.candidates)+f.index])
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterAttr filters the candidate list for elements having
-// the specified attribute.
-type filterAttr struct {
- space, key string
-}
-
-func newFilterAttr(str string) *filterAttr {
- s, l := spaceDecompose(str)
- return &filterAttr{s, l}
-}
-
-func (f *filterAttr) apply(p *pather) {
- for _, c := range p.candidates {
- for _, a := range c.Attr {
- if spaceMatch(f.space, a.Space) && f.key == a.Key {
- p.scratch = append(p.scratch, c)
- break
- }
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterAttrVal filters the candidate list for elements having
-// the specified attribute with the specified value.
-type filterAttrVal struct {
- space, key, val string
-}
-
-func newFilterAttrVal(str, value string) *filterAttrVal {
- s, l := spaceDecompose(str)
- return &filterAttrVal{s, l, value}
-}
-
-func (f *filterAttrVal) apply(p *pather) {
- for _, c := range p.candidates {
- for _, a := range c.Attr {
- if spaceMatch(f.space, a.Space) && f.key == a.Key && f.val == a.Value {
- p.scratch = append(p.scratch, c)
- break
- }
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterFunc filters the candidate list for elements satisfying a custom
-// boolean function.
-type filterFunc struct {
- fn func(e *Element) bool
-}
-
-func newFilterFunc(fn func(e *Element) bool) *filterFunc {
- return &filterFunc{fn}
-}
-
-func (f *filterFunc) apply(p *pather) {
- for _, c := range p.candidates {
- if f.fn(c) {
- p.scratch = append(p.scratch, c)
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterFuncVal filters the candidate list for elements containing a value
-// matching the result of a custom function.
-type filterFuncVal struct {
- fn func(e *Element) string
- val string
-}
-
-func newFilterFuncVal(fn func(e *Element) string, value string) *filterFuncVal {
- return &filterFuncVal{fn, value}
-}
-
-func (f *filterFuncVal) apply(p *pather) {
- for _, c := range p.candidates {
- if f.fn(c) == f.val {
- p.scratch = append(p.scratch, c)
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterChild filters the candidate list for elements having
-// a child element with the specified tag.
-type filterChild struct {
- space, tag string
-}
-
-func newFilterChild(str string) *filterChild {
- s, l := spaceDecompose(str)
- return &filterChild{s, l}
-}
-
-func (f *filterChild) apply(p *pather) {
- for _, c := range p.candidates {
- for _, cc := range c.Child {
- if cc, ok := cc.(*Element); ok &&
- spaceMatch(f.space, cc.Space) &&
- f.tag == cc.Tag {
- p.scratch = append(p.scratch, c)
- }
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterChildText filters the candidate list for elements having
-// a child element with the specified tag and text.
-type filterChildText struct {
- space, tag, text string
-}
-
-func newFilterChildText(str, text string) *filterChildText {
- s, l := spaceDecompose(str)
- return &filterChildText{s, l, text}
-}
-
-func (f *filterChildText) apply(p *pather) {
- for _, c := range p.candidates {
- for _, cc := range c.Child {
- if cc, ok := cc.(*Element); ok &&
- spaceMatch(f.space, cc.Space) &&
- f.tag == cc.Tag &&
- f.text == cc.Text() {
- p.scratch = append(p.scratch, c)
- }
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore b/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
new file mode 100644
index 0000000..fb22c99
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
@@ -0,0 +1,4 @@
+.vscode
+.idea
+*.swp
+jv
diff --git a/vendor/github.com/xeipuuv/gojsonreference/LICENSE-APACHE-2.0.txt b/vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE
similarity index 89%
rename from vendor/github.com/xeipuuv/gojsonreference/LICENSE-APACHE-2.0.txt
rename to vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE
index 55ede8a..19dc35b 100644
--- a/vendor/github.com/xeipuuv/gojsonreference/LICENSE-APACHE-2.0.txt
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE
@@ -172,31 +172,4 @@
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2015 xeipuuv
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+ of your accepting any such warranty or additional liability.
\ No newline at end of file
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md b/vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
new file mode 100644
index 0000000..8c66ba0
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
@@ -0,0 +1,215 @@
+# jsonschema v5.1.1
+
+[Apache-2.0 License](https://opensource.org/licenses/Apache-2.0)
+[Go Reference](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
+[Go Report Card](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema/v5)
+[Build](/~https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml)
+[Codecov](https://codecov.io/github/santhosh-tekuri/jsonschema?branch=master)
+
+Package jsonschema provides json-schema compilation and validation.
+
+[Benchmarks](https://dev.to/vearutop/benchmarking-correctness-and-performance-of-go-json-schema-validators-3247)
+
+### Features:
+ - implements
+ [draft 2020-12](https://json-schema.org/specification-links.html#2020-12),
+ [draft 2019-09](https://json-schema.org/specification-links.html#draft-2019-09-formerly-known-as-draft-8),
+ [draft-7](https://json-schema.org/specification-links.html#draft-7),
+ [draft-6](https://json-schema.org/specification-links.html#draft-6),
+ [draft-4](https://json-schema.org/specification-links.html#draft-4)
+ - fully compliant with [JSON-Schema-Test-Suite](/~https://github.com/json-schema-org/JSON-Schema-Test-Suite), (excluding some optional)
+ - list of optional tests that are excluded can be found in schema_test.go(variable [skipTests](/~https://github.com/santhosh-tekuri/jsonschema/blob/master/schema_test.go#L24))
+ - validates schemas against meta-schema
+ - full support of remote references
+ - support of recursive references between schemas
+ - detects infinite loop in schemas
+ - thread safe validation
+ - rich, intuitive hierarchical error messages with json-pointers to exact location
+ - supports output formats flag, basic and detailed
+ - supports enabling format and content Assertions in draft2019-09 or above
+ - change `Compiler.AssertFormat`, `Compiler.AssertContent` to `true`
+ - compiled schema can be introspected. easier to develop tools like generating go structs given schema
+ - supports user-defined keywords via [extensions](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-Extension)
+ - implements following formats (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedFormat))
+ - date-time, date, time, duration, period (supports leap-second)
+ - uuid, hostname, email
+ - ip-address, ipv4, ipv6
+ - uri, uriref, uri-template(limited validation)
+ - json-pointer, relative-json-pointer
+ - regex, format
+ - implements following contentEncoding (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
+ - base64
+ - implements following contentMediaType (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
+ - application/json
+ - can load from files/http/https/[string](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-FromString)/[]byte/io.Reader (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedLoader))
+
+
+see examples in [godoc](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
+
+The schema is compiled against the version specified in the `$schema` property.
+If the `$schema` property is missing, the latest draft currently implemented
+by this library is used.
+
+You can force a specific version to be used when `$schema` is missing, as follows:
+
+```go
+compiler := jsonschema.NewCompiler()
+compiler.Draft = jsonschema.Draft4
+```
+
+This package supports loading json-schema from a file path and a file URL.
+
+To load json-schema from an HTTP(S) URL, add the following import:
+
+```go
+import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+```
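+
+For example, a minimal sketch (the schema URL and sample document below are placeholders, not part of this package) of compiling a remotely hosted schema and validating a document against it once the httploader import is in place:
+
+```go
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+	"strings"
+
+	"github.com/santhosh-tekuri/jsonschema/v5"
+	_ "github.com/santhosh-tekuri/jsonschema/v5/httploader" // enables loading schemas over http(s)
+)
+
+func main() {
+	// placeholder URL; any reachable json-schema URL works here
+	sch, err := jsonschema.Compile("https://example.com/schemas/person.json")
+	if err != nil {
+		panic(err)
+	}
+
+	// decode the instance document; UseNumber keeps numbers as json.Number for validation
+	dec := json.NewDecoder(strings.NewReader(`{"name": "bob"}`))
+	dec.UseNumber()
+	var doc interface{}
+	if err := dec.Decode(&doc); err != nil {
+		panic(err)
+	}
+
+	if err := sch.Validate(doc); err != nil {
+		fmt.Printf("%#v\n", err) // prints the error hierarchy described below
+	}
+}
+```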
+
+## Rich Errors
+
+The ValidationError returned by the Validate method contains detailed context describing why and where validation failed.
+
+schema.json:
+```json
+{
+ "$ref": "t.json#/definitions/employee"
+}
+```
+
+t.json:
+```json
+{
+ "definitions": {
+ "employee": {
+ "type": "string"
+ }
+ }
+}
+```
+
+doc.json:
+```json
+1
+```
+
+assuming `err` is the ValidationError returned when `doc.json` is validated against `schema.json`,
+```go
+fmt.Printf("%#v\n", err) // using %#v prints errors hierarchy
+```
+Prints:
+```
+[I#] [S#] doesn't validate with file:///Users/santhosh/jsonschema/schema.json#
+  [I#] [S#/$ref] doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'
+    [I#] [S#/definitions/employee/type] expected string, but got number
+```
+
+Here `I` stands for the instance document and `S` stands for the schema document.
+The json fragments that caused the error in the instance and schema documents are represented using json-pointer notation.
+Nested causes are printed with indentation.
+
+To output `err` in `flag` output format:
+```go
+b, _ := json.MarshalIndent(err.FlagOutput(), "", " ")
+fmt.Println(string(b))
+```
+Prints:
+```json
+{
+ "valid": false
+}
+```
+To output `err` in `basic` output format:
+```go
+b, _ := json.MarshalIndent(err.BasicOutput(), "", " ")
+fmt.Println(string(b))
+```
+Prints:
+```json
+{
+ "valid": false,
+ "errors": [
+ {
+ "keywordLocation": "",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
+ "instanceLocation": "",
+ "error": "doesn't validate with file:///Users/santhosh/jsonschema/schema.json#"
+ },
+ {
+ "keywordLocation": "/$ref",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
+ "instanceLocation": "",
+ "error": "doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'"
+ },
+ {
+ "keywordLocation": "/$ref/type",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
+ "instanceLocation": "",
+ "error": "expected string, but got number"
+ }
+ ]
+}
+```
+To output `err` in `detailed` output format:
+```go
+b, _ := json.MarshalIndent(err.DetailedOutput(), "", " ")
+fmt.Println(string(b))
+```
+Prints:
+```json
+{
+ "valid": false,
+ "keywordLocation": "",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
+ "instanceLocation": "",
+ "errors": [
+ {
+ "valid": false,
+ "keywordLocation": "/$ref",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
+ "instanceLocation": "",
+ "errors": [
+ {
+ "valid": false,
+ "keywordLocation": "/$ref/type",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
+ "instanceLocation": "",
+ "error": "expected string, but got number"
+ }
+ ]
+ }
+ ]
+}
+```
+
+## CLI
+
+to install: `go install github.com/santhosh-tekuri/jsonschema/v5/cmd/jv@latest`
+
+```bash
+jv [-draft INT] [-output FORMAT] [-assertformat] [-assertcontent] <json-schema> [<json-or-yaml-doc>]...
+ -assertcontent
+ enable content assertions with draft >= 2019
+ -assertformat
+ enable format assertions with draft >= 2019
+ -draft int
+ draft used when '$schema' attribute is missing. valid values 4, 5, 7, 2019, 2020 (default 2020)
+ -output string
+ output format. valid values flag, basic, detailed
+```
+
+if no `<json-or-yaml-doc>` arguments are passed, it simply validates the `<json-schema>`.
+if the `$schema` attribute is missing in the schema, the latest version is used. this can be overridden by passing the `-draft` flag
+
+exit-code is 1 if there are any validation errors
+
+## Validating YAML Documents
+
+since yaml supports non-string keys, such yaml documents are rendered as invalid json documents.
+the yaml parser returns `map[interface{}]interface{}` for objects, whereas the json parser returns `map[string]interface{}`.
+this package accepts only `map[string]interface{}`, so the parsed yaml values need to be converted to `map[string]interface{}` first.
+
+https://play.golang.org/p/Hhax3MrtD8r
+
+the above example shows how to validate a yaml document with jsonschema.
+the conversion explained above is implemented by its `toStringKeys` function; a sketch is shown below.
+
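+A minimal sketch of that conversion, following the recursive shape of the playground example above (the package and function names here are illustrative, not part of this package's API):
+
+```go
+package yamlconv // illustrative package name
+
+import "errors"
+
+// toStringKeys recursively rewrites the map[interface{}]interface{} values
+// produced by yaml parsers into the map[string]interface{} form accepted by
+// jsonschema, rejecting any non-string keys it encounters.
+func toStringKeys(val interface{}) (interface{}, error) {
+	switch val := val.(type) {
+	case map[interface{}]interface{}:
+		m := make(map[string]interface{})
+		for k, v := range val {
+			k, ok := k.(string)
+			if !ok {
+				return nil, errors.New("found non-string key")
+			}
+			var err error
+			m[k], err = toStringKeys(v)
+			if err != nil {
+				return nil, err
+			}
+		}
+		return m, nil
+	case []interface{}:
+		l := make([]interface{}, len(val))
+		for i, v := range val {
+			var err error
+			l[i], err = toStringKeys(v)
+			if err != nil {
+				return nil, err
+			}
+		}
+		return l, nil
+	default:
+		return val, nil
+	}
+}
+```
+
+decode the yaml document with the yaml library of your choice, run the result through `toStringKeys`, and pass the converted value to `Schema.Validate`.
+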
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
new file mode 100644
index 0000000..3f3cc80
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
@@ -0,0 +1,771 @@
+package jsonschema
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "math/big"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+// A Compiler represents a json-schema compiler.
+type Compiler struct {
+ // Draft represents the draft used when '$schema' attribute is missing.
+ //
+ // This defaults to latest supported draft (currently 2020-12).
+ Draft *Draft
+ resources map[string]*resource
+
+ // Extensions is used to register extensions.
+ extensions map[string]extension
+
+ // ExtractAnnotations tells whether schema annotations has to be extracted
+ // in compiled Schema or not.
+ ExtractAnnotations bool
+
+ // LoadURL loads the document at given absolute URL.
+ //
+ // If nil, package global LoadURL is used.
+ LoadURL func(s string) (io.ReadCloser, error)
+
+ // AssertFormat for specifications >= draft2019-09.
+ AssertFormat bool
+
+ // AssertContent for specifications >= draft2019-09.
+ AssertContent bool
+}
+
+// Compile parses the json-schema at the given url and returns, if successful,
+// a Schema object that can be used to match against json.
+//
+// Returned error can be *SchemaError
+func Compile(url string) (*Schema, error) {
+ return NewCompiler().Compile(url)
+}
+
+// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
+// It simplifies safe initialization of global variables holding compiled Schemas.
+func MustCompile(url string) *Schema {
+ return NewCompiler().MustCompile(url)
+}
+
+// CompileString parses and compiles the given schema with given base url.
+func CompileString(url, schema string) (*Schema, error) {
+ c := NewCompiler()
+ if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
+ return nil, err
+ }
+ return c.Compile(url)
+}
+
+// MustCompileString is like CompileString but panics on error.
+// It simplifies safe initialization of global variables holding compiled Schemas.
+func MustCompileString(url, schema string) *Schema {
+ c := NewCompiler()
+ if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
+ panic(err)
+ }
+ return c.MustCompile(url)
+}
+
+// NewCompiler returns a json-schema Compiler object.
+// if the '$schema' attribute is missing, the latest supported draft is used. to change
+// this behavior, set the Compiler.Draft value
+func NewCompiler() *Compiler {
+ return &Compiler{Draft: latest, resources: make(map[string]*resource), extensions: make(map[string]extension)}
+}
+
+// AddResource adds in-memory resource to the compiler.
+//
+// Note that url must not have fragment
+func (c *Compiler) AddResource(url string, r io.Reader) error {
+ res, err := newResource(url, r)
+ if err != nil {
+ return err
+ }
+ c.resources[res.url] = res
+ return nil
+}
+
+// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
+// It simplifies safe initialization of global variables holding compiled Schemas.
+func (c *Compiler) MustCompile(url string) *Schema {
+ s, err := c.Compile(url)
+ if err != nil {
+ panic(fmt.Sprintf("jsonschema: %#v", err))
+ }
+ return s
+}
+
+// Compile parses the json-schema at the given url and returns, if successful,
+// a Schema object that can be used to match against json.
+//
+// error returned will be of type *SchemaError
+func (c *Compiler) Compile(url string) (*Schema, error) {
+ // make url absolute
+ u, err := toAbs(url)
+ if err != nil {
+ return nil, &SchemaError{url, err}
+ }
+ url = u
+
+ sch, err := c.compileURL(url, nil, "#")
+ if err != nil {
+ err = &SchemaError{url, err}
+ }
+ return sch, err
+}
+
+func (c *Compiler) findResource(url string) (*resource, error) {
+ if _, ok := c.resources[url]; !ok {
+ // load resource
+ var rdr io.Reader
+ if sch, ok := vocabSchemas[url]; ok {
+ rdr = strings.NewReader(sch)
+ } else {
+ loadURL := LoadURL
+ if c.LoadURL != nil {
+ loadURL = c.LoadURL
+ }
+ r, err := loadURL(url)
+ if err != nil {
+ return nil, err
+ }
+ defer r.Close()
+ rdr = r
+ }
+ if err := c.AddResource(url, rdr); err != nil {
+ return nil, err
+ }
+ }
+
+ r := c.resources[url]
+ if r.draft != nil {
+ return r, nil
+ }
+
+ // set draft
+ r.draft = c.Draft
+ if m, ok := r.doc.(map[string]interface{}); ok {
+ if sch, ok := m["$schema"]; ok {
+ sch, ok := sch.(string)
+ if !ok {
+ return nil, fmt.Errorf("jsonschema: invalid $schema in %s", url)
+ }
+ if !isURI(sch) {
+ return nil, fmt.Errorf("jsonschema: $schema must be uri in %s", url)
+ }
+ r.draft = findDraft(sch)
+ if r.draft == nil {
+ sch, _ := split(sch)
+ if sch == url {
+ return nil, fmt.Errorf("jsonschema: unsupported draft in %s", url)
+ }
+ mr, err := c.findResource(sch)
+ if err != nil {
+ return nil, err
+ }
+ r.draft = mr.draft
+ }
+ }
+ }
+
+ id, err := r.draft.resolveID(r.url, r.doc)
+ if err != nil {
+ return nil, err
+ }
+ if id != "" {
+ r.url = id
+ }
+
+ if err := r.fillSubschemas(c, r); err != nil {
+ return nil, err
+ }
+
+ return r, nil
+}
+
+func (c *Compiler) compileURL(url string, stack []schemaRef, ptr string) (*Schema, error) {
+ // if url points to a draft, return Draft.meta
+ if d := findDraft(url); d != nil && d.meta != nil {
+ return d.meta, nil
+ }
+
+ b, f := split(url)
+ r, err := c.findResource(b)
+ if err != nil {
+ return nil, err
+ }
+ return c.compileRef(r, stack, ptr, r, f)
+}
+
+func (c *Compiler) compileRef(r *resource, stack []schemaRef, refPtr string, res *resource, ref string) (*Schema, error) {
+ base := r.baseURL(res.floc)
+ ref, err := resolveURL(base, ref)
+ if err != nil {
+ return nil, err
+ }
+
+ u, f := split(ref)
+ sr := r.findResource(u)
+ if sr == nil {
+ // external resource
+ return c.compileURL(ref, stack, refPtr)
+ }
+
+ // ensure root resource is always compiled first.
+ // this is required to get schema.meta from root resource
+ if r.schema == nil {
+ r.schema = newSchema(r.url, r.floc, r.doc)
+ if _, err := c.compile(r, nil, schemaRef{"#", r.schema, false}, r); err != nil {
+ return nil, err
+ }
+ }
+
+ sr, err = r.resolveFragment(c, sr, f)
+ if err != nil {
+ return nil, err
+ }
+ if sr == nil {
+ return nil, fmt.Errorf("jsonschema: %s not found", ref)
+ }
+
+ if sr.schema != nil {
+ if err := checkLoop(stack, schemaRef{refPtr, sr.schema, false}); err != nil {
+ return nil, err
+ }
+ return sr.schema, nil
+ }
+
+ sr.schema = newSchema(r.url, sr.floc, sr.doc)
+ return c.compile(r, stack, schemaRef{refPtr, sr.schema, false}, sr)
+}
+
+func (c *Compiler) compileDynamicAnchors(r *resource, res *resource) error {
+ if r.draft.version < 2020 {
+ return nil
+ }
+
+ rr := r.listResources(res)
+ rr = append(rr, res)
+ for _, sr := range rr {
+ if m, ok := sr.doc.(map[string]interface{}); ok {
+ if _, ok := m["$dynamicAnchor"]; ok {
+ sch, err := c.compileRef(r, nil, "IGNORED", r, sr.floc)
+ if err != nil {
+ return err
+ }
+ res.schema.dynamicAnchors = append(res.schema.dynamicAnchors, sch)
+ }
+ }
+ }
+ return nil
+}
+
+func (c *Compiler) compile(r *resource, stack []schemaRef, sref schemaRef, res *resource) (*Schema, error) {
+ if err := c.compileDynamicAnchors(r, res); err != nil {
+ return nil, err
+ }
+
+ switch v := res.doc.(type) {
+ case bool:
+ res.schema.Always = &v
+ return res.schema, nil
+ default:
+ return res.schema, c.compileMap(r, stack, sref, res)
+ }
+}
+
+func (c *Compiler) compileMap(r *resource, stack []schemaRef, sref schemaRef, res *resource) error {
+ m := res.doc.(map[string]interface{})
+
+ if err := checkLoop(stack, sref); err != nil {
+ return err
+ }
+ stack = append(stack, sref)
+
+ var s = res.schema
+ var err error
+
+ if r == res { // root schema
+ if sch, ok := m["$schema"]; ok {
+ sch := sch.(string)
+ if d := findDraft(sch); d != nil {
+ s.meta = d.meta
+ } else {
+ if s.meta, err = c.compileRef(r, stack, "$schema", res, sch); err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ if ref, ok := m["$ref"]; ok {
+ s.Ref, err = c.compileRef(r, stack, "$ref", res, ref.(string))
+ if err != nil {
+ return err
+ }
+ if r.draft.version < 2019 {
+ // All other properties in a "$ref" object MUST be ignored
+ return nil
+ }
+ }
+
+ if r.draft.version >= 2019 {
+ if r == res { // root schema
+ if vocab, ok := m["$vocabulary"]; ok {
+ for url := range vocab.(map[string]interface{}) {
+ if !r.draft.isVocab(url) {
+ return fmt.Errorf("jsonschema: unsupported vocab %q in %s", url, res)
+ }
+ s.vocab = append(s.vocab, url)
+ }
+ } else {
+ s.vocab = r.draft.defaultVocab
+ }
+ }
+
+ if ref, ok := m["$recursiveRef"]; ok {
+ s.RecursiveRef, err = c.compileRef(r, stack, "$recursiveRef", res, ref.(string))
+ if err != nil {
+ return err
+ }
+ }
+ }
+ if r.draft.version >= 2020 {
+ if dref, ok := m["$dynamicRef"]; ok {
+ s.DynamicRef, err = c.compileRef(r, stack, "$dynamicRef", res, dref.(string))
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ loadInt := func(pname string) int {
+ if num, ok := m[pname]; ok {
+ i, _ := num.(json.Number).Float64()
+ return int(i)
+ }
+ return -1
+ }
+
+ loadRat := func(pname string) *big.Rat {
+ if num, ok := m[pname]; ok {
+ r, _ := new(big.Rat).SetString(string(num.(json.Number)))
+ return r
+ }
+ return nil
+ }
+
+ if r.draft.version < 2019 || r.schema.meta.hasVocab("validation") {
+ if t, ok := m["type"]; ok {
+ switch t := t.(type) {
+ case string:
+ s.Types = []string{t}
+ case []interface{}:
+ s.Types = toStrings(t)
+ }
+ }
+
+ if e, ok := m["enum"]; ok {
+ s.Enum = e.([]interface{})
+ allPrimitives := true
+ for _, item := range s.Enum {
+ switch jsonType(item) {
+ case "object", "array":
+ allPrimitives = false
+ break
+ }
+ }
+ s.enumError = "enum failed"
+ if allPrimitives {
+ if len(s.Enum) == 1 {
+ s.enumError = fmt.Sprintf("value must be %#v", s.Enum[0])
+ } else {
+ strEnum := make([]string, len(s.Enum))
+ for i, item := range s.Enum {
+ strEnum[i] = fmt.Sprintf("%#v", item)
+ }
+ s.enumError = fmt.Sprintf("value must be one of %s", strings.Join(strEnum, ", "))
+ }
+ }
+ }
+
+ s.Minimum = loadRat("minimum")
+ if exclusive, ok := m["exclusiveMinimum"]; ok {
+ if exclusive, ok := exclusive.(bool); ok {
+ if exclusive {
+ s.Minimum, s.ExclusiveMinimum = nil, s.Minimum
+ }
+ } else {
+ s.ExclusiveMinimum = loadRat("exclusiveMinimum")
+ }
+ }
+
+ s.Maximum = loadRat("maximum")
+ if exclusive, ok := m["exclusiveMaximum"]; ok {
+ if exclusive, ok := exclusive.(bool); ok {
+ if exclusive {
+ s.Maximum, s.ExclusiveMaximum = nil, s.Maximum
+ }
+ } else {
+ s.ExclusiveMaximum = loadRat("exclusiveMaximum")
+ }
+ }
+
+ s.MultipleOf = loadRat("multipleOf")
+
+ s.MinProperties, s.MaxProperties = loadInt("minProperties"), loadInt("maxProperties")
+
+ if req, ok := m["required"]; ok {
+ s.Required = toStrings(req.([]interface{}))
+ }
+
+ s.MinItems, s.MaxItems = loadInt("minItems"), loadInt("maxItems")
+
+ if unique, ok := m["uniqueItems"]; ok {
+ s.UniqueItems = unique.(bool)
+ }
+
+ s.MinLength, s.MaxLength = loadInt("minLength"), loadInt("maxLength")
+
+ if pattern, ok := m["pattern"]; ok {
+ s.Pattern = regexp.MustCompile(pattern.(string))
+ }
+
+ if r.draft.version >= 2019 {
+ s.MinContains, s.MaxContains = loadInt("minContains"), loadInt("maxContains")
+ if s.MinContains == -1 {
+ s.MinContains = 1
+ }
+
+ if deps, ok := m["dependentRequired"]; ok {
+ deps := deps.(map[string]interface{})
+ s.DependentRequired = make(map[string][]string, len(deps))
+ for pname, pvalue := range deps {
+ s.DependentRequired[pname] = toStrings(pvalue.([]interface{}))
+ }
+ }
+ }
+ }
+
+ compile := func(stack []schemaRef, ptr string) (*Schema, error) {
+ return c.compileRef(r, stack, ptr, res, r.url+res.floc+"/"+ptr)
+ }
+
+ loadSchema := func(pname string, stack []schemaRef) (*Schema, error) {
+ if _, ok := m[pname]; ok {
+ return compile(stack, escape(pname))
+ }
+ return nil, nil
+ }
+
+ loadSchemas := func(pname string, stack []schemaRef) ([]*Schema, error) {
+ if pvalue, ok := m[pname]; ok {
+ pvalue := pvalue.([]interface{})
+ schemas := make([]*Schema, len(pvalue))
+ for i := range pvalue {
+ sch, err := compile(stack, escape(pname)+"/"+strconv.Itoa(i))
+ if err != nil {
+ return nil, err
+ }
+ schemas[i] = sch
+ }
+ return schemas, nil
+ }
+ return nil, nil
+ }
+
+ if r.draft.version < 2019 || r.schema.meta.hasVocab("applicator") {
+ if s.Not, err = loadSchema("not", stack); err != nil {
+ return err
+ }
+ if s.AllOf, err = loadSchemas("allOf", stack); err != nil {
+ return err
+ }
+ if s.AnyOf, err = loadSchemas("anyOf", stack); err != nil {
+ return err
+ }
+ if s.OneOf, err = loadSchemas("oneOf", stack); err != nil {
+ return err
+ }
+
+ if props, ok := m["properties"]; ok {
+ props := props.(map[string]interface{})
+ s.Properties = make(map[string]*Schema, len(props))
+ for pname := range props {
+ s.Properties[pname], err = compile(nil, "properties/"+escape(pname))
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ if regexProps, ok := m["regexProperties"]; ok {
+ s.RegexProperties = regexProps.(bool)
+ }
+
+ if patternProps, ok := m["patternProperties"]; ok {
+ patternProps := patternProps.(map[string]interface{})
+ s.PatternProperties = make(map[*regexp.Regexp]*Schema, len(patternProps))
+ for pattern := range patternProps {
+ s.PatternProperties[regexp.MustCompile(pattern)], err = compile(nil, "patternProperties/"+escape(pattern))
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ if additionalProps, ok := m["additionalProperties"]; ok {
+ switch additionalProps := additionalProps.(type) {
+ case bool:
+ s.AdditionalProperties = additionalProps
+ case map[string]interface{}:
+ s.AdditionalProperties, err = compile(nil, "additionalProperties")
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ if deps, ok := m["dependencies"]; ok {
+ deps := deps.(map[string]interface{})
+ s.Dependencies = make(map[string]interface{}, len(deps))
+ for pname, pvalue := range deps {
+ switch pvalue := pvalue.(type) {
+ case []interface{}:
+ s.Dependencies[pname] = toStrings(pvalue)
+ default:
+ s.Dependencies[pname], err = compile(stack, "dependencies/"+escape(pname))
+ if err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ if r.draft.version >= 6 {
+ if s.PropertyNames, err = loadSchema("propertyNames", nil); err != nil {
+ return err
+ }
+ if s.Contains, err = loadSchema("contains", nil); err != nil {
+ return err
+ }
+ }
+
+ if r.draft.version >= 7 {
+ if m["if"] != nil {
+ if s.If, err = loadSchema("if", stack); err != nil {
+ return err
+ }
+ if s.Then, err = loadSchema("then", stack); err != nil {
+ return err
+ }
+ if s.Else, err = loadSchema("else", stack); err != nil {
+ return err
+ }
+ }
+ }
+ if r.draft.version >= 2019 {
+ if deps, ok := m["dependentSchemas"]; ok {
+ deps := deps.(map[string]interface{})
+ s.DependentSchemas = make(map[string]*Schema, len(deps))
+ for pname := range deps {
+ s.DependentSchemas[pname], err = compile(stack, "dependentSchemas/"+escape(pname))
+ if err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ if r.draft.version >= 2020 {
+ if s.PrefixItems, err = loadSchemas("prefixItems", nil); err != nil {
+ return err
+ }
+ if s.Items2020, err = loadSchema("items", nil); err != nil {
+ return err
+ }
+ } else {
+ if items, ok := m["items"]; ok {
+ switch items.(type) {
+ case []interface{}:
+ s.Items, err = loadSchemas("items", nil)
+ if err != nil {
+ return err
+ }
+ if additionalItems, ok := m["additionalItems"]; ok {
+ switch additionalItems := additionalItems.(type) {
+ case bool:
+ s.AdditionalItems = additionalItems
+ case map[string]interface{}:
+ s.AdditionalItems, err = compile(nil, "additionalItems")
+ if err != nil {
+ return err
+ }
+ }
+ }
+ default:
+ s.Items, err = compile(nil, "items")
+ if err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ }
+
+ // unevaluatedXXX keywords were in "applicator" vocab in 2019, but moved to new vocab "unevaluated" in 2020
+ if (r.draft.version == 2019 && r.schema.meta.hasVocab("applicator")) || (r.draft.version >= 2020 && r.schema.meta.hasVocab("unevaluated")) {
+ if s.UnevaluatedProperties, err = loadSchema("unevaluatedProperties", nil); err != nil {
+ return err
+ }
+ if s.UnevaluatedItems, err = loadSchema("unevaluatedItems", nil); err != nil {
+ return err
+ }
+ if r.draft.version >= 2020 {
+ // any item in an array that passes validation of the contains schema is considered "evaluated"
+ s.ContainsEval = true
+ }
+ }
+
+ if format, ok := m["format"]; ok {
+ s.Format = format.(string)
+ if r.draft.version < 2019 || c.AssertFormat || r.schema.meta.hasVocab("format-assertion") {
+ s.format, _ = Formats[s.Format]
+ }
+ }
+
+ if c.ExtractAnnotations {
+ if title, ok := m["title"]; ok {
+ s.Title = title.(string)
+ }
+ if description, ok := m["description"]; ok {
+ s.Description = description.(string)
+ }
+ s.Default = m["default"]
+ }
+
+ if r.draft.version >= 6 {
+ if c, ok := m["const"]; ok {
+ s.Constant = []interface{}{c}
+ }
+ }
+
+ if r.draft.version >= 7 {
+ if encoding, ok := m["contentEncoding"]; ok {
+ s.ContentEncoding = encoding.(string)
+ s.decoder, _ = Decoders[s.ContentEncoding]
+ }
+ if mediaType, ok := m["contentMediaType"]; ok {
+ s.ContentMediaType = mediaType.(string)
+ s.mediaType, _ = MediaTypes[s.ContentMediaType]
+ if s.ContentSchema, err = loadSchema("contentSchema", stack); err != nil {
+ return err
+ }
+ }
+ if c.ExtractAnnotations {
+ if comment, ok := m["$comment"]; ok {
+ s.Comment = comment.(string)
+ }
+ if readOnly, ok := m["readOnly"]; ok {
+ s.ReadOnly = readOnly.(bool)
+ }
+ if writeOnly, ok := m["writeOnly"]; ok {
+ s.WriteOnly = writeOnly.(bool)
+ }
+ if examples, ok := m["examples"]; ok {
+ s.Examples = examples.([]interface{})
+ }
+ }
+ }
+
+ if r.draft.version >= 2019 {
+ if !c.AssertContent {
+ s.decoder = nil
+ s.mediaType = nil
+ s.ContentSchema = nil
+ }
+ if c.ExtractAnnotations {
+ if deprecated, ok := m["deprecated"]; ok {
+ s.Deprecated = deprecated.(bool)
+ }
+ }
+ }
+
+ for name, ext := range c.extensions {
+ es, err := ext.compiler.Compile(CompilerContext{c, r, stack, res}, m)
+ if err != nil {
+ return err
+ }
+ if es != nil {
+ if s.Extensions == nil {
+ s.Extensions = make(map[string]ExtSchema)
+ }
+ s.Extensions[name] = es
+ }
+ }
+
+ return nil
+}
+
+func (c *Compiler) validateSchema(r *resource, v interface{}, vloc string) error {
+ validate := func(meta *Schema) error {
+ if meta == nil {
+ return nil
+ }
+ return meta.validateValue(v, vloc)
+ }
+
+ if err := validate(r.draft.meta); err != nil {
+ return err
+ }
+ for _, ext := range c.extensions {
+ if err := validate(ext.meta); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func toStrings(arr []interface{}) []string {
+ s := make([]string, len(arr))
+ for i, v := range arr {
+ s[i] = v.(string)
+ }
+ return s
+}
+
+// schemaRef captures a schema and the path referring to it.
+type schemaRef struct {
+ path string // relative-json-pointer to schema
+ schema *Schema // target schema
+ discard bool // true when scope left
+}
+
+func (sr schemaRef) String() string {
+ return fmt.Sprintf("(%s)%v", sr.path, sr.schema)
+}
+
+func checkLoop(stack []schemaRef, sref schemaRef) error {
+ for _, ref := range stack {
+ if ref.schema == sref.schema {
+ return infiniteLoopError(stack, sref)
+ }
+ }
+ return nil
+}
+
+func keywordLocation(stack []schemaRef, path string) string {
+ var loc string
+ for _, ref := range stack[1:] {
+ loc += "/" + ref.path
+ }
+ if path != "" {
+ loc = loc + "/" + path
+ }
+ return loc
+}
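
The tail of the compile step above only keeps format assertions, content assertions, and annotations when the matching Compiler flags are set. A minimal sketch of toggling them before compiling; the resource name and schema content are illustrative, not taken from this change:

package main

import (
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	c := jsonschema.NewCompiler()
	c.AssertFormat = true       // assert "format" even for draft 2019-09 and later
	c.AssertContent = true      // assert contentEncoding / contentMediaType
	c.ExtractAnnotations = true // populate Title, Description, Default, ...

	// hypothetical schema added as an in-memory resource
	schema := `{"type": "string", "format": "email"}`
	if err := c.AddResource("example.json", strings.NewReader(schema)); err != nil {
		panic(err)
	}
	sch := c.MustCompile("example.json")

	fmt.Println(sch.Validate("not-an-email")) // fails only because AssertFormat is set
}
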
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
new file mode 100644
index 0000000..7570b8b
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
@@ -0,0 +1,29 @@
+package jsonschema
+
+import (
+ "encoding/base64"
+ "encoding/json"
+)
+
+// Decoders is a registry of functions, which know how to decode
+// string encoded in specific format.
+//
+// New Decoders can be registered by adding to this map. Key is encoding name,
+// value is function that knows how to decode string in that format.
+var Decoders = map[string]func(string) ([]byte, error){
+ "base64": base64.StdEncoding.DecodeString,
+}
+
+// MediaTypes is a registry of functions, which know how to validate
+// whether the bytes represent data of that mediaType.
+//
+// New mediaTypes can be registered by adding to this map. Key is mediaType name,
+// value is function that knows how to validate that mediaType.
+var MediaTypes = map[string]func([]byte) error{
+ "application/json": validateJSON,
+}
+
+func validateJSON(b []byte) error {
+ var v interface{}
+ return json.Unmarshal(b, &v)
+}
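
Since Decoders and MediaTypes are plain exported maps, additional entries can be registered before compiling. A hedged sketch using a hypothetical "hex" contentEncoding (not something the library ships with):

package main

import (
	"encoding/hex"
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	// register a hypothetical "hex" contentEncoding; hex.DecodeString already
	// has the func(string) ([]byte, error) shape the registry expects
	jsonschema.Decoders["hex"] = hex.DecodeString

	c := jsonschema.NewCompiler()
	c.AssertContent = true // content assertions are opt-in for draft 2019-09 and later
	schema := `{"type": "string", "contentEncoding": "hex"}`
	if err := c.AddResource("schema.json", strings.NewReader(schema)); err != nil {
		panic(err)
	}
	sch := c.MustCompile("schema.json")

	fmt.Println(sch.Validate("cafe"))   // decodes, so nil
	fmt.Println(sch.Validate("nothex")) // fails to decode, so a validation error
}
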
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
new file mode 100644
index 0000000..a124262
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
@@ -0,0 +1,49 @@
+/*
+Package jsonschema provides json-schema compilation and validation.
+
+Features:
+ - implements draft 2020-12, 2019-09, draft-7, draft-6, draft-4
+ - fully compliant with JSON-Schema-Test-Suite (excluding some optional tests)
+   - the list of excluded optional tests can be found in schema_test.go (variable skipTests)
+ - validates schemas against meta-schema
+ - full support of remote references
+ - support of recursive references between schemas
+ - detects infinite loop in schemas
+ - thread safe validation
+ - rich, intuitive hierarchical error messages with json-pointers to exact location
+ - supports output formats: flag, basic and detailed
+ - supports enabling format and content assertions in draft 2019-09 or above
+ - change Compiler.AssertFormat, Compiler.AssertContent to true
+ - compiled schemas can be introspected, making it easier to develop tools such as generating Go structs from a given schema
+ - supports user-defined keywords via extensions
+ - implements following formats (supports user-defined)
+ - date-time, date, time, duration (supports leap-second)
+ - uuid, hostname, email
+ - ip-address, ipv4, ipv6
+ - uri, uriref, uri-template(limited validation)
+ - json-pointer, relative-json-pointer
+ - regex, format
+ - implements following contentEncoding (supports user-defined)
+ - base64
+ - implements following contentMediaType (supports user-defined)
+ - application/json
+ - can load from files/http/https/string/[]byte/io.Reader (supports user-defined)
+
+The schema is compiled against the version specified in the "$schema" property.
+If the "$schema" property is missing, the latest draft currently implemented
+by this library is used.
+
+You can force a specific draft to be used when "$schema" is missing, as follows:
+
+ compiler := jsonschema.NewCompiler()
+ compiler.Draft = jsonschema.Draft4
+
+This package supports loading json-schema from a file path and a file URL.
+
+To load a json-schema from an HTTP URL, add the following import:
+
+ import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+
+You can also validate YAML documents; see https://play.golang.org/p/sJy1qY7dXgA
+*/
+package jsonschema
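
A compact end-to-end sketch of the workflow the package documentation describes; the resource name "person.json", the schema, and the instance are illustrative:

package main

import (
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
	// uncomment to resolve schemas referenced by http(s) URLs:
	// _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
)

func main() {
	c := jsonschema.NewCompiler()
	c.Draft = jsonschema.Draft7 // only used when the schema carries no "$schema"

	schema := `{"type": "object", "required": ["name"]}`
	if err := c.AddResource("person.json", strings.NewReader(schema)); err != nil {
		panic(err)
	}
	sch, err := c.Compile("person.json")
	if err != nil {
		panic(err)
	}

	instance := map[string]interface{}{"age": 7.0} // missing "name"
	if err := sch.Validate(instance); err != nil {
		fmt.Println(err) // error message carries json-pointers to the exact location
	}
}
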
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
new file mode 100644
index 0000000..eaa9168
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
@@ -0,0 +1,1432 @@
+package jsonschema
+
+import (
+ "strconv"
+ "strings"
+)
+
+// A Draft represents json-schema draft
+type Draft struct {
+ version int
+ meta *Schema
+ id string // property name used to represent schema id.
+ boolSchema bool // is boolean valid schema
+ vocab []string // built-in vocab
+ defaultVocab []string // vocabs when $vocabulary is not used
+ subschemas map[string]position
+}
+
+func (d *Draft) loadMeta(url, schema string) {
+ c := NewCompiler()
+ c.AssertFormat = true
+ if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
+ panic(err)
+ }
+ d.meta = c.MustCompile(url)
+ d.meta.meta = d.meta
+}
+
+func (d *Draft) getID(sch interface{}) string {
+ m, ok := sch.(map[string]interface{})
+ if !ok {
+ return ""
+ }
+ if _, ok := m["$ref"]; ok && d.version <= 7 {
+ // $ref prevents a sibling id from changing the base uri
+ return ""
+ }
+ v, ok := m[d.id]
+ if !ok {
+ return ""
+ }
+ id, ok := v.(string)
+ if !ok {
+ return ""
+ }
+ return id
+}
+
+func (d *Draft) resolveID(base string, sch interface{}) (string, error) {
+ id, _ := split(d.getID(sch)) // strip fragment
+ if id == "" {
+ return "", nil
+ }
+ url, err := resolveURL(base, id)
+ url, _ = split(url) // strip fragment
+ return url, err
+}
+
+func (d *Draft) anchors(sch interface{}) []string {
+ m, ok := sch.(map[string]interface{})
+ if !ok {
+ return nil
+ }
+
+ var anchors []string
+
+ // before draft2019, anchor is specified in id
+ _, f := split(d.getID(m))
+ if f != "#" {
+ anchors = append(anchors, f[1:])
+ }
+
+ if v, ok := m["$anchor"]; ok && d.version >= 2019 {
+ anchors = append(anchors, v.(string))
+ }
+ if v, ok := m["$dynamicAnchor"]; ok && d.version >= 2020 {
+ anchors = append(anchors, v.(string))
+ }
+ return anchors
+}
+
+// listSubschemas collects subschemas in r into rr.
+func (d *Draft) listSubschemas(r *resource, base string, rr map[string]*resource) error {
+ add := func(loc string, sch interface{}) error {
+ url, err := d.resolveID(base, sch)
+ if err != nil {
+ return err
+ }
+ floc := r.floc + "/" + loc
+ sr := &resource{url: url, floc: floc, doc: sch}
+ rr[floc] = sr
+
+ base := base
+ if url != "" {
+ base = url
+ }
+ return d.listSubschemas(sr, base, rr)
+ }
+
+ sch, ok := r.doc.(map[string]interface{})
+ if !ok {
+ return nil
+ }
+ for kw, pos := range d.subschemas {
+ v, ok := sch[kw]
+ if !ok {
+ continue
+ }
+ if pos&self != 0 {
+ switch v := v.(type) {
+ case map[string]interface{}:
+ if err := add(kw, v); err != nil {
+ return err
+ }
+ case bool:
+ if d.boolSchema {
+ if err := add(kw, v); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ if pos&item != 0 {
+ if v, ok := v.([]interface{}); ok {
+ for i, item := range v {
+ if err := add(kw+"/"+strconv.Itoa(i), item); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ if pos&prop != 0 {
+ if v, ok := v.(map[string]interface{}); ok {
+ for pname, pval := range v {
+ if err := add(kw+"/"+escape(pname), pval); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// isVocab tells whether url is built-in vocab.
+func (d *Draft) isVocab(url string) bool {
+ for _, v := range d.vocab {
+ if url == v {
+ return true
+ }
+ }
+ return false
+}
+
+type position uint
+
+const (
+ self position = 1 << iota
+ prop
+ item
+)
+
+// supported drafts
+var (
+ Draft4 = &Draft{version: 4, id: "id", boolSchema: false}
+ Draft6 = &Draft{version: 6, id: "$id", boolSchema: true}
+ Draft7 = &Draft{version: 7, id: "$id", boolSchema: true}
+ Draft2019 = &Draft{
+ version: 2019,
+ id: "$id",
+ boolSchema: true,
+ vocab: []string{
+ "https://json-schema.org/draft/2019-09/vocab/core",
+ "https://json-schema.org/draft/2019-09/vocab/applicator",
+ "https://json-schema.org/draft/2019-09/vocab/validation",
+ "https://json-schema.org/draft/2019-09/vocab/meta-data",
+ "https://json-schema.org/draft/2019-09/vocab/format",
+ "https://json-schema.org/draft/2019-09/vocab/content",
+ },
+ defaultVocab: []string{
+ "https://json-schema.org/draft/2019-09/vocab/core",
+ "https://json-schema.org/draft/2019-09/vocab/applicator",
+ "https://json-schema.org/draft/2019-09/vocab/validation",
+ },
+ }
+ Draft2020 = &Draft{
+ version: 2020,
+ id: "$id",
+ boolSchema: true,
+ vocab: []string{
+ "https://json-schema.org/draft/2020-12/vocab/core",
+ "https://json-schema.org/draft/2020-12/vocab/applicator",
+ "https://json-schema.org/draft/2020-12/vocab/unevaluated",
+ "https://json-schema.org/draft/2020-12/vocab/validation",
+ "https://json-schema.org/draft/2020-12/vocab/meta-data",
+ "https://json-schema.org/draft/2020-12/vocab/format-annotation",
+ "https://json-schema.org/draft/2020-12/vocab/format-assertion",
+ "https://json-schema.org/draft/2020-12/vocab/content",
+ },
+ defaultVocab: []string{
+ "https://json-schema.org/draft/2020-12/vocab/core",
+ "https://json-schema.org/draft/2020-12/vocab/applicator",
+ "https://json-schema.org/draft/2020-12/vocab/unevaluated",
+ "https://json-schema.org/draft/2020-12/vocab/validation",
+ },
+ }
+
+ latest = Draft2020
+)
+
+func findDraft(url string) *Draft {
+ if strings.HasPrefix(url, "http://") {
+ url = "https://" + strings.TrimPrefix(url, "http://")
+ }
+ if strings.HasSuffix(url, "#") || strings.HasSuffix(url, "#/") {
+ url = url[:strings.IndexByte(url, '#')]
+ }
+ switch url {
+ case "https://json-schema.org/schema":
+ return latest
+ case "https://json-schema.org/draft/2020-12/schema":
+ return Draft2020
+ case "https://json-schema.org/draft/2019-09/schema":
+ return Draft2019
+ case "https://json-schema.org/draft-07/schema":
+ return Draft7
+ case "https://json-schema.org/draft-06/schema":
+ return Draft6
+ case "https://json-schema.org/draft-04/schema":
+ return Draft4
+ }
+ return nil
+}
+
+func init() {
+ subschemas := map[string]position{
+ // type agnostic
+ "definitions": prop,
+ "not": self,
+ "allOf": item,
+ "anyOf": item,
+ "oneOf": item,
+ // object
+ "properties": prop,
+ "additionalProperties": self,
+ "patternProperties": prop,
+ // array
+ "items": self | item,
+ "additionalItems": self,
+ "dependencies": prop,
+ }
+ Draft4.subschemas = clone(subschemas)
+
+ subschemas["propertyNames"] = self
+ subschemas["contains"] = self
+ Draft6.subschemas = clone(subschemas)
+
+ subschemas["if"] = self
+ subschemas["then"] = self
+ subschemas["else"] = self
+ Draft7.subschemas = clone(subschemas)
+
+ subschemas["$defs"] = prop
+ subschemas["dependentSchemas"] = prop
+ subschemas["unevaluatedProperties"] = self
+ subschemas["unevaluatedItems"] = self
+ Draft2019.subschemas = clone(subschemas)
+
+ subschemas["prefixItems"] = item
+ Draft2020.subschemas = clone(subschemas)
+
+ Draft4.loadMeta("http://json-schema.org/draft-04/schema", `{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "description": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "positiveInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "positiveIntegerDefault0": {
+ "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
+ },
+ "simpleTypes": {
+ "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ },
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "format": "uriref"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": {},
+ "multipleOf": {
+ "type": "number",
+ "minimum": 0,
+ "exclusiveMinimum": true
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "boolean",
+ "default": false
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "boolean",
+ "default": false
+ },
+ "maxLength": { "$ref": "#/definitions/positiveInteger" },
+ "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": {
+ "anyOf": [
+ { "type": "boolean" },
+ { "$ref": "#" }
+ ],
+ "default": {}
+ },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": {}
+ },
+ "maxItems": { "$ref": "#/definitions/positiveInteger" },
+ "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "maxProperties": { "$ref": "#/definitions/positiveInteger" },
+ "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": {
+ "anyOf": [
+ { "type": "boolean" },
+ { "$ref": "#" }
+ ],
+ "default": {}
+ },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "regexProperties": true,
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "regexProperties": { "type": "boolean" },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "enum": {
+ "type": "array",
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" },
+ "format": { "type": "string" },
+ "$ref": { "type": "string" }
+ },
+ "dependencies": {
+ "exclusiveMaximum": [ "maximum" ],
+ "exclusiveMinimum": [ "minimum" ]
+ },
+ "default": {}
+ }`)
+ Draft6.loadMeta("http://json-schema.org/draft-06/schema", `{
+ "$schema": "http://json-schema.org/draft-06/schema#",
+ "$id": "http://json-schema.org/draft-06/schema#",
+ "title": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "allOf": [
+ { "$ref": "#/definitions/nonNegativeInteger" },
+ { "default": 0 }
+ ]
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ },
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": {},
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": { "$ref": "#" },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": {}
+ },
+ "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "contains": { "$ref": "#" },
+ "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": { "$ref": "#" },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "regexProperties": true,
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "propertyNames": { "$ref": "#" },
+ "const": {},
+ "enum": {
+ "type": "array",
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "format": { "type": "string" },
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" }
+ },
+ "default": {}
+ }`)
+ Draft7.loadMeta("http://json-schema.org/draft-07/schema", `{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "http://json-schema.org/draft-07/schema#",
+ "title": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "allOf": [
+ { "$ref": "#/definitions/nonNegativeInteger" },
+ { "default": 0 }
+ ]
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ },
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$comment": {
+ "type": "string"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": true,
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "writeOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "examples": {
+ "type": "array",
+ "items": true
+ },
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": { "$ref": "#" },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": true
+ },
+ "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "contains": { "$ref": "#" },
+ "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": { "$ref": "#" },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "propertyNames": { "$ref": "#" },
+ "const": true,
+ "enum": {
+ "type": "array",
+ "items": true,
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "format": { "type": "string" },
+ "contentMediaType": { "type": "string" },
+ "contentEncoding": { "type": "string" },
+ "if": { "$ref": "#" },
+ "then": { "$ref": "#" },
+ "else": { "$ref": "#" },
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" }
+ },
+ "default": true
+ }`)
+ Draft2019.loadMeta("https://json-schema.org/draft/2019-09/schema", `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/schema",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/core": true,
+ "https://json-schema.org/draft/2019-09/vocab/applicator": true,
+ "https://json-schema.org/draft/2019-09/vocab/validation": true,
+ "https://json-schema.org/draft/2019-09/vocab/meta-data": true,
+ "https://json-schema.org/draft/2019-09/vocab/format": false,
+ "https://json-schema.org/draft/2019-09/vocab/content": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Core and Validation specifications meta-schema",
+ "allOf": [
+ {"$ref": "meta/core"},
+ {"$ref": "meta/applicator"},
+ {"$ref": "meta/validation"},
+ {"$ref": "meta/meta-data"},
+ {"$ref": "meta/format"},
+ {"$ref": "meta/content"}
+ ],
+ "type": ["object", "boolean"],
+ "properties": {
+ "definitions": {
+ "$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.",
+ "type": "object",
+ "additionalProperties": { "$recursiveRef": "#" },
+ "default": {}
+ },
+ "dependencies": {
+ "$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"",
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$recursiveRef": "#" },
+ { "$ref": "meta/validation#/$defs/stringArray" }
+ ]
+ }
+ }
+ }
+ }`)
+ Draft2020.loadMeta("https://json-schema.org/draft/2020-12/schema", `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/schema",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/core": true,
+ "https://json-schema.org/draft/2020-12/vocab/applicator": true,
+ "https://json-schema.org/draft/2020-12/vocab/unevaluated": true,
+ "https://json-schema.org/draft/2020-12/vocab/validation": true,
+ "https://json-schema.org/draft/2020-12/vocab/meta-data": true,
+ "https://json-schema.org/draft/2020-12/vocab/format-annotation": true,
+ "https://json-schema.org/draft/2020-12/vocab/content": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Core and Validation specifications meta-schema",
+ "allOf": [
+ {"$ref": "meta/core"},
+ {"$ref": "meta/applicator"},
+ {"$ref": "meta/unevaluated"},
+ {"$ref": "meta/validation"},
+ {"$ref": "meta/meta-data"},
+ {"$ref": "meta/format-annotation"},
+ {"$ref": "meta/content"}
+ ],
+ "type": ["object", "boolean"],
+ "$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.",
+ "properties": {
+ "definitions": {
+ "$comment": "\"definitions\" has been replaced by \"$defs\".",
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "deprecated": true,
+ "default": {}
+ },
+ "dependencies": {
+ "$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.",
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$dynamicRef": "#meta" },
+ { "$ref": "meta/validation#/$defs/stringArray" }
+ ]
+ },
+ "deprecated": true,
+ "default": {}
+ },
+ "$recursiveAnchor": {
+ "$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".",
+ "$ref": "meta/core#/$defs/anchorString",
+ "deprecated": true
+ },
+ "$recursiveRef": {
+ "$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".",
+ "$ref": "meta/core#/$defs/uriReferenceString",
+ "deprecated": true
+ }
+ }
+ }`)
+}
+
+var vocabSchemas = map[string]string{
+ "https://json-schema.org/draft/2019-09/meta/core": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/core",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/core": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Core vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference",
+ "$comment": "Non-empty fragments not allowed.",
+ "pattern": "^[^#]*#?$"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$anchor": {
+ "type": "string",
+ "pattern": "^[A-Za-z][-A-Za-z0-9.:_]*$"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$recursiveRef": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$recursiveAnchor": {
+ "type": "boolean",
+ "default": false
+ },
+ "$vocabulary": {
+ "type": "object",
+ "propertyNames": {
+ "type": "string",
+ "format": "uri"
+ },
+ "additionalProperties": {
+ "type": "boolean"
+ }
+ },
+ "$comment": {
+ "type": "string"
+ },
+ "$defs": {
+ "type": "object",
+ "additionalProperties": { "$recursiveRef": "#" },
+ "default": {}
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/applicator": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/applicator",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/applicator": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Applicator vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "additionalItems": { "$recursiveRef": "#" },
+ "unevaluatedItems": { "$recursiveRef": "#" },
+ "items": {
+ "anyOf": [
+ { "$recursiveRef": "#" },
+ { "$ref": "#/$defs/schemaArray" }
+ ]
+ },
+ "contains": { "$recursiveRef": "#" },
+ "additionalProperties": { "$recursiveRef": "#" },
+ "unevaluatedProperties": { "$recursiveRef": "#" },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$recursiveRef": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$recursiveRef": "#" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependentSchemas": {
+ "type": "object",
+ "additionalProperties": {
+ "$recursiveRef": "#"
+ }
+ },
+ "propertyNames": { "$recursiveRef": "#" },
+ "if": { "$recursiveRef": "#" },
+ "then": { "$recursiveRef": "#" },
+ "else": { "$recursiveRef": "#" },
+ "allOf": { "$ref": "#/$defs/schemaArray" },
+ "anyOf": { "$ref": "#/$defs/schemaArray" },
+ "oneOf": { "$ref": "#/$defs/schemaArray" },
+ "not": { "$recursiveRef": "#" }
+ },
+ "$defs": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$recursiveRef": "#" }
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/validation": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/validation",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/validation": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Validation vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minContains": {
+ "$ref": "#/$defs/nonNegativeInteger",
+ "default": 1
+ },
+ "maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/$defs/stringArray" },
+ "dependentRequired": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/$defs/stringArray"
+ }
+ },
+ "const": true,
+ "enum": {
+ "type": "array",
+ "items": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/$defs/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/$defs/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ }
+ },
+ "$defs": {
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "$ref": "#/$defs/nonNegativeInteger",
+ "default": 0
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/meta-data": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/meta-data",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/meta-data": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Meta-data vocabulary meta-schema",
+
+ "type": ["object", "boolean"],
+ "properties": {
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": true,
+ "deprecated": {
+ "type": "boolean",
+ "default": false
+ },
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "writeOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "examples": {
+ "type": "array",
+ "items": true
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/format": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/format",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/format": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Format vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "format": { "type": "string" }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/content": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/content",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/content": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Content vocabulary meta-schema",
+
+ "type": ["object", "boolean"],
+ "properties": {
+ "contentMediaType": { "type": "string" },
+ "contentEncoding": { "type": "string" },
+ "contentSchema": { "$recursiveRef": "#" }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/core": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/core",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/core": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Core vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "$ref": "#/$defs/uriReferenceString",
+ "$comment": "Non-empty fragments not allowed.",
+ "pattern": "^[^#]*#?$"
+ },
+ "$schema": { "$ref": "#/$defs/uriString" },
+ "$ref": { "$ref": "#/$defs/uriReferenceString" },
+ "$anchor": { "$ref": "#/$defs/anchorString" },
+ "$dynamicRef": { "$ref": "#/$defs/uriReferenceString" },
+ "$dynamicAnchor": { "$ref": "#/$defs/anchorString" },
+ "$vocabulary": {
+ "type": "object",
+ "propertyNames": { "$ref": "#/$defs/uriString" },
+ "additionalProperties": {
+ "type": "boolean"
+ }
+ },
+ "$comment": {
+ "type": "string"
+ },
+ "$defs": {
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" }
+ }
+ },
+ "$defs": {
+ "anchorString": {
+ "type": "string",
+ "pattern": "^[A-Za-z_][-A-Za-z0-9._]*$"
+ },
+ "uriString": {
+ "type": "string",
+ "format": "uri"
+ },
+ "uriReferenceString": {
+ "type": "string",
+ "format": "uri-reference"
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/applicator": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/applicator",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/applicator": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Applicator vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "prefixItems": { "$ref": "#/$defs/schemaArray" },
+ "items": { "$dynamicRef": "#meta" },
+ "contains": { "$dynamicRef": "#meta" },
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependentSchemas": {
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "default": {}
+ },
+ "propertyNames": { "$dynamicRef": "#meta" },
+ "if": { "$dynamicRef": "#meta" },
+ "then": { "$dynamicRef": "#meta" },
+ "else": { "$dynamicRef": "#meta" },
+ "allOf": { "$ref": "#/$defs/schemaArray" },
+ "anyOf": { "$ref": "#/$defs/schemaArray" },
+ "oneOf": { "$ref": "#/$defs/schemaArray" },
+ "not": { "$dynamicRef": "#meta" }
+ },
+ "$defs": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$dynamicRef": "#meta" }
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/unevaluated": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/unevaluated",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/unevaluated": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Unevaluated applicator vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "unevaluatedItems": { "$dynamicRef": "#meta" },
+ "unevaluatedProperties": { "$dynamicRef": "#meta" }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/validation": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/validation",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/validation": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Validation vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "type": {
+ "anyOf": [
+ { "$ref": "#/$defs/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/$defs/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "const": true,
+ "enum": {
+ "type": "array",
+ "items": true
+ },
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minContains": {
+ "$ref": "#/$defs/nonNegativeInteger",
+ "default": 1
+ },
+ "maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/$defs/stringArray" },
+ "dependentRequired": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/$defs/stringArray"
+ }
+ }
+ },
+ "$defs": {
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "$ref": "#/$defs/nonNegativeInteger",
+ "default": 0
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/meta-data": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/meta-data",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/meta-data": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Meta-data vocabulary meta-schema",
+
+ "type": ["object", "boolean"],
+ "properties": {
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": true,
+ "deprecated": {
+ "type": "boolean",
+ "default": false
+ },
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "writeOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "examples": {
+ "type": "array",
+ "items": true
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/format-annotation": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/format-annotation",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/format-annotation": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Format vocabulary meta-schema for annotation results",
+ "type": ["object", "boolean"],
+ "properties": {
+ "format": { "type": "string" }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/format-assertion": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/format-assertion",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/format-assertion": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Format vocabulary meta-schema for assertion results",
+ "type": ["object", "boolean"],
+ "properties": {
+ "format": { "type": "string" }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/content": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/content",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/content": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Content vocabulary meta-schema",
+
+ "type": ["object", "boolean"],
+ "properties": {
+ "contentEncoding": { "type": "string" },
+ "contentMediaType": { "type": "string" },
+ "contentSchema": { "$dynamicRef": "#meta" }
+ }
+ }`,
+}
+
+func clone(m map[string]position) map[string]position {
+ mm := make(map[string]position)
+ for k, v := range m {
+ mm[k] = v
+ }
+ return mm
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
new file mode 100644
index 0000000..deaded8
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
@@ -0,0 +1,129 @@
+package jsonschema
+
+import (
+ "fmt"
+ "strings"
+)
+
+// InvalidJSONTypeError is the error type returned by ValidateInterface.
+// It indicates that the specified Go object is not a valid JSON type.
+type InvalidJSONTypeError string
+
+func (e InvalidJSONTypeError) Error() string {
+ return fmt.Sprintf("jsonschema: invalid jsonType: %s", string(e))
+}
+
+// InfiniteLoopError is returned by Compile/Validate.
+// It gives the url#keywordLocation that leads to the infinite loop.
+type InfiniteLoopError string
+
+func (e InfiniteLoopError) Error() string {
+ return "jsonschema: infinite loop " + string(e)
+}
+
+func infiniteLoopError(stack []schemaRef, sref schemaRef) InfiniteLoopError {
+ var path string
+ for _, ref := range stack {
+ if path == "" {
+ path += ref.schema.Location
+ } else {
+ path += "/" + ref.path
+ }
+ }
+ return InfiniteLoopError(path + "/" + sref.path)
+}
+
+// SchemaError is the error type returned by Compile.
+type SchemaError struct {
+	// SchemaURL is the url of the json-schema that failed to compile.
+	// This is helpful if your schema refers to external schemas.
+ SchemaURL string
+
+ // Err is the error that occurred during compilation.
+	// It could be a ValidationError, because compilation validates
+	// the given schema against the json meta-schema.
+ Err error
+}
+
+func (se *SchemaError) Unwrap() error {
+ return se.Err
+}
+
+func (se *SchemaError) Error() string {
+ s := fmt.Sprintf("jsonschema %s compilation failed", se.SchemaURL)
+ if se.Err != nil {
+ return fmt.Sprintf("%s: %v", s, strings.TrimPrefix(se.Err.Error(), "jsonschema: "))
+ }
+ return s
+}
+
+func (se *SchemaError) GoString() string {
+ if _, ok := se.Err.(*ValidationError); ok {
+ return fmt.Sprintf("jsonschema %s compilation failed\n%#v", se.SchemaURL, se.Err)
+ }
+ return se.Error()
+}
+
+// ValidationError is the error type returned by Validate.
+type ValidationError struct {
+ KeywordLocation string // validation path of validating keyword or schema
+ AbsoluteKeywordLocation string // absolute location of validating keyword or schema
+ InstanceLocation string // location of the json value within the instance being validated
+ Message string // describes error
+ Causes []*ValidationError // nested validation errors
+}
+
+func (ve *ValidationError) add(causes ...error) error {
+ for _, cause := range causes {
+ ve.Causes = append(ve.Causes, cause.(*ValidationError))
+ }
+ return ve
+}
+
+func (ve *ValidationError) causes(err error) error {
+ if err := err.(*ValidationError); err.Message == "" {
+ ve.Causes = err.Causes
+ } else {
+ ve.add(err)
+ }
+ return ve
+}
+
+func (ve *ValidationError) Error() string {
+ leaf := ve
+ for len(leaf.Causes) > 0 {
+ leaf = leaf.Causes[0]
+ }
+ u, _ := split(ve.AbsoluteKeywordLocation)
+ return fmt.Sprintf("jsonschema: %s does not validate with %s: %s", quote(leaf.InstanceLocation), u+"#"+leaf.KeywordLocation, leaf.Message)
+}
+
+func (ve *ValidationError) GoString() string {
+ sloc := ve.AbsoluteKeywordLocation
+ sloc = sloc[strings.IndexByte(sloc, '#')+1:]
+ msg := fmt.Sprintf("[I#%s] [S#%s] %s", ve.InstanceLocation, sloc, ve.Message)
+ for _, c := range ve.Causes {
+ for _, line := range strings.Split(c.GoString(), "\n") {
+ msg += "\n " + line
+ }
+ }
+ return msg
+}
+
+func joinPtr(ptr1, ptr2 string) string {
+ if len(ptr1) == 0 {
+ return ptr2
+ }
+ if len(ptr2) == 0 {
+ return ptr1
+ }
+ return ptr1 + "/" + ptr2
+}
+
+// quote returns single-quoted string
+func quote(s string) string {
+ s = fmt.Sprintf("%q", s)
+ s = strings.ReplaceAll(s, `\"`, `"`)
+ s = strings.ReplaceAll(s, `'`, `\'`)
+ return "'" + s[1:len(s)-1] + "'"
+}
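
Validate returns a *ValidationError, so a caller can choose between the one-line Error form and the detailed cause tree printed by GoString. A small sketch with an illustrative schema and instance:

package main

import (
	"errors"
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	c := jsonschema.NewCompiler()
	schema := `{"properties": {"replicas": {"type": "integer"}}}`
	if err := c.AddResource("schema.json", strings.NewReader(schema)); err != nil {
		panic(err)
	}
	sch := c.MustCompile("schema.json")

	err := sch.Validate(map[string]interface{}{"replicas": "three"})

	var ve *jsonschema.ValidationError
	if errors.As(err, &ve) {
		fmt.Println(ve)         // one line: instance location, keyword location, message
		fmt.Printf("%#v\n", ve) // detailed tree, one cause per indented line
	}
}
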
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
new file mode 100644
index 0000000..452ba11
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
@@ -0,0 +1,116 @@
+package jsonschema
+
+// ExtCompiler compiles custom keyword(s) into ExtSchema.
+type ExtCompiler interface {
+ // Compile compiles the custom keywords in schema m and returns its compiled representation.
+	// If the schema m does not contain the keywords defined by this extension,
+	// a nil compiled representation should be returned.
+ Compile(ctx CompilerContext, m map[string]interface{}) (ExtSchema, error)
+}
+
+// ExtSchema is schema representation of custom keyword(s)
+type ExtSchema interface {
+ // Validate validates the json value v with this ExtSchema.
+ // Returned error must be *ValidationError.
+ Validate(ctx ValidationContext, v interface{}) error
+}
+
+type extension struct {
+ meta *Schema
+ compiler ExtCompiler
+}
+
+// RegisterExtension registers custom keyword(s) into this compiler.
+//
+// name is extension name, used only to avoid name collisions.
+// meta captures the metaschema for the new keywords.
+// This is used to validate the schema before calling ext.Compile.
+func (c *Compiler) RegisterExtension(name string, meta *Schema, ext ExtCompiler) {
+ c.extensions[name] = extension{meta, ext}
+}
+
+// CompilerContext ---
+
+// CompilerContext provides additional context required in compiling for extension.
+type CompilerContext struct {
+ c *Compiler
+ r *resource
+ stack []schemaRef
+ res *resource
+}
+
+// Compile compiles given value at ptr into *Schema. This is useful in implementing
+// keyword like allOf/not/patternProperties.
+//
+// schPath is the relative-json-pointer to the schema to be compiled from parent schema.
+//
+// applicableOnSameInstance tells whether the current schema and the given schema
+// are applied to the same instance value. This is used to detect infinite loops in the schema.
+func (ctx CompilerContext) Compile(schPath string, applicableOnSameInstance bool) (*Schema, error) {
+ var stack []schemaRef
+ if applicableOnSameInstance {
+ stack = ctx.stack
+ }
+ return ctx.c.compileRef(ctx.r, stack, schPath, ctx.res, ctx.r.url+ctx.res.floc+"/"+schPath)
+}
+
+// CompileRef compiles the schema referenced by ref uri
+//
+// refPath is the relative-json-pointer to ref.
+//
+// applicableOnSameInstance tells whether the current schema and the given schema
+// are applied to the same instance value. This is used to detect infinite loops in the schema.
+func (ctx CompilerContext) CompileRef(ref string, refPath string, applicableOnSameInstance bool) (*Schema, error) {
+ var stack []schemaRef
+ if applicableOnSameInstance {
+ stack = ctx.stack
+ }
+ return ctx.c.compileRef(ctx.r, stack, refPath, ctx.res, ref)
+}
+
+// ValidationContext ---
+
+// ValidationContext provides additional context required in validating for extension.
+type ValidationContext struct {
+ result validationResult
+ validate func(sch *Schema, schPath string, v interface{}, vpath string) error
+ validateInplace func(sch *Schema, schPath string) error
+ validationError func(keywordPath string, format string, a ...interface{}) *ValidationError
+}
+
+// EvaluatedProp marks given property of object as evaluated.
+func (ctx ValidationContext) EvaluatedProp(prop string) {
+ delete(ctx.result.unevalProps, prop)
+}
+
+// EvaluatedItem marks given index of array as evaluated.
+func (ctx ValidationContext) EvaluatedItem(index int) {
+ delete(ctx.result.unevalItems, index)
+}
+
+// Validate validates schema s with value v. Extension must use this method instead of
+// *Schema.ValidateInterface method. This will be useful in implementing keywords like
+// allOf/oneOf
+//
+// spath is relative-json-pointer to s
+// vpath is relative-json-pointer to v.
+func (ctx ValidationContext) Validate(s *Schema, spath string, v interface{}, vpath string) error {
+ if vpath == "" {
+ return ctx.validateInplace(s, spath)
+ }
+ return ctx.validate(s, spath, v, vpath)
+}
+
+// Error used to construct validation error by extensions.
+//
+// keywordPath is relative-json-pointer to keyword.
+func (ctx ValidationContext) Error(keywordPath string, format string, a ...interface{}) *ValidationError {
+ return ctx.validationError(keywordPath, format, a...)
+}
+
+// Group is used by extensions to group multiple errors as causes to parent error.
+// This is useful in implementing keywords like allOf where each schema specified
+// in allOf can result in a ValidationError.
+func (ValidationError) Group(parent *ValidationError, causes ...error) error {
+ return parent.add(causes...)
+}
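
A minimal sketch of wiring ExtCompiler and ExtSchema together for a hypothetical "evenNumber" keyword; the keyword, its meta-schema, and the resource names are invented for illustration:

package main

import (
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

type evenCompiler struct{}

// evenSchema is the compiled form of the hypothetical "evenNumber" keyword.
type evenSchema bool

func (evenCompiler) Compile(ctx jsonschema.CompilerContext, m map[string]interface{}) (jsonschema.ExtSchema, error) {
	if v, ok := m["evenNumber"]; ok {
		return evenSchema(v.(bool)), nil
	}
	return nil, nil // keyword absent in this (sub)schema
}

func (s evenSchema) Validate(ctx jsonschema.ValidationContext, v interface{}) error {
	n, ok := v.(float64) // JSON numbers arrive as float64
	if !ok || !bool(s) {
		return nil
	}
	if int64(n)%2 != 0 {
		return ctx.Error("evenNumber", "%v is not an even number", n)
	}
	return nil
}

func main() {
	// meta-schema used to validate schemas that use the new keyword
	mc := jsonschema.NewCompiler()
	if err := mc.AddResource("evenNumber-meta.json", strings.NewReader(
		`{"properties": {"evenNumber": {"type": "boolean"}}}`)); err != nil {
		panic(err)
	}
	meta := mc.MustCompile("evenNumber-meta.json")

	c := jsonschema.NewCompiler()
	c.RegisterExtension("evenNumber", meta, evenCompiler{})
	if err := c.AddResource("schema.json", strings.NewReader(
		`{"type": "number", "evenNumber": true}`)); err != nil {
		panic(err)
	}
	sch := c.MustCompile("schema.json")

	fmt.Println(sch.Validate(4.0)) // nil
	fmt.Println(sch.Validate(3.0)) // validation error raised by the extension
}
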
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
new file mode 100644
index 0000000..0568607
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
@@ -0,0 +1,567 @@
+package jsonschema
+
+import (
+ "errors"
+ "net"
+ "net/mail"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// Formats is a registry of functions, which know how to validate
+// a specific format.
+//
+// New Formats can be registered by adding to this map. Key is format name,
+// value is function that knows how to validate that format.
+var Formats = map[string]func(interface{}) bool{
+ "date-time": isDateTime,
+ "date": isDate,
+ "time": isTime,
+ "duration": isDuration,
+ "period": isPeriod,
+ "hostname": isHostname,
+ "email": isEmail,
+ "ip-address": isIPV4,
+ "ipv4": isIPV4,
+ "ipv6": isIPV6,
+ "uri": isURI,
+ "iri": isURI,
+ "uri-reference": isURIReference,
+ "uriref": isURIReference,
+ "iri-reference": isURIReference,
+ "uri-template": isURITemplate,
+ "regex": isRegex,
+ "json-pointer": isJSONPointer,
+ "relative-json-pointer": isRelativeJSONPointer,
+ "uuid": isUUID,
+}
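
Formats is likewise a plain exported map, so extra checkers can be registered before compiling; they are only asserted under the conditions checked in the compiler (drafts before 2019-09, AssertFormat, or the format-assertion vocabulary). The "k8s-name" format below is a hypothetical example:

package main

import (
	"fmt"
	"regexp"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	// hypothetical "k8s-name" format: a lowercase RFC 1123 label
	dns1123 := regexp.MustCompile(`^[a-z0-9]([-a-z0-9]*[a-z0-9])?$`)
	jsonschema.Formats["k8s-name"] = func(v interface{}) bool {
		s, ok := v.(string)
		if !ok {
			return true // like the built-ins, ignore non-string values
		}
		return dns1123.MatchString(s)
	}

	fmt.Println(jsonschema.Formats["k8s-name"]("my-app"))     // true
	fmt.Println(jsonschema.Formats["k8s-name"]("Not_Valid!")) // false
}
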
+
+// isDateTime tells whether given string is a valid date-time representation
+// as defined by RFC 3339, section 5.6.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
+func isDateTime(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ if len(s) < 20 { // yyyy-mm-ddThh:mm:ssZ
+ return false
+ }
+ if s[10] != 'T' && s[10] != 't' {
+ return false
+ }
+ return isDate(s[:10]) && isTime(s[11:])
+}
+
+// isDate tells whether given string is a valid full-date production
+// as defined by RFC 3339, section 5.6.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
+func isDate(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ _, err := time.Parse("2006-01-02", s)
+ return err == nil
+}
+
+// isTime tells whether given string is a valid full-time production
+// as defined by RFC 3339, section 5.6.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
+func isTime(v interface{}) bool {
+ str, ok := v.(string)
+ if !ok {
+ return true
+ }
+
+ // golang time package does not support leap seconds.
+ // so we are parsing it manually here.
+
+ // hh:mm:ss
+ // 01234567
+ if len(str) < 9 || str[2] != ':' || str[5] != ':' {
+ return false
+ }
+ isInRange := func(str string, min, max int) (int, bool) {
+ n, err := strconv.Atoi(str)
+ if err != nil {
+ return 0, false
+ }
+ if n < min || n > max {
+ return 0, false
+ }
+ return n, true
+ }
+ var h, m, s int
+ if h, ok = isInRange(str[0:2], 0, 23); !ok {
+ return false
+ }
+ if m, ok = isInRange(str[3:5], 0, 59); !ok {
+ return false
+ }
+ if s, ok = isInRange(str[6:8], 0, 60); !ok {
+ return false
+ }
+ str = str[8:]
+
+ // parse secfrac if present
+ if str[0] == '.' {
+		// the dot must be followed by one or more digits
+ str = str[1:]
+ var numDigits int
+ for str != "" {
+ if str[0] < '0' || str[0] > '9' {
+ break
+ }
+ numDigits++
+ str = str[1:]
+ }
+ if numDigits == 0 {
+ return false
+ }
+ }
+
+ if len(str) == 0 {
+ return false
+ }
+
+ if str[0] == 'z' || str[0] == 'Z' {
+ if len(str) != 1 {
+ return false
+ }
+ } else {
+ // time-numoffset
+ // +hh:mm
+ // 012345
+ if len(str) != 6 || str[3] != ':' {
+ return false
+ }
+
+ var sign int
+ if str[0] == '+' {
+ sign = -1
+ } else if str[0] == '-' {
+ sign = +1
+ } else {
+ return false
+ }
+
+ var zh, zm int
+ if zh, ok = isInRange(str[1:3], 0, 23); !ok {
+ return false
+ }
+ if zm, ok = isInRange(str[4:6], 0, 59); !ok {
+ return false
+ }
+
+ // apply timezone offset
+ hm := (h*60 + m) + sign*(zh*60+zm)
+ if hm < 0 {
+ hm += 24 * 60
+ }
+ h, m = hm/60, hm%60
+ }
+
+ // check leapsecond
+ if s == 60 { // leap second
+ if h != 23 || m != 59 {
+ return false
+ }
+ }
+
+ return true
+}
+
+// isDuration tells whether given string is a valid duration format
+// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
+func isDuration(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ if len(s) == 0 || s[0] != 'P' {
+ return false
+ }
+ s = s[1:]
+ parseUnits := func() (units string, ok bool) {
+ for len(s) > 0 && s[0] != 'T' {
+ digits := false
+ for {
+ if len(s) == 0 {
+ break
+ }
+ if s[0] < '0' || s[0] > '9' {
+ break
+ }
+ digits = true
+ s = s[1:]
+ }
+ if !digits || len(s) == 0 {
+ return units, false
+ }
+ units += s[:1]
+ s = s[1:]
+ }
+ return units, true
+ }
+ units, ok := parseUnits()
+ if !ok {
+ return false
+ }
+ if units == "W" {
+ return len(s) == 0 // P_W
+ }
+ if len(units) > 0 {
+ if strings.Index("YMD", units) == -1 {
+ return false
+ }
+ if len(s) == 0 {
+ return true // "P" dur-date
+ }
+ }
+ if len(s) == 0 || s[0] != 'T' {
+ return false
+ }
+ s = s[1:]
+ units, ok = parseUnits()
+ return ok && len(s) == 0 && len(units) > 0 && strings.Index("HMS", units) != -1
+}
+
+// isPeriod tells whether given string is a valid period format
+// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
+func isPeriod(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ slash := strings.IndexByte(s, '/')
+ if slash == -1 {
+ return false
+ }
+ start, end := s[:slash], s[slash+1:]
+ if isDateTime(start) {
+ return isDateTime(end) || isDuration(end)
+ }
+ return isDuration(start) && isDateTime(end)
+}
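+
+// Illustrative accepted forms: "2024-01-01T00:00:00Z/2024-12-31T23:59:59Z"
+// (start/end), "2024-01-01T00:00:00Z/P1Y" (start/duration) and
+// "P1Y/2024-12-31T23:59:59Z" (duration/end).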
+
+// isHostname tells whether given string is a valid representation
+// for an Internet host name, as defined by RFC 1034 section 3.1 and
+// RFC 1123 section 2.1.
+//
+// See https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names, for details.
+func isHostname(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ // entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters
+ s = strings.TrimSuffix(s, ".")
+ if len(s) > 253 {
+ return false
+ }
+
+ // Hostnames are composed of series of labels concatenated with dots, as are all domain names
+ for _, label := range strings.Split(s, ".") {
+ // Each label must be from 1 to 63 characters long
+ if labelLen := len(label); labelLen < 1 || labelLen > 63 {
+ return false
+ }
+
+ // labels must not start with a hyphen
+ // RFC 1123 section 2.1: restriction on the first character
+ // is relaxed to allow either a letter or a digit
+ if first := label[0]; first == '-' {
+ return false
+ }
+
+ // must not end with a hyphen
+ if label[len(label)-1] == '-' {
+ return false
+ }
+
+ // labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner),
+ // the digits '0' through '9', and the hyphen ('-')
+ for _, c := range label {
+ if valid := (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '-'); !valid {
+ return false
+ }
+ }
+ }
+
+ return true
+}
+
+// isEmail tells whether given string is a valid Internet email address
+// as defined by RFC 5322, section 3.4.1.
+//
+// See https://en.wikipedia.org/wiki/Email_address, for details.
+func isEmail(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ // the entire email address must be no more than 254 characters long
+ if len(s) > 254 {
+ return false
+ }
+
+ // email address is generally recognized as having two parts joined with an at-sign
+ at := strings.LastIndexByte(s, '@')
+ if at == -1 {
+ return false
+ }
+ local := s[0:at]
+ domain := s[at+1:]
+
+ // local part may be up to 64 characters long
+ if len(local) > 64 {
+ return false
+ }
+
+ // domain, if enclosed in brackets, must match an IP address
+ if len(domain) >= 2 && domain[0] == '[' && domain[len(domain)-1] == ']' {
+ ip := domain[1 : len(domain)-1]
+ if strings.HasPrefix(ip, "IPv6:") {
+ return isIPV6(strings.TrimPrefix(ip, "IPv6:"))
+ }
+ return isIPV4(ip)
+ }
+
+ // domain must match the requirements for a hostname
+ if !isHostname(domain) {
+ return false
+ }
+
+ _, err := mail.ParseAddress(s)
+ return err == nil
+}
+
+// isIPV4 tells whether given string is a valid representation of an IPv4 address
+// according to the "dotted-quad" ABNF syntax as defined in RFC 2673, section 3.2.
+func isIPV4(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ groups := strings.Split(s, ".")
+ if len(groups) != 4 {
+ return false
+ }
+ for _, group := range groups {
+ n, err := strconv.Atoi(group)
+ if err != nil {
+ return false
+ }
+ if n < 0 || n > 255 {
+ return false
+ }
+ if n != 0 && group[0] == '0' {
+ return false // leading zeroes should be rejected, as they are treated as octals
+ }
+ }
+ return true
+}
+
+// isIPV6 tells whether given string is a valid representation of an IPv6 address
+// as defined in RFC 2373, section 2.2.
+func isIPV6(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ if !strings.Contains(s, ":") {
+ return false
+ }
+ return net.ParseIP(s) != nil
+}
+
+// isURI tells whether given string is a valid URI, according to RFC 3986.
+func isURI(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ u, err := urlParse(s)
+ return err == nil && u.IsAbs()
+}
+
+func urlParse(s string) (*url.URL, error) {
+ u, err := url.Parse(s)
+ if err != nil {
+ return nil, err
+ }
+
+ // if hostname is ipv6, validate it
+ hostname := u.Hostname()
+ if strings.IndexByte(hostname, ':') != -1 {
+ if strings.IndexByte(u.Host, '[') == -1 || strings.IndexByte(u.Host, ']') == -1 {
+ return nil, errors.New("ipv6 address is not enclosed in brackets")
+ }
+ if !isIPV6(hostname) {
+ return nil, errors.New("invalid ipv6 address")
+ }
+ }
+ return u, nil
+}
+
+// isURIReference tells whether given string is a valid URI Reference
+// (either a URI or a relative-reference), according to RFC 3986.
+func isURIReference(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ _, err := urlParse(s)
+ return err == nil && !strings.Contains(s, `\`)
+}
+
+// isURITemplate tells whether given string is a valid URI Template
+// according to RFC6570.
+//
+// Current implementation does minimal validation.
+func isURITemplate(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ u, err := urlParse(s)
+ if err != nil {
+ return false
+ }
+ for _, item := range strings.Split(u.RawPath, "/") {
+ depth := 0
+ for _, ch := range item {
+ switch ch {
+ case '{':
+ depth++
+ if depth != 1 {
+ return false
+ }
+ case '}':
+ depth--
+ if depth != 0 {
+ return false
+ }
+ }
+ }
+ if depth != 0 {
+ return false
+ }
+ }
+ return true
+}
+
+// isRegex tells whether given string is a valid regular expression,
+// according to the ECMA 262 regular expression dialect.
+//
+// The implementation uses go-lang regexp package.
+func isRegex(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ _, err := regexp.Compile(s)
+ return err == nil
+}
+
+// isJSONPointer tells whether given string is a valid JSON Pointer.
+//
+// Note: It returns false for JSON Pointer URI fragments.
+func isJSONPointer(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ if s != "" && !strings.HasPrefix(s, "/") {
+ return false
+ }
+ for _, item := range strings.Split(s, "/") {
+ for i := 0; i < len(item); i++ {
+ if item[i] == '~' {
+ if i == len(item)-1 {
+ return false
+ }
+ switch item[i+1] {
+ case '0', '1':
+ // valid
+ default:
+ return false
+ }
+ }
+ }
+ }
+ return true
+}
+
+// isRelativeJSONPointer tells whether given string is a valid Relative JSON Pointer.
+//
+// see https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
+func isRelativeJSONPointer(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ if s == "" {
+ return false
+ }
+ if s[0] == '0' {
+ s = s[1:]
+ } else if s[0] >= '0' && s[0] <= '9' {
+ for s != "" && s[0] >= '0' && s[0] <= '9' {
+ s = s[1:]
+ }
+ } else {
+ return false
+ }
+ return s == "#" || isJSONPointer(s)
+}
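+
+// Illustrative inputs: "1/foo" (one level up, then member "foo") and "0#"
+// (the index or member name of the current value) are accepted; "#/foo" is
+// rejected because the leading non-negative integer is mandatory.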
+
+// isUUID tells whether given string is a valid uuid format
+// as specified in RFC4122.
+//
+// see https://datatracker.ietf.org/doc/html/rfc4122#page-4, for details
+func isUUID(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ parseHex := func(n int) bool {
+ for n > 0 {
+ if len(s) == 0 {
+ return false
+ }
+ hex := (s[0] >= '0' && s[0] <= '9') || (s[0] >= 'a' && s[0] <= 'f') || (s[0] >= 'A' && s[0] <= 'F')
+ if !hex {
+ return false
+ }
+ s = s[1:]
+ n--
+ }
+ return true
+ }
+ groups := []int{8, 4, 4, 4, 12}
+ for i, numDigits := range groups {
+ if !parseHex(numDigits) {
+ return false
+ }
+ if i == len(groups)-1 {
+ break
+ }
+ if len(s) == 0 || s[0] != '-' {
+ return false
+ }
+ s = s[1:]
+ }
+ return len(s) == 0
+}
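+
+// Illustrative sketch, not part of the upstream file: the validators above are
+// wired into the format registry declared at the top of this file (the exported
+// Formats map in jsonschema v5), so callers can register extra formats the same
+// way. The "k8s-name" format name and the lowercaseRFC1123 helper are made up
+// for illustration, and whether a compiler asserts formats may also depend on
+// its AssertFormat setting.
+//
+//	func lowercaseRFC1123(v interface{}) bool {
+//		s, ok := v.(string)
+//		if !ok {
+//			return true // non-strings are ignored, as in the validators above
+//		}
+//		return s == strings.ToLower(s) && isHostname(s)
+//	}
+//
+//	func init() {
+//		Formats["k8s-name"] = lowercaseRFC1123
+//	}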
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go
new file mode 100644
index 0000000..4198cfe
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go
@@ -0,0 +1,38 @@
+// Package httploader implements loader.Loader for http/https url.
+//
+// The package is typically only imported for the side effect of
+// registering its Loaders.
+//
+// To use httploader, link this package into your program:
+//
+// import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+package httploader
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/santhosh-tekuri/jsonschema/v5"
+)
+
+// Client is the default HTTP Client used to Get the resource.
+var Client = http.DefaultClient
+
+// Load loads resource from given http(s) url.
+func Load(url string) (io.ReadCloser, error) {
+ resp, err := Client.Get(url)
+ if err != nil {
+ return nil, err
+ }
+ if resp.StatusCode != http.StatusOK {
+ _ = resp.Body.Close()
+ return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode)
+ }
+ return resp.Body, nil
+}
+
+func init() {
+ jsonschema.Loaders["http"] = Load
+ jsonschema.Loaders["https"] = Load
+}
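+
+// Illustrative sketch, not part of the upstream file: a program that imports
+// this package for its side effect can compile schemas straight from an
+// http(s) URL. The URL below is a placeholder; the example assumes the
+// package-level jsonschema.Compile helper.
+//
+//	import (
+//		"github.com/santhosh-tekuri/jsonschema/v5"
+//		_ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+//	)
+//
+//	func compileRemote() (*jsonschema.Schema, error) {
+//		// the "https" loader registered in init above fetches the document
+//		return jsonschema.Compile("https://example.com/schemas/config.json")
+//	}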
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
new file mode 100644
index 0000000..c94195c
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
@@ -0,0 +1,60 @@
+package jsonschema
+
+import (
+ "fmt"
+ "io"
+ "net/url"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+)
+
+func loadFileURL(s string) (io.ReadCloser, error) {
+ u, err := url.Parse(s)
+ if err != nil {
+ return nil, err
+ }
+ f := u.Path
+ if runtime.GOOS == "windows" {
+ f = strings.TrimPrefix(f, "/")
+ f = filepath.FromSlash(f)
+ }
+ return os.Open(f)
+}
+
+// Loaders is a registry of functions, which know how to load
+// an absolute url of a specific scheme.
+//
+// New loaders can be registered by adding to this map. The key is the URL
+// scheme, the value is a function that knows how to load urls of that scheme.
+var Loaders = map[string]func(url string) (io.ReadCloser, error){
+ "file": loadFileURL,
+}
+
+// LoaderNotFoundError is the error type returned by Load function.
+// It tells that no Loader is registered for that URL Scheme.
+type LoaderNotFoundError string
+
+func (e LoaderNotFoundError) Error() string {
+ return fmt.Sprintf("jsonschema: no Loader found for %s", string(e))
+}
+
+// LoadURL loads the document at the given absolute URL. The default
+// implementation uses the Loaders registry to look up a loader by URL scheme.
+//
+// Users can change this variable if they would like to take complete
+// responsibility for loading the given URL. Used by Compiler if its LoadURL
+// field is nil.
+var LoadURL = func(s string) (io.ReadCloser, error) {
+ u, err := url.Parse(s)
+ if err != nil {
+ return nil, err
+ }
+ loader, ok := Loaders[u.Scheme]
+ if !ok {
+ return nil, LoaderNotFoundError(s)
+ }
+ return loader(s)
+}
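+
+// Illustrative sketch, not part of the upstream file: registering a loader for
+// a hypothetical "mem" scheme that serves schema documents from memory. The
+// memSchemas map and the scheme name are assumptions made for illustration.
+//
+//	var memSchemas = map[string]string{
+//		"mem://point.json": `{"type": "object", "required": ["x", "y"]}`,
+//	}
+//
+//	func init() {
+//		Loaders["mem"] = func(url string) (io.ReadCloser, error) {
+//			doc, ok := memSchemas[url]
+//			if !ok {
+//				return nil, fmt.Errorf("no in-memory schema for %s", url)
+//			}
+//			return io.NopCloser(strings.NewReader(doc)), nil
+//		}
+//	}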
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
new file mode 100644
index 0000000..d65ae2a
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
@@ -0,0 +1,77 @@
+package jsonschema
+
+// Flag is output format with simple boolean property valid.
+type Flag struct {
+ Valid bool `json:"valid"`
+}
+
+// FlagOutput returns output in flag format
+func (ve *ValidationError) FlagOutput() Flag {
+ return Flag{}
+}
+
+// Basic ---
+
+// Basic is output format with flat list of output units.
+type Basic struct {
+ Valid bool `json:"valid"`
+ Errors []BasicError `json:"errors"`
+}
+
+// BasicError is output unit in basic format.
+type BasicError struct {
+ KeywordLocation string `json:"keywordLocation"`
+ AbsoluteKeywordLocation string `json:"absoluteKeywordLocation"`
+ InstanceLocation string `json:"instanceLocation"`
+ Error string `json:"error"`
+}
+
+// BasicOutput returns output in basic format
+func (ve *ValidationError) BasicOutput() Basic {
+ var errors []BasicError
+ var flatten func(*ValidationError)
+ flatten = func(ve *ValidationError) {
+ errors = append(errors, BasicError{
+ KeywordLocation: ve.KeywordLocation,
+ AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
+ InstanceLocation: ve.InstanceLocation,
+ Error: ve.Message,
+ })
+ for _, cause := range ve.Causes {
+ flatten(cause)
+ }
+ }
+ flatten(ve)
+ return Basic{Errors: errors}
+}
+
+// Detailed ---
+
+// Detailed is output format based on structure of schema.
+type Detailed struct {
+ Valid bool `json:"valid"`
+ KeywordLocation string `json:"keywordLocation"`
+ AbsoluteKeywordLocation string `json:"absoluteKeywordLocation"`
+ InstanceLocation string `json:"instanceLocation"`
+ Error string `json:"error,omitempty"`
+ Errors []Detailed `json:"errors,omitempty"`
+}
+
+// DetailedOutput returns output in detailed format
+func (ve *ValidationError) DetailedOutput() Detailed {
+ var errors []Detailed
+ for _, cause := range ve.Causes {
+ errors = append(errors, cause.DetailedOutput())
+ }
+ var message = ve.Message
+ if len(ve.Causes) > 0 {
+ message = ""
+ }
+ return Detailed{
+ KeywordLocation: ve.KeywordLocation,
+ AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
+ InstanceLocation: ve.InstanceLocation,
+ Error: message,
+ Errors: errors,
+ }
+}
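+
+// Illustrative sketch, not part of the upstream file: how a caller might render
+// a validation failure using the formats above. sch and instance are
+// placeholders, and the snippet assumes encoding/json and fmt are imported.
+//
+//	if err := sch.Validate(instance); err != nil {
+//		if ve, ok := err.(*jsonschema.ValidationError); ok {
+//			out, _ := json.MarshalIndent(ve.DetailedOutput(), "", "  ")
+//			fmt.Println(string(out))
+//		}
+//	}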
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
new file mode 100644
index 0000000..18349da
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
@@ -0,0 +1,280 @@
+package jsonschema
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/url"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
+type resource struct {
+ url string // base url of resource. can be empty
+ floc string // fragment with json-pointer from root resource
+ doc interface{}
+ draft *Draft
+ subresources map[string]*resource // key is floc. only applicable for root resource
+ schema *Schema
+}
+
+func (r *resource) String() string {
+ return r.url + r.floc
+}
+
+func newResource(url string, r io.Reader) (*resource, error) {
+ if strings.IndexByte(url, '#') != -1 {
+ panic(fmt.Sprintf("BUG: newResource(%q)", url))
+ }
+ doc, err := unmarshal(r)
+ if err != nil {
+ return nil, fmt.Errorf("jsonschema: invalid json %s: %v", url, err)
+ }
+ url, err = toAbs(url)
+ if err != nil {
+ return nil, err
+ }
+ return &resource{
+ url: url,
+ floc: "#",
+ doc: doc,
+ }, nil
+}
+
+// fillSubschemas fills subschemas in res into r.subresources
+func (r *resource) fillSubschemas(c *Compiler, res *resource) error {
+ if err := c.validateSchema(r, res.doc, res.floc[1:]); err != nil {
+ return err
+ }
+
+ if r.subresources == nil {
+ r.subresources = make(map[string]*resource)
+ }
+ if err := r.draft.listSubschemas(res, r.baseURL(res.floc), r.subresources); err != nil {
+ return err
+ }
+
+ // ensure subresource.url uniqueness
+ url2floc := make(map[string]string)
+ for _, sr := range r.subresources {
+ if sr.url != "" {
+ if floc, ok := url2floc[sr.url]; ok {
+ return fmt.Errorf("jsonschema: %q and %q in %s have same canonical-uri", floc[1:], sr.floc[1:], r.url)
+ }
+ url2floc[sr.url] = sr.floc
+ }
+ }
+
+ return nil
+}
+
+// listResources lists all subresources in res
+func (r *resource) listResources(res *resource) []*resource {
+ var result []*resource
+ prefix := res.floc + "/"
+ for _, sr := range r.subresources {
+ if strings.HasPrefix(sr.floc, prefix) {
+ result = append(result, sr)
+ }
+ }
+ return result
+}
+
+func (r *resource) findResource(url string) *resource {
+ if r.url == url {
+ return r
+ }
+ for _, res := range r.subresources {
+ if res.url == url {
+ return res
+ }
+ }
+ return nil
+}
+
+// resolve fragment f with sr as base
+func (r *resource) resolveFragment(c *Compiler, sr *resource, f string) (*resource, error) {
+ if f == "#" || f == "#/" {
+ return sr, nil
+ }
+
+ // resolve by anchor
+ if !strings.HasPrefix(f, "#/") {
+ // check in given resource
+ for _, anchor := range r.draft.anchors(sr.doc) {
+ if anchor == f[1:] {
+ return sr, nil
+ }
+ }
+
+ // check in subresources that have the same base url
+ prefix := sr.floc + "/"
+ for _, res := range r.subresources {
+ if strings.HasPrefix(res.floc, prefix) && r.baseURL(res.floc) == sr.url {
+ for _, anchor := range r.draft.anchors(res.doc) {
+ if anchor == f[1:] {
+ return res, nil
+ }
+ }
+ }
+ }
+ return nil, nil
+ }
+
+ // resolve by ptr
+ floc := sr.floc + f[1:]
+ if res, ok := r.subresources[floc]; ok {
+ return res, nil
+ }
+
+ // non-standard location
+ doc := r.doc
+ for _, item := range strings.Split(floc[2:], "/") {
+ item = strings.Replace(item, "~1", "/", -1)
+ item = strings.Replace(item, "~0", "~", -1)
+ item, err := url.PathUnescape(item)
+ if err != nil {
+ return nil, err
+ }
+ switch d := doc.(type) {
+ case map[string]interface{}:
+ if _, ok := d[item]; !ok {
+ return nil, nil
+ }
+ doc = d[item]
+ case []interface{}:
+ index, err := strconv.Atoi(item)
+ if err != nil {
+ return nil, err
+ }
+ if index < 0 || index >= len(d) {
+ return nil, nil
+ }
+ doc = d[index]
+ default:
+ return nil, nil
+ }
+ }
+
+ id, err := r.draft.resolveID(r.baseURL(floc), doc)
+ if err != nil {
+ return nil, err
+ }
+ res := &resource{url: id, floc: floc, doc: doc}
+ r.subresources[floc] = res
+ if err := r.fillSubschemas(c, res); err != nil {
+ return nil, err
+ }
+ return res, nil
+}
+
+func (r *resource) baseURL(floc string) string {
+ for {
+ if sr, ok := r.subresources[floc]; ok {
+ if sr.url != "" {
+ return sr.url
+ }
+ }
+ slash := strings.LastIndexByte(floc, '/')
+ if slash == -1 {
+ break
+ }
+ floc = floc[:slash]
+ }
+ return r.url
+}
+
+// url helpers ---
+
+func toAbs(s string) (string, error) {
+ // if windows absolute file path, convert to file url
+ // because: net/url parses the drive letter as a scheme
+ if runtime.GOOS == "windows" && len(s) >= 3 && s[1:3] == `:\` {
+ s = "file:///" + filepath.ToSlash(s)
+ }
+
+ u, err := url.Parse(s)
+ if err != nil {
+ return "", err
+ }
+ if u.IsAbs() {
+ return s, nil
+ }
+
+ // s is filepath
+ if s, err = filepath.Abs(s); err != nil {
+ return "", err
+ }
+ if runtime.GOOS == "windows" {
+ s = "file:///" + filepath.ToSlash(s)
+ } else {
+ s = "file://" + s
+ }
+ u, err = url.Parse(s) // to fix spaces in filepath
+ return u.String(), err
+}
+
+func resolveURL(base, ref string) (string, error) {
+ if ref == "" {
+ return base, nil
+ }
+ if strings.HasPrefix(ref, "urn:") {
+ return ref, nil
+ }
+
+ refURL, err := url.Parse(ref)
+ if err != nil {
+ return "", err
+ }
+ if refURL.IsAbs() {
+ return ref, nil
+ }
+
+ if strings.HasPrefix(base, "urn:") {
+ base, _ = split(base)
+ return base + ref, nil
+ }
+
+ baseURL, err := url.Parse(base)
+ if err != nil {
+ return "", err
+ }
+ return baseURL.ResolveReference(refURL).String(), nil
+}
+
+func split(uri string) (string, string) {
+ hash := strings.IndexByte(uri, '#')
+ if hash == -1 {
+ return uri, "#"
+ }
+ f := uri[hash:]
+ if f == "#/" {
+ f = "#"
+ }
+ return uri[0:hash], f
+}
+
+func (s *Schema) url() string {
+ u, _ := split(s.Location)
+ return u
+}
+
+func (s *Schema) loc() string {
+ _, f := split(s.Location)
+ return f[1:]
+}
+
+func unmarshal(r io.Reader) (interface{}, error) {
+ decoder := json.NewDecoder(r)
+ decoder.UseNumber()
+ var doc interface{}
+ if err := decoder.Decode(&doc); err != nil {
+ return nil, err
+ }
+ if t, _ := decoder.Token(); t != nil {
+ return nil, fmt.Errorf("invalid character %v after top-level value", t)
+ }
+ return doc, nil
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
new file mode 100644
index 0000000..0c8d8a3
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
@@ -0,0 +1,826 @@
+package jsonschema
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "math/big"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+)
+
+// A Schema represents compiled version of json-schema.
+type Schema struct {
+ Location string // absolute location
+
+ meta *Schema
+ vocab []string
+ dynamicAnchors []*Schema
+
+ // type agnostic validations
+ Format string
+ format func(interface{}) bool
+ Always *bool // always pass/fail. used when booleans are used as schemas in draft-07.
+ Ref *Schema
+ RecursiveAnchor bool
+ RecursiveRef *Schema
+ DynamicAnchor string
+ DynamicRef *Schema
+ Types []string // allowed types.
+ Constant []interface{} // first element in slice is constant value. note: slice is used to capture nil constant.
+ Enum []interface{} // allowed values.
+ enumError string // error message for enum fail. captured here to avoid constructing error message every time.
+ Not *Schema
+ AllOf []*Schema
+ AnyOf []*Schema
+ OneOf []*Schema
+ If *Schema
+ Then *Schema // nil, when If is nil.
+ Else *Schema // nil, when If is nil.
+
+ // object validations
+ MinProperties int // -1 if not specified.
+ MaxProperties int // -1 if not specified.
+ Required []string // list of required properties.
+ Properties map[string]*Schema
+ PropertyNames *Schema
+ RegexProperties bool // property names must be valid regex. used only in draft4 as workaround in metaschema.
+ PatternProperties map[*regexp.Regexp]*Schema
+ AdditionalProperties interface{} // nil or bool or *Schema.
+ Dependencies map[string]interface{} // map value is *Schema or []string.
+ DependentRequired map[string][]string
+ DependentSchemas map[string]*Schema
+ UnevaluatedProperties *Schema
+
+ // array validations
+ MinItems int // -1 if not specified.
+ MaxItems int // -1 if not specified.
+ UniqueItems bool
+ Items interface{} // nil or *Schema or []*Schema
+ AdditionalItems interface{} // nil or bool or *Schema.
+ PrefixItems []*Schema
+ Items2020 *Schema // items keyword reintroduced in draft 2020-12
+ Contains *Schema
+ ContainsEval bool // whether any item in an array that passes validation of the contains schema is considered "evaluated"
+ MinContains int // 1 if not specified
+ MaxContains int // -1 if not specified
+ UnevaluatedItems *Schema
+
+ // string validations
+ MinLength int // -1 if not specified.
+ MaxLength int // -1 if not specified.
+ Pattern *regexp.Regexp
+ ContentEncoding string
+ decoder func(string) ([]byte, error)
+ ContentMediaType string
+ mediaType func([]byte) error
+ ContentSchema *Schema
+
+ // number validators
+ Minimum *big.Rat
+ ExclusiveMinimum *big.Rat
+ Maximum *big.Rat
+ ExclusiveMaximum *big.Rat
+ MultipleOf *big.Rat
+
+ // annotations. captured only when Compiler.ExtractAnnotations is true.
+ Title string
+ Description string
+ Default interface{}
+ Comment string
+ ReadOnly bool
+ WriteOnly bool
+ Examples []interface{}
+ Deprecated bool
+
+ // user defined extensions
+ Extensions map[string]ExtSchema
+}
+
+func (s *Schema) String() string {
+ return s.Location
+}
+
+func newSchema(url, floc string, doc interface{}) *Schema {
+ // fill with default values
+ s := &Schema{
+ Location: url + floc,
+ MinProperties: -1,
+ MaxProperties: -1,
+ MinItems: -1,
+ MaxItems: -1,
+ MinContains: 1,
+ MaxContains: -1,
+ MinLength: -1,
+ MaxLength: -1,
+ }
+
+ if doc, ok := doc.(map[string]interface{}); ok {
+ if ra, ok := doc["$recursiveAnchor"]; ok {
+ if ra, ok := ra.(bool); ok {
+ s.RecursiveAnchor = ra
+ }
+ }
+ if da, ok := doc["$dynamicAnchor"]; ok {
+ if da, ok := da.(string); ok {
+ s.DynamicAnchor = da
+ }
+ }
+ }
+ return s
+}
+
+func (s *Schema) hasVocab(name string) bool {
+ if s == nil { // during bootstrap
+ return true
+ }
+ if name == "core" {
+ return true
+ }
+ for _, url := range s.vocab {
+ if url == "https://json-schema.org/draft/2019-09/vocab/"+name {
+ return true
+ }
+ if url == "https://json-schema.org/draft/2020-12/vocab/"+name {
+ return true
+ }
+ }
+ return false
+}
+
+// Validate validates the given value v against the json-schema s.
+//
+// v must be the raw json value. For number precision,
+// unmarshal with json.UseNumber().
+//
+// Returns *ValidationError if v does not conform to schema s.
+// Returns InfiniteLoopError if it detects a loop during validation.
+// Returns InvalidJSONTypeError if it detects any non-json value in v.
+func (s *Schema) Validate(v interface{}) (err error) {
+ return s.validateValue(v, "")
+}
+
+func (s *Schema) validateValue(v interface{}, vloc string) (err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ switch r := r.(type) {
+ case InfiniteLoopError, InvalidJSONTypeError:
+ err = r.(error)
+ default:
+ panic(r)
+ }
+ }
+ }()
+ if _, err := s.validate(nil, 0, "", v, vloc); err != nil {
+ ve := ValidationError{
+ KeywordLocation: "",
+ AbsoluteKeywordLocation: s.Location,
+ InstanceLocation: vloc,
+ Message: fmt.Sprintf("doesn't validate with %s", s.Location),
+ }
+ return ve.causes(err)
+ }
+ return nil
+}
+
+// validate validates given value v with this schema.
+func (s *Schema) validate(scope []schemaRef, vscope int, spath string, v interface{}, vloc string) (result validationResult, err error) {
+ validationError := func(keywordPath string, format string, a ...interface{}) *ValidationError {
+ return &ValidationError{
+ KeywordLocation: keywordLocation(scope, keywordPath),
+ AbsoluteKeywordLocation: joinPtr(s.Location, keywordPath),
+ InstanceLocation: vloc,
+ Message: fmt.Sprintf(format, a...),
+ }
+ }
+
+ sref := schemaRef{spath, s, false}
+ if err := checkLoop(scope[len(scope)-vscope:], sref); err != nil {
+ panic(err)
+ }
+ scope = append(scope, sref)
+ vscope++
+
+ // populate result
+ switch v := v.(type) {
+ case map[string]interface{}:
+ result.unevalProps = make(map[string]struct{})
+ for pname := range v {
+ result.unevalProps[pname] = struct{}{}
+ }
+ case []interface{}:
+ result.unevalItems = make(map[int]struct{})
+ for i := range v {
+ result.unevalItems[i] = struct{}{}
+ }
+ }
+
+ validate := func(sch *Schema, schPath string, v interface{}, vpath string) error {
+ vloc := vloc
+ if vpath != "" {
+ vloc += "/" + vpath
+ }
+ _, err := sch.validate(scope, 0, schPath, v, vloc)
+ return err
+ }
+
+ validateInplace := func(sch *Schema, schPath string) error {
+ vr, err := sch.validate(scope, vscope, schPath, v, vloc)
+ if err == nil {
+ // update result
+ for pname := range result.unevalProps {
+ if _, ok := vr.unevalProps[pname]; !ok {
+ delete(result.unevalProps, pname)
+ }
+ }
+ for i := range result.unevalItems {
+ if _, ok := vr.unevalItems[i]; !ok {
+ delete(result.unevalItems, i)
+ }
+ }
+ }
+ return err
+ }
+
+ if s.Always != nil {
+ if !*s.Always {
+ return result, validationError("", "not allowed")
+ }
+ return result, nil
+ }
+
+ if len(s.Types) > 0 {
+ vType := jsonType(v)
+ matched := false
+ for _, t := range s.Types {
+ if vType == t {
+ matched = true
+ break
+ } else if t == "integer" && vType == "number" {
+ num, _ := new(big.Rat).SetString(fmt.Sprint(v))
+ if num.IsInt() {
+ matched = true
+ break
+ }
+ }
+ }
+ if !matched {
+ return result, validationError("type", "expected %s, but got %s", strings.Join(s.Types, " or "), vType)
+ }
+ }
+
+ var errors []error
+
+ if len(s.Constant) > 0 {
+ if !equals(v, s.Constant[0]) {
+ switch jsonType(s.Constant[0]) {
+ case "object", "array":
+ errors = append(errors, validationError("const", "const failed"))
+ default:
+ errors = append(errors, validationError("const", "value must be %#v", s.Constant[0]))
+ }
+ }
+ }
+
+ if len(s.Enum) > 0 {
+ matched := false
+ for _, item := range s.Enum {
+ if equals(v, item) {
+ matched = true
+ break
+ }
+ }
+ if !matched {
+ errors = append(errors, validationError("enum", s.enumError))
+ }
+ }
+
+ if s.format != nil && !s.format(v) {
+ var val = v
+ if v, ok := v.(string); ok {
+ val = quote(v)
+ }
+ errors = append(errors, validationError("format", "%v is not valid %s", val, quote(s.Format)))
+ }
+
+ switch v := v.(type) {
+ case map[string]interface{}:
+ if s.MinProperties != -1 && len(v) < s.MinProperties {
+ errors = append(errors, validationError("minProperties", "minimum %d properties allowed, but found %d properties", s.MinProperties, len(v)))
+ }
+ if s.MaxProperties != -1 && len(v) > s.MaxProperties {
+ errors = append(errors, validationError("maxProperties", "maximum %d properties allowed, but found %d properties", s.MaxProperties, len(v)))
+ }
+ if len(s.Required) > 0 {
+ var missing []string
+ for _, pname := range s.Required {
+ if _, ok := v[pname]; !ok {
+ missing = append(missing, quote(pname))
+ }
+ }
+ if len(missing) > 0 {
+ errors = append(errors, validationError("required", "missing properties: %s", strings.Join(missing, ", ")))
+ }
+ }
+
+ for pname, sch := range s.Properties {
+ if pvalue, ok := v[pname]; ok {
+ delete(result.unevalProps, pname)
+ if err := validate(sch, "properties/"+escape(pname), pvalue, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+
+ if s.PropertyNames != nil {
+ for pname := range v {
+ if err := validate(s.PropertyNames, "propertyNames", pname, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+
+ if s.RegexProperties {
+ for pname := range v {
+ if !isRegex(pname) {
+ errors = append(errors, validationError("", "patternProperty %s is not valid regex", quote(pname)))
+ }
+ }
+ }
+ for pattern, sch := range s.PatternProperties {
+ for pname, pvalue := range v {
+ if pattern.MatchString(pname) {
+ delete(result.unevalProps, pname)
+ if err := validate(sch, "patternProperties/"+escape(pattern.String()), pvalue, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+ }
+ if s.AdditionalProperties != nil {
+ if allowed, ok := s.AdditionalProperties.(bool); ok {
+ if !allowed && len(result.unevalProps) > 0 {
+ errors = append(errors, validationError("additionalProperties", "additionalProperties %s not allowed", result.unevalPnames()))
+ }
+ } else {
+ schema := s.AdditionalProperties.(*Schema)
+ for pname := range result.unevalProps {
+ if pvalue, ok := v[pname]; ok {
+ if err := validate(schema, "additionalProperties", pvalue, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+ }
+ result.unevalProps = nil
+ }
+ for dname, dvalue := range s.Dependencies {
+ if _, ok := v[dname]; ok {
+ switch dvalue := dvalue.(type) {
+ case *Schema:
+ if err := validateInplace(dvalue, "dependencies/"+escape(dname)); err != nil {
+ errors = append(errors, err)
+ }
+ case []string:
+ for i, pname := range dvalue {
+ if _, ok := v[pname]; !ok {
+ errors = append(errors, validationError("dependencies/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
+ }
+ }
+ }
+ }
+ }
+ for dname, dvalue := range s.DependentRequired {
+ if _, ok := v[dname]; ok {
+ for i, pname := range dvalue {
+ if _, ok := v[pname]; !ok {
+ errors = append(errors, validationError("dependentRequired/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
+ }
+ }
+ }
+ }
+ for dname, sch := range s.DependentSchemas {
+ if _, ok := v[dname]; ok {
+ if err := validateInplace(sch, "dependentSchemas/"+escape(dname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+
+ case []interface{}:
+ if s.MinItems != -1 && len(v) < s.MinItems {
+ errors = append(errors, validationError("minItems", "minimum %d items required, but found %d items", s.MinItems, len(v)))
+ }
+ if s.MaxItems != -1 && len(v) > s.MaxItems {
+ errors = append(errors, validationError("maxItems", "maximum %d items required, but found %d items", s.MaxItems, len(v)))
+ }
+ if s.UniqueItems {
+ for i := 1; i < len(v); i++ {
+ for j := 0; j < i; j++ {
+ if equals(v[i], v[j]) {
+ errors = append(errors, validationError("uniqueItems", "items at index %d and %d are equal", j, i))
+ }
+ }
+ }
+ }
+
+ // items + additionalItems
+ switch items := s.Items.(type) {
+ case *Schema:
+ for i, item := range v {
+ if err := validate(items, "items", item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ result.unevalItems = nil
+ case []*Schema:
+ for i, item := range v {
+ if i < len(items) {
+ delete(result.unevalItems, i)
+ if err := validate(items[i], "items/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ } else if sch, ok := s.AdditionalItems.(*Schema); ok {
+ delete(result.unevalItems, i)
+ if err := validate(sch, "additionalItems", item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ } else {
+ break
+ }
+ }
+ if additionalItems, ok := s.AdditionalItems.(bool); ok {
+ if additionalItems {
+ result.unevalItems = nil
+ } else if len(v) > len(items) {
+ errors = append(errors, validationError("additionalItems", "only %d items are allowed, but found %d items", len(items), len(v)))
+ }
+ }
+ }
+
+ // prefixItems + items
+ for i, item := range v {
+ if i < len(s.PrefixItems) {
+ delete(result.unevalItems, i)
+ if err := validate(s.PrefixItems[i], "prefixItems/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ } else if s.Items2020 != nil {
+ delete(result.unevalItems, i)
+ if err := validate(s.Items2020, "items", item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ } else {
+ break
+ }
+ }
+
+ // contains + minContains + maxContains
+ if s.Contains != nil && (s.MinContains != -1 || s.MaxContains != -1) {
+ matched := 0
+ var causes []error
+ for i, item := range v {
+ if err := validate(s.Contains, "contains", item, strconv.Itoa(i)); err != nil {
+ causes = append(causes, err)
+ } else {
+ matched++
+ if s.ContainsEval {
+ delete(result.unevalItems, i)
+ }
+ }
+ }
+ if s.MinContains != -1 && matched < s.MinContains {
+ errors = append(errors, validationError("minContains", "valid must be >= %d, but got %d", s.MinContains, matched).add(causes...))
+ }
+ if s.MaxContains != -1 && matched > s.MaxContains {
+ errors = append(errors, validationError("maxContains", "valid must be <= %d, but got %d", s.MaxContains, matched))
+ }
+ }
+
+ case string:
+ // minLength + maxLength
+ if s.MinLength != -1 || s.MaxLength != -1 {
+ length := utf8.RuneCount([]byte(v))
+ if s.MinLength != -1 && length < s.MinLength {
+ errors = append(errors, validationError("minLength", "length must be >= %d, but got %d", s.MinLength, length))
+ }
+ if s.MaxLength != -1 && length > s.MaxLength {
+ errors = append(errors, validationError("maxLength", "length must be <= %d, but got %d", s.MaxLength, length))
+ }
+ }
+
+ if s.Pattern != nil && !s.Pattern.MatchString(v) {
+ errors = append(errors, validationError("pattern", "does not match pattern %s", quote(s.Pattern.String())))
+ }
+
+ // contentEncoding + contentMediaType
+ if s.decoder != nil || s.mediaType != nil {
+ decoded := s.ContentEncoding == ""
+ var content []byte
+ if s.decoder != nil {
+ b, err := s.decoder(v)
+ if err != nil {
+ errors = append(errors, validationError("contentEncoding", "value is not %s encoded", s.ContentEncoding))
+ } else {
+ content, decoded = b, true
+ }
+ }
+ if decoded && s.mediaType != nil {
+ if s.decoder == nil {
+ content = []byte(v)
+ }
+ if err := s.mediaType(content); err != nil {
+ errors = append(errors, validationError("contentMediaType", "value is not of mediatype %s", quote(s.ContentMediaType)))
+ }
+ }
+ if decoded && s.ContentSchema != nil {
+ contentJSON, err := unmarshal(bytes.NewReader(content))
+ if err != nil {
+ errors = append(errors, validationError("contentSchema", "value is not valid json"))
+ } else {
+ err := validate(s.ContentSchema, "contentSchema", contentJSON, "")
+ if err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+ }
+
+ case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
+ // lazy convert to *big.Rat to avoid allocation
+ var numVal *big.Rat
+ num := func() *big.Rat {
+ if numVal == nil {
+ numVal, _ = new(big.Rat).SetString(fmt.Sprint(v))
+ }
+ return numVal
+ }
+ f64 := func(r *big.Rat) float64 {
+ f, _ := r.Float64()
+ return f
+ }
+ if s.Minimum != nil && num().Cmp(s.Minimum) < 0 {
+ errors = append(errors, validationError("minimum", "must be >= %v but found %v", f64(s.Minimum), v))
+ }
+ if s.ExclusiveMinimum != nil && num().Cmp(s.ExclusiveMinimum) <= 0 {
+ errors = append(errors, validationError("exclusiveMinimum", "must be > %v but found %v", f64(s.ExclusiveMinimum), v))
+ }
+ if s.Maximum != nil && num().Cmp(s.Maximum) > 0 {
+ errors = append(errors, validationError("maximum", "must be <= %v but found %v", f64(s.Maximum), v))
+ }
+ if s.ExclusiveMaximum != nil && num().Cmp(s.ExclusiveMaximum) >= 0 {
+ errors = append(errors, validationError("exclusiveMaximum", "must be < %v but found %v", f64(s.ExclusiveMaximum), v))
+ }
+ if s.MultipleOf != nil {
+ if q := new(big.Rat).Quo(num(), s.MultipleOf); !q.IsInt() {
+ errors = append(errors, validationError("multipleOf", "%v not multipleOf %v", v, f64(s.MultipleOf)))
+ }
+ }
+ }
+
+ // $ref + $recursiveRef + $dynamicRef
+ validateRef := func(sch *Schema, refPath string) error {
+ if sch != nil {
+ if err := validateInplace(sch, refPath); err != nil {
+ var url = sch.Location
+ if s.url() == sch.url() {
+ url = sch.loc()
+ }
+ return validationError(refPath, "doesn't validate with %s", quote(url)).causes(err)
+ }
+ }
+ return nil
+ }
+ if err := validateRef(s.Ref, "$ref"); err != nil {
+ errors = append(errors, err)
+ }
+ if s.RecursiveRef != nil {
+ sch := s.RecursiveRef
+ if sch.RecursiveAnchor {
+ // recursiveRef based on scope
+ for _, e := range scope {
+ if e.schema.RecursiveAnchor {
+ sch = e.schema
+ break
+ }
+ }
+ }
+ if err := validateRef(sch, "$recursiveRef"); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ if s.DynamicRef != nil {
+ sch := s.DynamicRef
+ if sch.DynamicAnchor != "" {
+ // dynamicRef based on scope
+ for i := len(scope) - 1; i >= 0; i-- {
+ sr := scope[i]
+ if sr.discard {
+ break
+ }
+ for _, da := range sr.schema.dynamicAnchors {
+ if da.DynamicAnchor == s.DynamicRef.DynamicAnchor && da != s.DynamicRef {
+ sch = da
+ break
+ }
+ }
+ }
+ }
+ if err := validateRef(sch, "$dynamicRef"); err != nil {
+ errors = append(errors, err)
+ }
+ }
+
+ if s.Not != nil && validateInplace(s.Not, "not") == nil {
+ errors = append(errors, validationError("not", "not failed"))
+ }
+
+ for i, sch := range s.AllOf {
+ schPath := "allOf/" + strconv.Itoa(i)
+ if err := validateInplace(sch, schPath); err != nil {
+ errors = append(errors, validationError(schPath, "allOf failed").add(err))
+ }
+ }
+
+ if len(s.AnyOf) > 0 {
+ matched := false
+ var causes []error
+ for i, sch := range s.AnyOf {
+ if err := validateInplace(sch, "anyOf/"+strconv.Itoa(i)); err == nil {
+ matched = true
+ } else {
+ causes = append(causes, err)
+ }
+ }
+ if !matched {
+ errors = append(errors, validationError("anyOf", "anyOf failed").add(causes...))
+ }
+ }
+
+ if len(s.OneOf) > 0 {
+ matched := -1
+ var causes []error
+ for i, sch := range s.OneOf {
+ if err := validateInplace(sch, "oneOf/"+strconv.Itoa(i)); err == nil {
+ if matched == -1 {
+ matched = i
+ } else {
+ errors = append(errors, validationError("oneOf", "valid against schemas at indexes %d and %d", matched, i))
+ break
+ }
+ } else {
+ causes = append(causes, err)
+ }
+ }
+ if matched == -1 {
+ errors = append(errors, validationError("oneOf", "oneOf failed").add(causes...))
+ }
+ }
+
+ // if + then + else
+ if s.If != nil {
+ err := validateInplace(s.If, "if")
+ // "if" leaves dynamic scope
+ scope[len(scope)-1].discard = true
+ if err == nil {
+ if s.Then != nil {
+ if err := validateInplace(s.Then, "then"); err != nil {
+ errors = append(errors, validationError("then", "if-then failed").add(err))
+ }
+ }
+ } else {
+ if s.Else != nil {
+ if err := validateInplace(s.Else, "else"); err != nil {
+ errors = append(errors, validationError("else", "if-else failed").add(err))
+ }
+ }
+ }
+ // restore dynamic scope
+ scope[len(scope)-1].discard = false
+ }
+
+ for _, ext := range s.Extensions {
+ if err := ext.Validate(ValidationContext{result, validate, validateInplace, validationError}, v); err != nil {
+ errors = append(errors, err)
+ }
+ }
+
+ // UnevaluatedProperties + UnevaluatedItems
+ switch v := v.(type) {
+ case map[string]interface{}:
+ if s.UnevaluatedProperties != nil {
+ for pname := range result.unevalProps {
+ if pvalue, ok := v[pname]; ok {
+ if err := validate(s.UnevaluatedProperties, "unevaluatedProperties", pvalue, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+ result.unevalProps = nil
+ }
+ case []interface{}:
+ if s.UnevaluatedItems != nil {
+ for i := range result.unevalItems {
+ if err := validate(s.UnevaluatedItems, "unevaluatedItems", v[i], strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ result.unevalItems = nil
+ }
+ }
+
+ switch len(errors) {
+ case 0:
+ return result, nil
+ case 1:
+ return result, errors[0]
+ default:
+ return result, validationError("", "").add(errors...) // empty message, used just for wrapping
+ }
+}
+
+type validationResult struct {
+ unevalProps map[string]struct{}
+ unevalItems map[int]struct{}
+}
+
+func (vr validationResult) unevalPnames() string {
+ pnames := make([]string, 0, len(vr.unevalProps))
+ for pname := range vr.unevalProps {
+ pnames = append(pnames, quote(pname))
+ }
+ return strings.Join(pnames, ", ")
+}
+
+// jsonType returns the json type of given value v.
+//
+// It panics if the given value is not a valid json value.
+func jsonType(v interface{}) string {
+ switch v.(type) {
+ case nil:
+ return "null"
+ case bool:
+ return "boolean"
+ case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
+ return "number"
+ case string:
+ return "string"
+ case []interface{}:
+ return "array"
+ case map[string]interface{}:
+ return "object"
+ }
+ panic(InvalidJSONTypeError(fmt.Sprintf("%T", v)))
+}
+
+// equals tells if given two json values are equal or not.
+func equals(v1, v2 interface{}) bool {
+ v1Type := jsonType(v1)
+ if v1Type != jsonType(v2) {
+ return false
+ }
+ switch v1Type {
+ case "array":
+ arr1, arr2 := v1.([]interface{}), v2.([]interface{})
+ if len(arr1) != len(arr2) {
+ return false
+ }
+ for i := range arr1 {
+ if !equals(arr1[i], arr2[i]) {
+ return false
+ }
+ }
+ return true
+ case "object":
+ obj1, obj2 := v1.(map[string]interface{}), v2.(map[string]interface{})
+ if len(obj1) != len(obj2) {
+ return false
+ }
+ for k, v1 := range obj1 {
+ if v2, ok := obj2[k]; ok {
+ if !equals(v1, v2) {
+ return false
+ }
+ } else {
+ return false
+ }
+ }
+ return true
+ case "number":
+ num1, _ := new(big.Rat).SetString(fmt.Sprint(v1))
+ num2, _ := new(big.Rat).SetString(fmt.Sprint(v2))
+ return num1.Cmp(num2) == 0
+ default:
+ return v1 == v2
+ }
+}
+
+// escape converts given token to valid json-pointer token
+func escape(token string) string {
+ token = strings.ReplaceAll(token, "~", "~0")
+ token = strings.ReplaceAll(token, "/", "~1")
+ return url.PathEscape(token)
+}
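+
+// Illustrative sketch, not part of the upstream file: decoding an instance with
+// json.UseNumber before calling Validate, as the Validate doc comment above
+// recommends for number precision. validateDoc is a hypothetical helper.
+//
+//	func validateDoc(sch *Schema, raw []byte) error {
+//		dec := json.NewDecoder(bytes.NewReader(raw))
+//		dec.UseNumber() // keep numbers as json.Number for exact comparisons
+//		var doc interface{}
+//		if err := dec.Decode(&doc); err != nil {
+//			return err
+//		}
+//		return sch.Validate(doc)
+//	}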
diff --git a/vendor/github.com/xeipuuv/gojsonpointer/LICENSE-APACHE-2.0.txt b/vendor/github.com/xeipuuv/gojsonpointer/LICENSE-APACHE-2.0.txt
deleted file mode 100644
index 55ede8a..0000000
--- a/vendor/github.com/xeipuuv/gojsonpointer/LICENSE-APACHE-2.0.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2015 xeipuuv
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/xeipuuv/gojsonpointer/README.md b/vendor/github.com/xeipuuv/gojsonpointer/README.md
deleted file mode 100644
index a4f5f14..0000000
--- a/vendor/github.com/xeipuuv/gojsonpointer/README.md
+++ /dev/null
@@ -1,41 +0,0 @@
-# gojsonpointer
-An implementation of JSON Pointer - Go language
-
-## Usage
- jsonText := `{
- "name": "Bobby B",
- "occupation": {
- "title" : "King",
- "years" : 15,
- "heir" : "Joffrey B"
- }
- }`
-
- var jsonDocument map[string]interface{}
- json.Unmarshal([]byte(jsonText), &jsonDocument)
-
- //create a JSON pointer
- pointerString := "/occupation/title"
- pointer, _ := NewJsonPointer(pointerString)
-
- //SET a new value for the "title" in the document
- pointer.Set(jsonDocument, "Supreme Leader of Westeros")
-
- //GET the new "title" from the document
- title, _, _ := pointer.Get(jsonDocument)
- fmt.Println(title) //outputs "Supreme Leader of Westeros"
-
- //DELETE the "heir" from the document
- deletePointer := NewJsonPointer("/occupation/heir")
- deletePointer.Delete(jsonDocument)
-
- b, _ := json.Marshal(jsonDocument)
- fmt.Println(string(b))
- //outputs `{"name":"Bobby B","occupation":{"title":"Supreme Leader of Westeros","years":15}}`
-
-
-## References
-https://tools.ietf.org/html/rfc6901
-
-### Note
-The 4.Evaluation part of the previous reference, starting with 'If the currently referenced value is a JSON array, the reference token MUST contain either...' is not implemented.
diff --git a/vendor/github.com/xeipuuv/gojsonpointer/pointer.go b/vendor/github.com/xeipuuv/gojsonpointer/pointer.go
deleted file mode 100644
index 798c1f1..0000000
--- a/vendor/github.com/xeipuuv/gojsonpointer/pointer.go
+++ /dev/null
@@ -1,211 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonpointer
-// repository-desc An implementation of JSON Pointer - Go language
-//
-// description Main and unique file.
-//
-// created 25-02-2013
-
-package gojsonpointer
-
-import (
- "errors"
- "fmt"
- "reflect"
- "strconv"
- "strings"
-)
-
-const (
- const_empty_pointer = ``
- const_pointer_separator = `/`
-
- const_invalid_start = `JSON pointer must be empty or start with a "` + const_pointer_separator + `"`
-)
-
-type implStruct struct {
- mode string // "SET" or "GET"
-
- inDocument interface{}
-
- setInValue interface{}
-
- getOutNode interface{}
- getOutKind reflect.Kind
- outError error
-}
-
-type JsonPointer struct {
- referenceTokens []string
-}
-
-// NewJsonPointer parses the given string JSON pointer and returns an object
-func NewJsonPointer(jsonPointerString string) (p JsonPointer, err error) {
-
- // Pointer to the root of the document
- if len(jsonPointerString) == 0 {
- // Keep referenceTokens nil
- return
- }
- if jsonPointerString[0] != '/' {
- return p, errors.New(const_invalid_start)
- }
-
- p.referenceTokens = strings.Split(jsonPointerString[1:], const_pointer_separator)
- return
-}
-
-// Uses the pointer to retrieve a value from a JSON document
-func (p *JsonPointer) Get(document interface{}) (interface{}, reflect.Kind, error) {
-
- is := &implStruct{mode: "GET", inDocument: document}
- p.implementation(is)
- return is.getOutNode, is.getOutKind, is.outError
-
-}
-
-// Uses the pointer to update a value from a JSON document
-func (p *JsonPointer) Set(document interface{}, value interface{}) (interface{}, error) {
-
- is := &implStruct{mode: "SET", inDocument: document, setInValue: value}
- p.implementation(is)
- return document, is.outError
-
-}
-
-// Uses the pointer to delete a value from a JSON document
-func (p *JsonPointer) Delete(document interface{}) (interface{}, error) {
- is := &implStruct{mode: "DEL", inDocument: document}
- p.implementation(is)
- return document, is.outError
-}
-
-// Both Get and Set functions use the same implementation to avoid code duplication
-func (p *JsonPointer) implementation(i *implStruct) {
-
- kind := reflect.Invalid
-
- // Full document when empty
- if len(p.referenceTokens) == 0 {
- i.getOutNode = i.inDocument
- i.outError = nil
- i.getOutKind = kind
- i.outError = nil
- return
- }
-
- node := i.inDocument
-
- previousNodes := make([]interface{}, len(p.referenceTokens))
- previousTokens := make([]string, len(p.referenceTokens))
-
- for ti, token := range p.referenceTokens {
-
- isLastToken := ti == len(p.referenceTokens)-1
- previousNodes[ti] = node
- previousTokens[ti] = token
-
- switch v := node.(type) {
-
- case map[string]interface{}:
- decodedToken := decodeReferenceToken(token)
- if _, ok := v[decodedToken]; ok {
- node = v[decodedToken]
- if isLastToken && i.mode == "SET" {
- v[decodedToken] = i.setInValue
- } else if isLastToken && i.mode == "DEL" {
- delete(v, decodedToken)
- }
- } else if isLastToken && i.mode == "SET" {
- v[decodedToken] = i.setInValue
- } else {
- i.outError = fmt.Errorf("Object has no key '%s'", decodedToken)
- i.getOutKind = reflect.Map
- i.getOutNode = nil
- return
- }
-
- case []interface{}:
- tokenIndex, err := strconv.Atoi(token)
- if err != nil {
- i.outError = fmt.Errorf("Invalid array index '%s'", token)
- i.getOutKind = reflect.Slice
- i.getOutNode = nil
- return
- }
- if tokenIndex < 0 || tokenIndex >= len(v) {
- i.outError = fmt.Errorf("Out of bound array[0,%d] index '%d'", len(v), tokenIndex)
- i.getOutKind = reflect.Slice
- i.getOutNode = nil
- return
- }
-
- node = v[tokenIndex]
- if isLastToken && i.mode == "SET" {
- v[tokenIndex] = i.setInValue
- } else if isLastToken && i.mode == "DEL" {
- v[tokenIndex] = v[len(v)-1]
- v[len(v)-1] = nil
- v = v[:len(v)-1]
- previousNodes[ti-1].(map[string]interface{})[previousTokens[ti-1]] = v
- }
-
- default:
- i.outError = fmt.Errorf("Invalid token reference '%s'", token)
- i.getOutKind = reflect.ValueOf(node).Kind()
- i.getOutNode = nil
- return
- }
-
- }
-
- i.getOutNode = node
- i.getOutKind = reflect.ValueOf(node).Kind()
- i.outError = nil
-}
-
-// Pointer to string representation function
-func (p *JsonPointer) String() string {
-
- if len(p.referenceTokens) == 0 {
- return const_empty_pointer
- }
-
- pointerString := const_pointer_separator + strings.Join(p.referenceTokens, const_pointer_separator)
-
- return pointerString
-}
-
-// Specific JSON pointer encoding here
-// ~0 => ~
-// ~1 => /
-// ... and vice versa
-
-func decodeReferenceToken(token string) string {
- step1 := strings.Replace(token, `~1`, `/`, -1)
- step2 := strings.Replace(step1, `~0`, `~`, -1)
- return step2
-}
-
-func encodeReferenceToken(token string) string {
- step1 := strings.Replace(token, `~`, `~0`, -1)
- step2 := strings.Replace(step1, `/`, `~1`, -1)
- return step2
-}
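
For context, the escaping and traversal logic deleted above reduces to a few lines of standard-library Go. Below is a minimal, self-contained sketch of how a pointer such as `/occupation/title` resolves against a decoded document; the helper names `decodeToken` and `resolve` are illustrative only and not part of the removed package.

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
	"strings"
)

// decodeToken reverses the RFC 6901 escaping used above: ~1 -> "/" then ~0 -> "~".
func decodeToken(token string) string {
	return strings.ReplaceAll(strings.ReplaceAll(token, "~1", "/"), "~0", "~")
}

// resolve walks a decoded JSON document along the pointer's reference tokens,
// mirroring the GET path of the implementation deleted above.
func resolve(doc interface{}, pointer string) (interface{}, error) {
	if pointer == "" {
		return doc, nil // the empty pointer refers to the whole document
	}
	if !strings.HasPrefix(pointer, "/") {
		return nil, fmt.Errorf("pointer %q must be empty or start with '/'", pointer)
	}
	node := doc
	for _, token := range strings.Split(pointer[1:], "/") {
		switch v := node.(type) {
		case map[string]interface{}:
			child, ok := v[decodeToken(token)]
			if !ok {
				return nil, fmt.Errorf("object has no key %q", token)
			}
			node = child
		case []interface{}:
			i, err := strconv.Atoi(token)
			if err != nil || i < 0 || i >= len(v) {
				return nil, fmt.Errorf("invalid array index %q", token)
			}
			node = v[i]
		default:
			return nil, fmt.Errorf("cannot descend into %T with token %q", v, token)
		}
	}
	return node, nil
}

func main() {
	var doc interface{}
	_ = json.Unmarshal([]byte(`{"occupation":{"title":"King","aliases":["Bobby B"]}}`), &doc)
	title, _ := resolve(doc, "/occupation/title")
	alias, _ := resolve(doc, "/occupation/aliases/0")
	fmt.Println(title, alias) // King Bobby B
}
```
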
diff --git a/vendor/github.com/xeipuuv/gojsonreference/README.md b/vendor/github.com/xeipuuv/gojsonreference/README.md
deleted file mode 100644
index 9ab6e1e..0000000
--- a/vendor/github.com/xeipuuv/gojsonreference/README.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# gojsonreference
-An implementation of JSON Reference - Go language
-
-## Dependencies
-/~https://github.com/xeipuuv/gojsonpointer
-
-## References
-http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
-
-http://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03
diff --git a/vendor/github.com/xeipuuv/gojsonreference/reference.go b/vendor/github.com/xeipuuv/gojsonreference/reference.go
deleted file mode 100644
index 6457291..0000000
--- a/vendor/github.com/xeipuuv/gojsonreference/reference.go
+++ /dev/null
@@ -1,147 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonreference
-// repository-desc An implementation of JSON Reference - Go language
-//
-// description Main and unique file.
-//
-// created 26-02-2013
-
-package gojsonreference
-
-import (
- "errors"
- "net/url"
- "path/filepath"
- "runtime"
- "strings"
-
- "github.com/xeipuuv/gojsonpointer"
-)
-
-const (
- const_fragment_char = `#`
-)
-
-func NewJsonReference(jsonReferenceString string) (JsonReference, error) {
-
- var r JsonReference
- err := r.parse(jsonReferenceString)
- return r, err
-
-}
-
-type JsonReference struct {
- referenceUrl *url.URL
- referencePointer gojsonpointer.JsonPointer
-
- HasFullUrl bool
- HasUrlPathOnly bool
- HasFragmentOnly bool
- HasFileScheme bool
- HasFullFilePath bool
-}
-
-func (r *JsonReference) GetUrl() *url.URL {
- return r.referenceUrl
-}
-
-func (r *JsonReference) GetPointer() *gojsonpointer.JsonPointer {
- return &r.referencePointer
-}
-
-func (r *JsonReference) String() string {
-
- if r.referenceUrl != nil {
- return r.referenceUrl.String()
- }
-
- if r.HasFragmentOnly {
- return const_fragment_char + r.referencePointer.String()
- }
-
- return r.referencePointer.String()
-}
-
-func (r *JsonReference) IsCanonical() bool {
- return (r.HasFileScheme && r.HasFullFilePath) || (!r.HasFileScheme && r.HasFullUrl)
-}
-
-// "Constructor", parses the given string JSON reference
-func (r *JsonReference) parse(jsonReferenceString string) (err error) {
-
- r.referenceUrl, err = url.Parse(jsonReferenceString)
- if err != nil {
- return
- }
- refUrl := r.referenceUrl
-
- if refUrl.Scheme != "" && refUrl.Host != "" {
- r.HasFullUrl = true
- } else {
- if refUrl.Path != "" {
- r.HasUrlPathOnly = true
- } else if refUrl.RawQuery == "" && refUrl.Fragment != "" {
- r.HasFragmentOnly = true
- }
- }
-
- r.HasFileScheme = refUrl.Scheme == "file"
- if runtime.GOOS == "windows" {
- // on Windows, a file URL may have an extra leading slash, and if it
- // doesn't then its first component will be treated as the host by the
- // Go runtime
- if refUrl.Host == "" && strings.HasPrefix(refUrl.Path, "/") {
- r.HasFullFilePath = filepath.IsAbs(refUrl.Path[1:])
- } else {
- r.HasFullFilePath = filepath.IsAbs(refUrl.Host + refUrl.Path)
- }
- } else {
- r.HasFullFilePath = filepath.IsAbs(refUrl.Path)
- }
-
- // invalid json-pointer error means url has no json-pointer fragment. simply ignore error
- r.referencePointer, _ = gojsonpointer.NewJsonPointer(refUrl.Fragment)
-
- return
-}
-
-// Creates a new reference from a parent and a child
-// If the child cannot inherit from the parent, an error is returned
-func (r *JsonReference) Inherits(child JsonReference) (*JsonReference, error) {
- if child.GetUrl() == nil {
- return nil, errors.New("childUrl is nil!")
- }
-
- if r.GetUrl() == nil {
- return nil, errors.New("parentUrl is nil!")
- }
-
- // Get a copy of the parent url to make sure we do not modify the original.
- // URL reference resolving fails if the fragment of the child is empty, but the parent's is not.
- // The fragment of the child must be used, so the fragment of the parent is manually removed.
- parentUrl := *r.GetUrl()
- parentUrl.Fragment = ""
-
- ref, err := NewJsonReference(parentUrl.ResolveReference(child.GetUrl()).String())
- if err != nil {
- return nil, err
- }
- return &ref, err
-}
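
The `Inherits` method above is essentially `net/url` reference resolution with the parent's fragment cleared so that the child's fragment (its JSON pointer) wins. A small sketch under that reading; the `inherit` helper is illustrative and not an API of the removed package.

```go
package main

import (
	"fmt"
	"net/url"
)

// inherit resolves a child reference against a parent reference. The parent's
// fragment is dropped before resolution, as in the Inherits method above.
func inherit(parent, child string) (string, error) {
	p, err := url.Parse(parent)
	if err != nil {
		return "", err
	}
	c, err := url.Parse(child)
	if err != nil {
		return "", err
	}
	p.Fragment = "" // keep only the child's fragment after resolution
	return p.ResolveReference(c).String(), nil
}

func main() {
	ref, err := inherit(
		"http://example.com/schemas/main.json#/definitions/base",
		"other.json#/definitions/name",
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(ref) // http://example.com/schemas/other.json#/definitions/name
}
```
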
diff --git a/vendor/github.com/xeipuuv/gojsonschema/.gitignore b/vendor/github.com/xeipuuv/gojsonschema/.gitignore
deleted file mode 100644
index 68e993c..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*.sw[nop]
-*.iml
-.vscode/
diff --git a/vendor/github.com/xeipuuv/gojsonschema/.travis.yml b/vendor/github.com/xeipuuv/gojsonschema/.travis.yml
deleted file mode 100644
index 3289001..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/.travis.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-language: go
-go:
- - "1.11"
- - "1.12"
- - "1.13"
-before_install:
- - go get github.com/xeipuuv/gojsonreference
- - go get github.com/xeipuuv/gojsonpointer
- - go get github.com/stretchr/testify/assert
diff --git a/vendor/github.com/xeipuuv/gojsonschema/LICENSE-APACHE-2.0.txt b/vendor/github.com/xeipuuv/gojsonschema/LICENSE-APACHE-2.0.txt
deleted file mode 100644
index 55ede8a..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/LICENSE-APACHE-2.0.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2015 xeipuuv
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/xeipuuv/gojsonschema/README.md b/vendor/github.com/xeipuuv/gojsonschema/README.md
deleted file mode 100644
index 758f26d..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/README.md
+++ /dev/null
@@ -1,466 +0,0 @@
-[GoDoc](https://godoc.org/github.com/xeipuuv/gojsonschema)
-[Build Status](https://travis-ci.org/xeipuuv/gojsonschema)
-[Go Report Card](https://goreportcard.com/report/github.com/xeipuuv/gojsonschema)
-
-# gojsonschema
-
-## Description
-
-An implementation of JSON Schema for the Go programming language. Supports draft-04, draft-06 and draft-07.
-
-References :
-
-* http://json-schema.org
-* http://json-schema.org/latest/json-schema-core.html
-* http://json-schema.org/latest/json-schema-validation.html
-
-## Installation
-
-```
-go get github.com/xeipuuv/gojsonschema
-```
-
-Dependencies :
-* [github.com/xeipuuv/gojsonpointer](/~https://github.com/xeipuuv/gojsonpointer)
-* [github.com/xeipuuv/gojsonreference](/~https://github.com/xeipuuv/gojsonreference)
-* [github.com/stretchr/testify/assert](/~https://github.com/stretchr/testify#assert-package)
-
-## Usage
-
-### Example
-
-```go
-
-package main
-
-import (
- "fmt"
- "github.com/xeipuuv/gojsonschema"
-)
-
-func main() {
-
- schemaLoader := gojsonschema.NewReferenceLoader("file:///home/me/schema.json")
- documentLoader := gojsonschema.NewReferenceLoader("file:///home/me/document.json")
-
- result, err := gojsonschema.Validate(schemaLoader, documentLoader)
- if err != nil {
- panic(err.Error())
- }
-
- if result.Valid() {
- fmt.Printf("The document is valid\n")
- } else {
- fmt.Printf("The document is not valid. see errors :\n")
- for _, desc := range result.Errors() {
- fmt.Printf("- %s\n", desc)
- }
- }
-}
-
-
-```
-
-#### Loaders
-
-There are various ways to load your JSON data.
-In order to load your schemas and documents,
-first declare an appropriate loader :
-
-* Web / HTTP, using a reference :
-
-```go
-loader := gojsonschema.NewReferenceLoader("http://www.some_host.com/schema.json")
-```
-
-* Local file, using a reference :
-
-```go
-loader := gojsonschema.NewReferenceLoader("file:///home/me/schema.json")
-```
-
-References use the URI scheme; the prefix (file://) and a full path to the file are required.
-
-* JSON strings :
-
-```go
-loader := gojsonschema.NewStringLoader(`{"type": "string"}`)
-```
-
-* Custom Go types :
-
-```go
-m := map[string]interface{}{"type": "string"}
-loader := gojsonschema.NewGoLoader(m)
-```
-
-And
-
-```go
-type Root struct {
- Users []User `json:"users"`
-}
-
-type User struct {
- Name string `json:"name"`
-}
-
-...
-
-data := Root{}
-data.Users = append(data.Users, User{"John"})
-data.Users = append(data.Users, User{"Sophia"})
-data.Users = append(data.Users, User{"Bill"})
-
-loader := gojsonschema.NewGoLoader(data)
-```
-
-#### Validation
-
-Once the loaders are set, validation is easy :
-
-```go
-result, err := gojsonschema.Validate(schemaLoader, documentLoader)
-```
-
-Alternatively, you might want to load a schema only once and perform multiple validations :
-
-```go
-schema, err := gojsonschema.NewSchema(schemaLoader)
-...
-result1, err := schema.Validate(documentLoader1)
-...
-result2, err := schema.Validate(documentLoader2)
-...
-// etc ...
-```
-
-To check the result :
-
-```go
- if result.Valid() {
- fmt.Printf("The document is valid\n")
- } else {
- fmt.Printf("The document is not valid. see errors :\n")
- for _, err := range result.Errors() {
- // Err implements the ResultError interface
- fmt.Printf("- %s\n", err)
- }
- }
-```
-
-
-## Loading local schemas
-
-By default `file` and `http(s)` references to external schemas are loaded automatically via the file system or via http(s). An external schema can also be loaded using a `SchemaLoader`.
-
-```go
- sl := gojsonschema.NewSchemaLoader()
- loader1 := gojsonschema.NewStringLoader(`{ "type" : "string" }`)
- err := sl.AddSchema("http://some_host.com/string.json", loader1)
-```
-
-Alternatively, if your schema already has an `$id`, you can use the `AddSchemas` function
-```go
- loader2 := gojsonschema.NewStringLoader(`{
- "$id" : "http://some_host.com/maxlength.json",
- "maxLength" : 5
- }`)
- err = sl.AddSchemas(loader2)
-```
-
-The main schema should be passed to the `Compile` function. This main schema can then directly reference the added schemas without needing to download them.
-```go
- loader3 := gojsonschema.NewStringLoader(`{
- "$id" : "http://some_host.com/main.json",
- "allOf" : [
- { "$ref" : "http://some_host.com/string.json" },
- { "$ref" : "http://some_host.com/maxlength.json" }
- ]
- }`)
-
- schema, err := sl.Compile(loader3)
-
- documentLoader := gojsonschema.NewStringLoader(`"hello world"`)
-
- result, err := schema.Validate(documentLoader)
-```
-
-It's also possible to pass a `ReferenceLoader` to the `Compile` function that references a loaded schema.
-
-```go
-err = sl.AddSchemas(loader3)
-schema, err := sl.Compile(gojsonschema.NewReferenceLoader("http://some_host.com/main.json"))
-```
-
-Schemas added by `AddSchema` and `AddSchemas` are only validated when the entire schema is compiled, unless meta-schema validation is used.
-
-## Using a specific draft
-By default `gojsonschema` will try to detect the draft of a schema by using the `$schema` keyword and parse it in a strict draft-04, draft-06 or draft-07 mode. If `$schema` is missing, or the draft version is not explicitly set, a hybrid mode is used which merges together functionality of all drafts into one mode.
-
-Autodetection can be turned off with the `AutoDetect` property. Specific draft versions can be specified with the `Draft` property.
-
-```go
-sl := gojsonschema.NewSchemaLoader()
-sl.Draft = gojsonschema.Draft7
-sl.AutoDetect = false
-```
-
-If autodetection is on (default), a draft-07 schema can safely reference draft-04 schemas and vice versa, as long as `$schema` is specified in all schemas.
-
-## Meta-schema validation
-Schemas that are added using `AddSchema`, `AddSchemas` and `Compile` can be validated against their meta-schema by setting the `Validate` property.
-
-The following example will produce an error as `multipleOf` must be a number. If `Validate` is off (default), this error is only returned at the `Compile` step.
-
-```go
-sl := gojsonschema.NewSchemaLoader()
-sl.Validate = true
-err := sl.AddSchemas(gojsonschema.NewStringLoader(`{
- "$id" : "http://some_host.com/invalid.json",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "multipleOf" : true
-}`))
-```
-
-Errors returned by meta-schema validation are more readable and contain more information, which helps significantly if you are developing a schema.
-
-Meta-schema validation also works with a custom `$schema`. In case `$schema` is missing, or `AutoDetect` is set to `false`, the meta-schema of the used draft is used.
-
-
-## Working with Errors
-
-The library handles string error codes which you can customize by creating your own gojsonschema.locale and setting it
-```go
-gojsonschema.Locale = YourCustomLocale{}
-```
-
-However, each error contains additional contextual information.
-
-Newer versions of `gojsonschema` may have new additional errors, so code that uses a custom locale will need to be updated when this happens.
-
-**err.Type()**: *string* Returns the "type" of error that occurred. Note you can also type check. See below
-
-Note: An error of type RequiredError has an err.Type() return value of "required"
-
- "required": RequiredError
- "invalid_type": InvalidTypeError
- "number_any_of": NumberAnyOfError
- "number_one_of": NumberOneOfError
- "number_all_of": NumberAllOfError
- "number_not": NumberNotError
- "missing_dependency": MissingDependencyError
- "internal": InternalError
- "const": ConstError
- "enum": EnumError
- "array_no_additional_items": ArrayNoAdditionalItemsError
- "array_min_items": ArrayMinItemsError
- "array_max_items": ArrayMaxItemsError
- "unique": ItemsMustBeUniqueError
- "contains" : ArrayContainsError
- "array_min_properties": ArrayMinPropertiesError
- "array_max_properties": ArrayMaxPropertiesError
- "additional_property_not_allowed": AdditionalPropertyNotAllowedError
- "invalid_property_pattern": InvalidPropertyPatternError
- "invalid_property_name": InvalidPropertyNameError
- "string_gte": StringLengthGTEError
- "string_lte": StringLengthLTEError
- "pattern": DoesNotMatchPatternError
- "multiple_of": MultipleOfError
- "number_gte": NumberGTEError
- "number_gt": NumberGTError
- "number_lte": NumberLTEError
- "number_lt": NumberLTError
- "condition_then" : ConditionThenError
- "condition_else" : ConditionElseError
-
-**err.Value()**: *interface{}* Returns the value given
-
-**err.Context()**: *gojsonschema.JsonContext* Returns the context. This has a String() method that will print something like this: (root).firstName
-
-**err.Field()**: *string* Returns the fieldname in the format firstName, or for embedded properties, person.firstName. This returns the same as the String() method on *err.Context()* but removes the (root). prefix.
-
-**err.Description()**: *string* The error description. This is based on the locale you are using. See the beginning of this section for overwriting the locale with a custom implementation.
-
-**err.DescriptionFormat()**: *string* The error description format. This is relevant if you are adding custom validation errors afterwards to the result.
-
-**err.Details()**: *gojsonschema.ErrorDetails* Returns a map[string]interface{} of additional error details specific to the error. For example, GTE errors will have a "min" value, LTE will have a "max" value. See errors.go for a full description of all the error details. Every error always contains a "field" key that holds the value of *err.Field()*
-
-Note that in most cases err.Details() will be used to generate replacement strings in your locales, and not used directly. These strings follow the text/template format, i.e.
-```
-{{.field}} must be greater than or equal to {{.min}}
-```
-
-The library allows you to specify custom template functions, should you require more complex error message handling.
-```go
-gojsonschema.ErrorTemplateFuncs = map[string]interface{}{
- "allcaps": func(s string) string {
- return strings.ToUpper(s)
- },
-}
-```
-
-Given the above definition, you can use the custom function `"allcaps"` in your localization templates:
-```
-{{allcaps .field}} must be greater than or equal to {{.min}}
-```
-
-The above error message would then be rendered with the `field` value in capital letters. For example:
-```
-"PASSWORD must be greater than or equal to 8"
-```
-
-Learn more about what types of template functions you can use in `ErrorTemplateFuncs` by referring to Go's [text/template FuncMap](https://golang.org/pkg/text/template/#FuncMap) type.
-
-## Formats
-JSON Schema allows for an optional "format" property to validate instances against well-known formats. gojsonschema ships with most of the formats defined in the spec, which you can use like this:
-
-````json
-{"type": "string", "format": "email"}
-````
-
-Not all formats defined in draft-07 are available. Implemented formats are:
-
-* `date`
-* `time`
-* `date-time`
-* `hostname`. Subdomains that start with a number are also supported, but this means that it doesn't strictly follow [RFC1034](http://tools.ietf.org/html/rfc1034#section-3.5) and has the implication that ipv4 addresses are also recognized as valid hostnames.
-* `email`. Go's email parser deviates slightly from [RFC5322](https://tools.ietf.org/html/rfc5322). Includes unicode support.
-* `idn-email`. Same caveat as `email`.
-* `ipv4`
-* `ipv6`
-* `uri`. Includes unicode support.
-* `uri-reference`. Includes unicode support.
-* `iri`
-* `iri-reference`
-* `uri-template`
-* `uuid`
-* `regex`. Go uses the [RE2](/~https://github.com/google/re2/wiki/Syntax) engine and is not [ECMA262](http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-262.pdf) compatible.
-* `json-pointer`
-* `relative-json-pointer`
-
-`email`, `uri` and `uri-reference` use the same validation code as their unicode counterparts `idn-email`, `iri` and `iri-reference`. If you rely on unicode support you should use the specific
-unicode enabled formats for the sake of interoperability as other implementations might not support unicode in the regular formats.
-
-The validation code for `uri`, `idn-email` and their relatives uses mostly standard library code.
-
-For repetitive or more complex formats, you can create custom format checkers and add them to gojsonschema like this:
-
-```go
-// Define the format checker
-type RoleFormatChecker struct {}
-
-// Ensure it meets the gojsonschema.FormatChecker interface
-func (f RoleFormatChecker) IsFormat(input interface{}) bool {
-
- asString, ok := input.(string)
- if ok == false {
- return false
- }
-
- return strings.HasPrefix(asString, "ROLE_")
-}
-
-// Add it to the library
-gojsonschema.FormatCheckers.Add("role", RoleFormatChecker{})
-```
-
-Now to use in your json schema:
-````json
-{"type": "string", "format": "role"}
-````
-
-Another example would be to check if the provided integer matches an ID in a database:
-
-JSON schema:
-```json
-{"type": "integer", "format": "ValidUserId"}
-```
-
-```go
-// Define the format checker
-type ValidUserIdFormatChecker struct {}
-
-// Ensure it meets the gojsonschema.FormatChecker interface
-func (f ValidUserIdFormatChecker) IsFormat(input interface{}) bool {
-
- asFloat64, ok := input.(float64) // Numbers are always float64 here
- if ok == false {
- return false
- }
-
- // XXX
- // do the magic on the database looking for the int(asFloat64)
-
- return true
-}
-
-// Add it to the library
-gojsonschema.FormatCheckers.Add("ValidUserId", ValidUserIdFormatChecker{})
-```
-
-Formats can also be removed, for example if you want to override one of the formats that is defined by default.
-
-```go
-gojsonschema.FormatCheckers.Remove("hostname")
-```
-
-
-## Additional custom validation
-After the validation has run and you have the results, you may add additional
-errors using `Result.AddError`. This is useful to maintain the same format within the resultset instead
-of having to add special exceptions for your own errors. Below is an example.
-
-```go
-type AnswerInvalidError struct {
- gojsonschema.ResultErrorFields
-}
-
-func newAnswerInvalidError(context *gojsonschema.JsonContext, value interface{}, details gojsonschema.ErrorDetails) *AnswerInvalidError {
- err := AnswerInvalidError{}
- err.SetContext(context)
- err.SetType("custom_invalid_error")
- // It is important to use SetDescriptionFormat(), as it is parsed and then used to set the description
- // via SetDescription(); a description set directly on err would be overridden by this.
- err.SetDescriptionFormat("Answer to the Ultimate Question of Life, the Universe, and Everything is {{.answer}}")
- err.SetValue(value)
- err.SetDetails(details)
-
- return &err
-}
-
-func main() {
- // ...
- schema, err := gojsonschema.NewSchema(schemaLoader)
- result, err := gojsonschema.Validate(schemaLoader, documentLoader)
-
- if true { // some validation
- jsonContext := gojsonschema.NewJsonContext("question", nil)
- errDetail := gojsonschema.ErrorDetails{
- "answer": 42,
- }
- result.AddError(
- newAnswerInvalidError(
- gojsonschema.NewJsonContext("answer", jsonContext),
- 52,
- errDetail,
- ),
- errDetail,
- )
- }
-
- return result, err
-
-}
-```
-
-This is especially useful if you want to add validation beyond what the
-json schema drafts can provide, such as business-specific logic.
-
-## Uses
-
-gojsonschema uses the following test suite :
-
-/~https://github.com/json-schema/JSON-Schema-Test-Suite
diff --git a/vendor/github.com/xeipuuv/gojsonschema/draft.go b/vendor/github.com/xeipuuv/gojsonschema/draft.go
deleted file mode 100644
index 61298e7..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/draft.go
+++ /dev/null
@@ -1,125 +0,0 @@
-// Copyright 2018 johandorland ( /~https://github.com/johandorland )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package gojsonschema
-
-import (
- "errors"
- "math"
- "reflect"
-
- "github.com/xeipuuv/gojsonreference"
-)
-
-// Draft is a JSON-schema draft version
-type Draft int
-
-// Supported Draft versions
-const (
- Draft4 Draft = 4
- Draft6 Draft = 6
- Draft7 Draft = 7
- Hybrid Draft = math.MaxInt32
-)
-
-type draftConfig struct {
- Version Draft
- MetaSchemaURL string
- MetaSchema string
-}
-type draftConfigs []draftConfig
-
-var drafts draftConfigs
-
-func init() {
- drafts = []draftConfig{
- {
- Version: Draft4,
- MetaSchemaURL: "http://json-schema.org/draft-04/schema",
- MetaSchema: `{"id":"http://json-schema.org/draft-04/schema#","$schema":"http://json-schema.org/draft-04/schema#","description":"Core schema meta-schema","definitions":{"schemaArray":{"type":"array","minItems":1,"items":{"$ref":"#"}},"positiveInteger":{"type":"integer","minimum":0},"positiveIntegerDefault0":{"allOf":[{"$ref":"#/definitions/positiveInteger"},{"default":0}]},"simpleTypes":{"enum":["array","boolean","integer","null","number","object","string"]},"stringArray":{"type":"array","items":{"type":"string"},"minItems":1,"uniqueItems":true}},"type":"object","properties":{"id":{"type":"string"},"$schema":{"type":"string"},"title":{"type":"string"},"description":{"type":"string"},"default":{},"multipleOf":{"type":"number","minimum":0,"exclusiveMinimum":true},"maximum":{"type":"number"},"exclusiveMaximum":{"type":"boolean","default":false},"minimum":{"type":"number"},"exclusiveMinimum":{"type":"boolean","default":false},"maxLength":{"$ref":"#/definitions/positiveInteger"},"minLength":{"$ref":"#/definitions/positiveIntegerDefault0"},"pattern":{"type":"string","format":"regex"},"additionalItems":{"anyOf":[{"type":"boolean"},{"$ref":"#"}],"default":{}},"items":{"anyOf":[{"$ref":"#"},{"$ref":"#/definitions/schemaArray"}],"default":{}},"maxItems":{"$ref":"#/definitions/positiveInteger"},"minItems":{"$ref":"#/definitions/positiveIntegerDefault0"},"uniqueItems":{"type":"boolean","default":false},"maxProperties":{"$ref":"#/definitions/positiveInteger"},"minProperties":{"$ref":"#/definitions/positiveIntegerDefault0"},"required":{"$ref":"#/definitions/stringArray"},"additionalProperties":{"anyOf":[{"type":"boolean"},{"$ref":"#"}],"default":{}},"definitions":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"properties":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"patternProperties":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"dependencies":{"type":"object","additionalProperties":{"anyOf":[{"$ref":"#"},{"$ref":"#/definitions/stringArray"}]}},"enum":{"type":"array","minItems":1,"uniqueItems":true},"type":{"anyOf":[{"$ref":"#/definitions/simpleTypes"},{"type":"array","items":{"$ref":"#/definitions/simpleTypes"},"minItems":1,"uniqueItems":true}]},"format":{"type":"string"},"allOf":{"$ref":"#/definitions/schemaArray"},"anyOf":{"$ref":"#/definitions/schemaArray"},"oneOf":{"$ref":"#/definitions/schemaArray"},"not":{"$ref":"#"}},"dependencies":{"exclusiveMaximum":["maximum"],"exclusiveMinimum":["minimum"]},"default":{}}`,
- },
- {
- Version: Draft6,
- MetaSchemaURL: "http://json-schema.org/draft-06/schema",
- MetaSchema: `{"$schema":"http://json-schema.org/draft-06/schema#","$id":"http://json-schema.org/draft-06/schema#","title":"Core schema meta-schema","definitions":{"schemaArray":{"type":"array","minItems":1,"items":{"$ref":"#"}},"nonNegativeInteger":{"type":"integer","minimum":0},"nonNegativeIntegerDefault0":{"allOf":[{"$ref":"#/definitions/nonNegativeInteger"},{"default":0}]},"simpleTypes":{"enum":["array","boolean","integer","null","number","object","string"]},"stringArray":{"type":"array","items":{"type":"string"},"uniqueItems":true,"default":[]}},"type":["object","boolean"],"properties":{"$id":{"type":"string","format":"uri-reference"},"$schema":{"type":"string","format":"uri"},"$ref":{"type":"string","format":"uri-reference"},"title":{"type":"string"},"description":{"type":"string"},"default":{},"examples":{"type":"array","items":{}},"multipleOf":{"type":"number","exclusiveMinimum":0},"maximum":{"type":"number"},"exclusiveMaximum":{"type":"number"},"minimum":{"type":"number"},"exclusiveMinimum":{"type":"number"},"maxLength":{"$ref":"#/definitions/nonNegativeInteger"},"minLength":{"$ref":"#/definitions/nonNegativeIntegerDefault0"},"pattern":{"type":"string","format":"regex"},"additionalItems":{"$ref":"#"},"items":{"anyOf":[{"$ref":"#"},{"$ref":"#/definitions/schemaArray"}],"default":{}},"maxItems":{"$ref":"#/definitions/nonNegativeInteger"},"minItems":{"$ref":"#/definitions/nonNegativeIntegerDefault0"},"uniqueItems":{"type":"boolean","default":false},"contains":{"$ref":"#"},"maxProperties":{"$ref":"#/definitions/nonNegativeInteger"},"minProperties":{"$ref":"#/definitions/nonNegativeIntegerDefault0"},"required":{"$ref":"#/definitions/stringArray"},"additionalProperties":{"$ref":"#"},"definitions":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"properties":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"patternProperties":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"dependencies":{"type":"object","additionalProperties":{"anyOf":[{"$ref":"#"},{"$ref":"#/definitions/stringArray"}]}},"propertyNames":{"$ref":"#"},"const":{},"enum":{"type":"array","minItems":1,"uniqueItems":true},"type":{"anyOf":[{"$ref":"#/definitions/simpleTypes"},{"type":"array","items":{"$ref":"#/definitions/simpleTypes"},"minItems":1,"uniqueItems":true}]},"format":{"type":"string"},"allOf":{"$ref":"#/definitions/schemaArray"},"anyOf":{"$ref":"#/definitions/schemaArray"},"oneOf":{"$ref":"#/definitions/schemaArray"},"not":{"$ref":"#"}},"default":{}}`,
- },
- {
- Version: Draft7,
- MetaSchemaURL: "http://json-schema.org/draft-07/schema",
- MetaSchema: `{"$schema":"http://json-schema.org/draft-07/schema#","$id":"http://json-schema.org/draft-07/schema#","title":"Core schema meta-schema","definitions":{"schemaArray":{"type":"array","minItems":1,"items":{"$ref":"#"}},"nonNegativeInteger":{"type":"integer","minimum":0},"nonNegativeIntegerDefault0":{"allOf":[{"$ref":"#/definitions/nonNegativeInteger"},{"default":0}]},"simpleTypes":{"enum":["array","boolean","integer","null","number","object","string"]},"stringArray":{"type":"array","items":{"type":"string"},"uniqueItems":true,"default":[]}},"type":["object","boolean"],"properties":{"$id":{"type":"string","format":"uri-reference"},"$schema":{"type":"string","format":"uri"},"$ref":{"type":"string","format":"uri-reference"},"$comment":{"type":"string"},"title":{"type":"string"},"description":{"type":"string"},"default":true,"readOnly":{"type":"boolean","default":false},"examples":{"type":"array","items":true},"multipleOf":{"type":"number","exclusiveMinimum":0},"maximum":{"type":"number"},"exclusiveMaximum":{"type":"number"},"minimum":{"type":"number"},"exclusiveMinimum":{"type":"number"},"maxLength":{"$ref":"#/definitions/nonNegativeInteger"},"minLength":{"$ref":"#/definitions/nonNegativeIntegerDefault0"},"pattern":{"type":"string","format":"regex"},"additionalItems":{"$ref":"#"},"items":{"anyOf":[{"$ref":"#"},{"$ref":"#/definitions/schemaArray"}],"default":true},"maxItems":{"$ref":"#/definitions/nonNegativeInteger"},"minItems":{"$ref":"#/definitions/nonNegativeIntegerDefault0"},"uniqueItems":{"type":"boolean","default":false},"contains":{"$ref":"#"},"maxProperties":{"$ref":"#/definitions/nonNegativeInteger"},"minProperties":{"$ref":"#/definitions/nonNegativeIntegerDefault0"},"required":{"$ref":"#/definitions/stringArray"},"additionalProperties":{"$ref":"#"},"definitions":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"properties":{"type":"object","additionalProperties":{"$ref":"#"},"default":{}},"patternProperties":{"type":"object","additionalProperties":{"$ref":"#"},"propertyNames":{"format":"regex"},"default":{}},"dependencies":{"type":"object","additionalProperties":{"anyOf":[{"$ref":"#"},{"$ref":"#/definitions/stringArray"}]}},"propertyNames":{"$ref":"#"},"const":true,"enum":{"type":"array","items":true,"minItems":1,"uniqueItems":true},"type":{"anyOf":[{"$ref":"#/definitions/simpleTypes"},{"type":"array","items":{"$ref":"#/definitions/simpleTypes"},"minItems":1,"uniqueItems":true}]},"format":{"type":"string"},"contentMediaType":{"type":"string"},"contentEncoding":{"type":"string"},"if":{"$ref":"#"},"then":{"$ref":"#"},"else":{"$ref":"#"},"allOf":{"$ref":"#/definitions/schemaArray"},"anyOf":{"$ref":"#/definitions/schemaArray"},"oneOf":{"$ref":"#/definitions/schemaArray"},"not":{"$ref":"#"}},"default":true}`,
- },
- }
-}
-
-func (dc draftConfigs) GetMetaSchema(url string) string {
- for _, config := range dc {
- if config.MetaSchemaURL == url {
- return config.MetaSchema
- }
- }
- return ""
-}
-func (dc draftConfigs) GetDraftVersion(url string) *Draft {
- for _, config := range dc {
- if config.MetaSchemaURL == url {
- return &config.Version
- }
- }
- return nil
-}
-func (dc draftConfigs) GetSchemaURL(draft Draft) string {
- for _, config := range dc {
- if config.Version == draft {
- return config.MetaSchemaURL
- }
- }
- return ""
-}
-
-func parseSchemaURL(documentNode interface{}) (string, *Draft, error) {
-
- if isKind(documentNode, reflect.Bool) {
- return "", nil, nil
- }
-
- if !isKind(documentNode, reflect.Map) {
- return "", nil, errors.New("schema is invalid")
- }
-
- m := documentNode.(map[string]interface{})
-
- if existsMapKey(m, KEY_SCHEMA) {
- if !isKind(m[KEY_SCHEMA], reflect.String) {
- return "", nil, errors.New(formatErrorDescription(
- Locale.MustBeOfType(),
- ErrorDetails{
- "key": KEY_SCHEMA,
- "type": TYPE_STRING,
- },
- ))
- }
-
- schemaReference, err := gojsonreference.NewJsonReference(m[KEY_SCHEMA].(string))
-
- if err != nil {
- return "", nil, err
- }
-
- schema := schemaReference.String()
-
- return schema, drafts.GetDraftVersion(schema), nil
- }
-
- return "", nil, nil
-}
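
The draft autodetection deleted here keys off the top-level `$schema` value and maps known meta-schema URLs to a draft version. A rough standalone sketch of that idea follows; the `detectDraft` helper, its string return values, and the URL table are illustrative only, not the removed package's API.

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// detectDraft inspects the top-level "$schema" keyword, as the deleted
// parseSchemaURL does, and maps known meta-schema URLs to a draft name.
func detectDraft(schemaJSON []byte) (string, error) {
	var doc map[string]interface{}
	if err := json.Unmarshal(schemaJSON, &doc); err != nil {
		return "", err
	}
	raw, ok := doc["$schema"].(string)
	if !ok {
		return "hybrid", nil // no $schema: fall back to the permissive hybrid mode
	}
	// Trim an optional trailing "#" so both spellings of the URL match.
	switch strings.TrimSuffix(raw, "#") {
	case "http://json-schema.org/draft-04/schema":
		return "draft-04", nil
	case "http://json-schema.org/draft-06/schema":
		return "draft-06", nil
	case "http://json-schema.org/draft-07/schema":
		return "draft-07", nil
	}
	return "hybrid", nil
}

func main() {
	draft, _ := detectDraft([]byte(`{"$schema":"http://json-schema.org/draft-07/schema#","type":"object"}`))
	fmt.Println(draft) // draft-07
}
```
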
diff --git a/vendor/github.com/xeipuuv/gojsonschema/errors.go b/vendor/github.com/xeipuuv/gojsonschema/errors.go
deleted file mode 100644
index e4e9814..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/errors.go
+++ /dev/null
@@ -1,364 +0,0 @@
-package gojsonschema
-
-import (
- "bytes"
- "sync"
- "text/template"
-)
-
-var errorTemplates = errorTemplate{template.New("errors-new"), sync.RWMutex{}}
-
-// template.Template is not thread-safe for writing, so some locking is done
-// sync.RWMutex is used for efficiently locking when new templates are created
-type errorTemplate struct {
- *template.Template
- sync.RWMutex
-}
-
-type (
-
- // FalseError. ErrorDetails: -
- FalseError struct {
- ResultErrorFields
- }
-
- // RequiredError indicates that a required field is missing
- // ErrorDetails: property string
- RequiredError struct {
- ResultErrorFields
- }
-
- // InvalidTypeError indicates that a field has the incorrect type
- // ErrorDetails: expected, given
- InvalidTypeError struct {
- ResultErrorFields
- }
-
- // NumberAnyOfError is produced in case of a failing "anyOf" validation
- // ErrorDetails: -
- NumberAnyOfError struct {
- ResultErrorFields
- }
-
- // NumberOneOfError is produced in case of a failing "oneOf" validation
- // ErrorDetails: -
- NumberOneOfError struct {
- ResultErrorFields
- }
-
- // NumberAllOfError is produced in case of a failing "allOf" validation
- // ErrorDetails: -
- NumberAllOfError struct {
- ResultErrorFields
- }
-
- // NumberNotError is produced if a "not" validation failed
- // ErrorDetails: -
- NumberNotError struct {
- ResultErrorFields
- }
-
- // MissingDependencyError is produced in case of a "missing dependency" problem
- // ErrorDetails: dependency
- MissingDependencyError struct {
- ResultErrorFields
- }
-
- // InternalError indicates an internal error
- // ErrorDetails: error
- InternalError struct {
- ResultErrorFields
- }
-
- // ConstError indicates a const error
- // ErrorDetails: allowed
- ConstError struct {
- ResultErrorFields
- }
-
- // EnumError indicates an enum error
- // ErrorDetails: allowed
- EnumError struct {
- ResultErrorFields
- }
-
- // ArrayNoAdditionalItemsError is produced if additional items were found, but not allowed
- // ErrorDetails: -
- ArrayNoAdditionalItemsError struct {
- ResultErrorFields
- }
-
- // ArrayMinItemsError is produced if an array contains fewer items than the allowed minimum
- // ErrorDetails: min
- ArrayMinItemsError struct {
- ResultErrorFields
- }
-
- // ArrayMaxItemsError is produced if an array contains more items than the allowed maximum
- // ErrorDetails: max
- ArrayMaxItemsError struct {
- ResultErrorFields
- }
-
- // ItemsMustBeUniqueError is produced if an array requires unique items, but contains non-unique items
- // ErrorDetails: type, i, j
- ItemsMustBeUniqueError struct {
- ResultErrorFields
- }
-
- // ArrayContainsError is produced if an array contains invalid items
- // ErrorDetails:
- ArrayContainsError struct {
- ResultErrorFields
- }
-
- // ArrayMinPropertiesError is produced if an object contains fewer properties than the allowed minimum
- // ErrorDetails: min
- ArrayMinPropertiesError struct {
- ResultErrorFields
- }
-
- // ArrayMaxPropertiesError is produced if an object contains more properties than the allowed maximum
- // ErrorDetails: max
- ArrayMaxPropertiesError struct {
- ResultErrorFields
- }
-
- // AdditionalPropertyNotAllowedError is produced if an object has additional properties, but not allowed
- // ErrorDetails: property
- AdditionalPropertyNotAllowedError struct {
- ResultErrorFields
- }
-
- // InvalidPropertyPatternError is produced if a property does not match the defined pattern
- // ErrorDetails: property, pattern
- InvalidPropertyPatternError struct {
- ResultErrorFields
- }
-
- // InvalidPropertyNameError is produced if an invalid-named property was found
- // ErrorDetails: property
- InvalidPropertyNameError struct {
- ResultErrorFields
- }
-
- // StringLengthGTEError is produced if a string is shorter than the minimum required length
- // ErrorDetails: min
- StringLengthGTEError struct {
- ResultErrorFields
- }
-
- // StringLengthLTEError is produced if a string is longer than the maximum allowed length
- // ErrorDetails: max
- StringLengthLTEError struct {
- ResultErrorFields
- }
-
- // DoesNotMatchPatternError is produced if a string does not match the defined pattern
- // ErrorDetails: pattern
- DoesNotMatchPatternError struct {
- ResultErrorFields
- }
-
- // DoesNotMatchFormatError is produced if a string does not match the defined format
- // ErrorDetails: format
- DoesNotMatchFormatError struct {
- ResultErrorFields
- }
-
- // MultipleOfError is produced if a number is not a multiple of the defined multipleOf
- // ErrorDetails: multiple
- MultipleOfError struct {
- ResultErrorFields
- }
-
- // NumberGTEError is produced if a number is lower than the allowed minimum
- // ErrorDetails: min
- NumberGTEError struct {
- ResultErrorFields
- }
-
- // NumberGTError is produced if a number is lower than, or equal to the specified minimum, and exclusiveMinimum is set
- // ErrorDetails: min
- NumberGTError struct {
- ResultErrorFields
- }
-
- // NumberLTEError is produced if a number is higher than the allowed maximum
- // ErrorDetails: max
- NumberLTEError struct {
- ResultErrorFields
- }
-
- // NumberLTError is produced if a number is higher than, or equal to the specified maximum, and exclusiveMaximum is set
- // ErrorDetails: max
- NumberLTError struct {
- ResultErrorFields
- }
-
- // ConditionThenError is produced if a condition's "then" validation is invalid
- // ErrorDetails: -
- ConditionThenError struct {
- ResultErrorFields
- }
-
- // ConditionElseError is produced if a condition's "else" condition is invalid
- // ErrorDetails: -
- ConditionElseError struct {
- ResultErrorFields
- }
-)
-
-// newError takes a ResultError type and sets the type, context, description, details, value, and field
-func newError(err ResultError, context *JsonContext, value interface{}, locale locale, details ErrorDetails) {
- var t string
- var d string
- switch err.(type) {
- case *FalseError:
- t = "false"
- d = locale.False()
- case *RequiredError:
- t = "required"
- d = locale.Required()
- case *InvalidTypeError:
- t = "invalid_type"
- d = locale.InvalidType()
- case *NumberAnyOfError:
- t = "number_any_of"
- d = locale.NumberAnyOf()
- case *NumberOneOfError:
- t = "number_one_of"
- d = locale.NumberOneOf()
- case *NumberAllOfError:
- t = "number_all_of"
- d = locale.NumberAllOf()
- case *NumberNotError:
- t = "number_not"
- d = locale.NumberNot()
- case *MissingDependencyError:
- t = "missing_dependency"
- d = locale.MissingDependency()
- case *InternalError:
- t = "internal"
- d = locale.Internal()
- case *ConstError:
- t = "const"
- d = locale.Const()
- case *EnumError:
- t = "enum"
- d = locale.Enum()
- case *ArrayNoAdditionalItemsError:
- t = "array_no_additional_items"
- d = locale.ArrayNoAdditionalItems()
- case *ArrayMinItemsError:
- t = "array_min_items"
- d = locale.ArrayMinItems()
- case *ArrayMaxItemsError:
- t = "array_max_items"
- d = locale.ArrayMaxItems()
- case *ItemsMustBeUniqueError:
- t = "unique"
- d = locale.Unique()
- case *ArrayContainsError:
- t = "contains"
- d = locale.ArrayContains()
- case *ArrayMinPropertiesError:
- t = "array_min_properties"
- d = locale.ArrayMinProperties()
- case *ArrayMaxPropertiesError:
- t = "array_max_properties"
- d = locale.ArrayMaxProperties()
- case *AdditionalPropertyNotAllowedError:
- t = "additional_property_not_allowed"
- d = locale.AdditionalPropertyNotAllowed()
- case *InvalidPropertyPatternError:
- t = "invalid_property_pattern"
- d = locale.InvalidPropertyPattern()
- case *InvalidPropertyNameError:
- t = "invalid_property_name"
- d = locale.InvalidPropertyName()
- case *StringLengthGTEError:
- t = "string_gte"
- d = locale.StringGTE()
- case *StringLengthLTEError:
- t = "string_lte"
- d = locale.StringLTE()
- case *DoesNotMatchPatternError:
- t = "pattern"
- d = locale.DoesNotMatchPattern()
- case *DoesNotMatchFormatError:
- t = "format"
- d = locale.DoesNotMatchFormat()
- case *MultipleOfError:
- t = "multiple_of"
- d = locale.MultipleOf()
- case *NumberGTEError:
- t = "number_gte"
- d = locale.NumberGTE()
- case *NumberGTError:
- t = "number_gt"
- d = locale.NumberGT()
- case *NumberLTEError:
- t = "number_lte"
- d = locale.NumberLTE()
- case *NumberLTError:
- t = "number_lt"
- d = locale.NumberLT()
- case *ConditionThenError:
- t = "condition_then"
- d = locale.ConditionThen()
- case *ConditionElseError:
- t = "condition_else"
- d = locale.ConditionElse()
- }
-
- err.SetType(t)
- err.SetContext(context)
- err.SetValue(value)
- err.SetDetails(details)
- err.SetDescriptionFormat(d)
- details["field"] = err.Field()
-
- if _, exists := details["context"]; !exists && context != nil {
- details["context"] = context.String()
- }
-
- err.SetDescription(formatErrorDescription(err.DescriptionFormat(), details))
-}
-
-// formatErrorDescription takes a string in the default text/template
-// format and converts it to a string with replacements. The fields come
-// from the ErrorDetails struct and vary for each type of error.
-func formatErrorDescription(s string, details ErrorDetails) string {
-
- var tpl *template.Template
- var descrAsBuffer bytes.Buffer
- var err error
-
- errorTemplates.RLock()
- tpl = errorTemplates.Lookup(s)
- errorTemplates.RUnlock()
-
- if tpl == nil {
- errorTemplates.Lock()
- tpl = errorTemplates.New(s)
-
- if ErrorTemplateFuncs != nil {
- tpl.Funcs(ErrorTemplateFuncs)
- }
-
- tpl, err = tpl.Parse(s)
- errorTemplates.Unlock()
-
- if err != nil {
- return err.Error()
- }
- }
-
- err = tpl.Execute(&descrAsBuffer, details)
- if err != nil {
- return err.Error()
- }
-
- return descrAsBuffer.String()
-}
diff --git a/vendor/github.com/xeipuuv/gojsonschema/format_checkers.go b/vendor/github.com/xeipuuv/gojsonschema/format_checkers.go
deleted file mode 100644
index 873ffc7..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/format_checkers.go
+++ /dev/null
@@ -1,368 +0,0 @@
-package gojsonschema
-
-import (
- "net"
- "net/mail"
- "net/url"
- "regexp"
- "strings"
- "sync"
- "time"
-)
-
-type (
- // FormatChecker is the interface all formatters added to FormatCheckerChain must implement
- FormatChecker interface {
- // IsFormat checks if input has the correct format and type
- IsFormat(input interface{}) bool
- }
-
- // FormatCheckerChain holds the formatters
- FormatCheckerChain struct {
- formatters map[string]FormatChecker
- }
-
- // EmailFormatChecker verifies email address formats
- EmailFormatChecker struct{}
-
- // IPV4FormatChecker verifies IP addresses in the IPv4 format
- IPV4FormatChecker struct{}
-
- // IPV6FormatChecker verifies IP addresses in the IPv6 format
- IPV6FormatChecker struct{}
-
- // DateTimeFormatChecker verifies date/time formats per RFC3339 5.6
- //
- // Valid formats:
- // Partial Time: HH:MM:SS
- // Full Date: YYYY-MM-DD
- // Full Time: HH:MM:SSZ-07:00
- // Date Time: YYYY-MM-DDTHH:MM:SSZ-0700
- //
- // Where
- // YYYY = 4DIGIT year
- // MM = 2DIGIT month ; 01-12
- // DD = 2DIGIT day-month ; 01-28, 01-29, 01-30, 01-31 based on month/year
- // HH = 2DIGIT hour ; 00-23
- // MM = 2DIGIT ; 00-59
- // SS = 2DIGIT ; 00-58, 00-60 based on leap second rules
- // T = Literal
- // Z = Literal
- //
-	// Note: Nanoseconds are also supported in all formats
- //
- // http://tools.ietf.org/html/rfc3339#section-5.6
- DateTimeFormatChecker struct{}
-
- // DateFormatChecker verifies date formats
- //
- // Valid format:
- // Full Date: YYYY-MM-DD
- //
- // Where
- // YYYY = 4DIGIT year
- // MM = 2DIGIT month ; 01-12
- // DD = 2DIGIT day-month ; 01-28, 01-29, 01-30, 01-31 based on month/year
- DateFormatChecker struct{}
-
- // TimeFormatChecker verifies time formats
- //
- // Valid formats:
- // Partial Time: HH:MM:SS
- // Full Time: HH:MM:SSZ-07:00
- //
- // Where
- // HH = 2DIGIT hour ; 00-23
- // MM = 2DIGIT ; 00-59
- // SS = 2DIGIT ; 00-58, 00-60 based on leap second rules
- // T = Literal
- // Z = Literal
- TimeFormatChecker struct{}
-
- // URIFormatChecker validates a URI with a valid Scheme per RFC3986
- URIFormatChecker struct{}
-
- // URIReferenceFormatChecker validates a URI or relative-reference per RFC3986
- URIReferenceFormatChecker struct{}
-
- // URITemplateFormatChecker validates a URI template per RFC6570
- URITemplateFormatChecker struct{}
-
- // HostnameFormatChecker validates a hostname is in the correct format
- HostnameFormatChecker struct{}
-
- // UUIDFormatChecker validates a UUID is in the correct format
- UUIDFormatChecker struct{}
-
- // RegexFormatChecker validates a regex is in the correct format
- RegexFormatChecker struct{}
-
- // JSONPointerFormatChecker validates a JSON Pointer per RFC6901
- JSONPointerFormatChecker struct{}
-
- // RelativeJSONPointerFormatChecker validates a relative JSON Pointer is in the correct format
- RelativeJSONPointerFormatChecker struct{}
-)
-
-var (
- // FormatCheckers holds the valid formatters, and is a public variable
- // so library users can add custom formatters
- FormatCheckers = FormatCheckerChain{
- formatters: map[string]FormatChecker{
- "date": DateFormatChecker{},
- "time": TimeFormatChecker{},
- "date-time": DateTimeFormatChecker{},
- "hostname": HostnameFormatChecker{},
- "email": EmailFormatChecker{},
- "idn-email": EmailFormatChecker{},
- "ipv4": IPV4FormatChecker{},
- "ipv6": IPV6FormatChecker{},
- "uri": URIFormatChecker{},
- "uri-reference": URIReferenceFormatChecker{},
- "iri": URIFormatChecker{},
- "iri-reference": URIReferenceFormatChecker{},
- "uri-template": URITemplateFormatChecker{},
- "uuid": UUIDFormatChecker{},
- "regex": RegexFormatChecker{},
- "json-pointer": JSONPointerFormatChecker{},
- "relative-json-pointer": RelativeJSONPointerFormatChecker{},
- },
- }
-
- // Regex credit: https://www.socketloop.com/tutorials/golang-validate-hostname
- rxHostname = regexp.MustCompile(`^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$`)
-
-	// Use a regex to make sure curly brackets are balanced properly after validating it as a URI
- rxURITemplate = regexp.MustCompile("^([^{]*({[^}]*})?)*$")
-
- rxUUID = regexp.MustCompile("^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$")
-
- rxJSONPointer = regexp.MustCompile("^(?:/(?:[^~/]|~0|~1)*)*$")
-
- rxRelJSONPointer = regexp.MustCompile("^(?:0|[1-9][0-9]*)(?:#|(?:/(?:[^~/]|~0|~1)*)*)$")
-
- lock = new(sync.RWMutex)
-)
-
-// Add adds a FormatChecker to the FormatCheckerChain
-// The name used will be the value used for the format key in your json schema
-func (c *FormatCheckerChain) Add(name string, f FormatChecker) *FormatCheckerChain {
- lock.Lock()
- c.formatters[name] = f
- lock.Unlock()
-
- return c
-}
-
-// Remove deletes a FormatChecker from the FormatCheckerChain (if it exists)
-func (c *FormatCheckerChain) Remove(name string) *FormatCheckerChain {
- lock.Lock()
- delete(c.formatters, name)
- lock.Unlock()
-
- return c
-}
-
-// Has checks to see if the FormatCheckerChain holds a FormatChecker with the given name
-func (c *FormatCheckerChain) Has(name string) bool {
- lock.RLock()
- _, ok := c.formatters[name]
- lock.RUnlock()
-
- return ok
-}
-
-// IsFormat will check an input against a FormatChecker with the given name
-// to see if it is the correct format
-func (c *FormatCheckerChain) IsFormat(name string, input interface{}) bool {
- lock.RLock()
- f, ok := c.formatters[name]
- lock.RUnlock()
-
- // If a format is unrecognized it should always pass validation
- if !ok {
- return true
- }
-
- return f.IsFormat(input)
-}
-
-// IsFormat checks if input is a correctly formatted e-mail address
-func (f EmailFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- _, err := mail.ParseAddress(asString)
- return err == nil
-}
-
-// IsFormat checks if input is a correctly formatted IPv4-address
-func (f IPV4FormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- // Credit: /~https://github.com/asaskevich/govalidator
- ip := net.ParseIP(asString)
- return ip != nil && strings.Contains(asString, ".")
-}
-
-// IsFormat checks if input is a correctly formatted IPv6-address
-func (f IPV6FormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- // Credit: /~https://github.com/asaskevich/govalidator
- ip := net.ParseIP(asString)
- return ip != nil && strings.Contains(asString, ":")
-}
-
-// IsFormat checks if input is a correctly formatted date/time per RFC3339 5.6
-func (f DateTimeFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- formats := []string{
- "15:04:05",
- "15:04:05Z07:00",
- "2006-01-02",
- time.RFC3339,
- time.RFC3339Nano,
- }
-
- for _, format := range formats {
- if _, err := time.Parse(format, asString); err == nil {
- return true
- }
- }
-
- return false
-}
-
-// IsFormat checks if input is a correctly formatted date (YYYY-MM-DD)
-func (f DateFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
- _, err := time.Parse("2006-01-02", asString)
- return err == nil
-}
-
-// IsFormat checks if input is a correctly formatted time (HH:MM:SS or HH:MM:SSZ-07:00)
-func (f TimeFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- if _, err := time.Parse("15:04:05Z07:00", asString); err == nil {
- return true
- }
-
- _, err := time.Parse("15:04:05", asString)
- return err == nil
-}
-
-// IsFormat checks if input is correctly formatted URI with a valid Scheme per RFC3986
-func (f URIFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- u, err := url.Parse(asString)
-
- if err != nil || u.Scheme == "" {
- return false
- }
-
- return !strings.Contains(asString, `\`)
-}
-
-// IsFormat checks if input is a correctly formatted URI or relative-reference per RFC3986
-func (f URIReferenceFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- _, err := url.Parse(asString)
- return err == nil && !strings.Contains(asString, `\`)
-}
-
-// IsFormat checks if input is a correctly formatted URI template per RFC6570
-func (f URITemplateFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- u, err := url.Parse(asString)
- if err != nil || strings.Contains(asString, `\`) {
- return false
- }
-
- return rxURITemplate.MatchString(u.Path)
-}
-
-// IsFormat checks if input is a correctly formatted hostname
-func (f HostnameFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- return rxHostname.MatchString(asString) && len(asString) < 256
-}
-
-// IsFormat checks if input is a correctly formatted UUID
-func (f UUIDFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- return rxUUID.MatchString(asString)
-}
-
-// IsFormat checks if input is a correctly formatted regular expression
-func (f RegexFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- if asString == "" {
- return true
- }
- _, err := regexp.Compile(asString)
- return err == nil
-}
-
-// IsFormat checks if input is a correctly formatted JSON Pointer per RFC6901
-func (f JSONPointerFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- return rxJSONPointer.MatchString(asString)
-}
-
-// IsFormat checks if input is a correctly formatted relative JSON Pointer
-func (f RelativeJSONPointerFormatChecker) IsFormat(input interface{}) bool {
- asString, ok := input.(string)
- if !ok {
- return false
- }
-
- return rxRelJSONPointer.MatchString(asString)
-}
diff --git a/vendor/github.com/xeipuuv/gojsonschema/glide.yaml b/vendor/github.com/xeipuuv/gojsonschema/glide.yaml
deleted file mode 100644
index ab6fb86..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/glide.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-package: github.com/xeipuuv/gojsonschema
-license: Apache 2.0
-import:
-- package: github.com/xeipuuv/gojsonschema
-
-- package: github.com/xeipuuv/gojsonpointer
-
-- package: github.com/xeipuuv/gojsonreference
-
-testImport:
-- package: github.com/stretchr/testify
- subpackages:
- - assert
diff --git a/vendor/github.com/xeipuuv/gojsonschema/go.mod b/vendor/github.com/xeipuuv/gojsonschema/go.mod
deleted file mode 100644
index b709d7f..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/go.mod
+++ /dev/null
@@ -1,7 +0,0 @@
-module github.com/xeipuuv/gojsonschema
-
-require (
- github.com/stretchr/testify v1.3.0
- github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
- github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415
-)
diff --git a/vendor/github.com/xeipuuv/gojsonschema/go.sum b/vendor/github.com/xeipuuv/gojsonschema/go.sum
deleted file mode 100644
index 0e865ac..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/go.sum
+++ /dev/null
@@ -1,11 +0,0 @@
-github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
-github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
-github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
-github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
-github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
-github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
diff --git a/vendor/github.com/xeipuuv/gojsonschema/internalLog.go b/vendor/github.com/xeipuuv/gojsonschema/internalLog.go
deleted file mode 100644
index 4ef7a8d..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/internalLog.go
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Very simple log wrapper.
-// Used for debugging/testing purposes.
-//
-// created 01-01-2015
-
-package gojsonschema
-
-import (
- "log"
-)
-
-const internalLogEnabled = false
-
-func internalLog(format string, v ...interface{}) {
- log.Printf(format, v...)
-}
diff --git a/vendor/github.com/xeipuuv/gojsonschema/jsonContext.go b/vendor/github.com/xeipuuv/gojsonschema/jsonContext.go
deleted file mode 100644
index 0e97970..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/jsonContext.go
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright 2013 MongoDB, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author tolsen
-// author-github /~https://github.com/tolsen
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Implements a persistent (immutable w/ shared structure) singly-linked list of strings for the purpose of storing a json context
-//
-// created 04-09-2013
-
-package gojsonschema
-
-import "bytes"
-
-// JsonContext implements a persistent linked-list of strings
-type JsonContext struct {
- head string
- tail *JsonContext
-}
-
-// NewJsonContext creates a new JsonContext
-func NewJsonContext(head string, tail *JsonContext) *JsonContext {
- return &JsonContext{head, tail}
-}
-
-// String displays the context in reverse.
-// This plays well with the data structure's persistent nature with
-// Cons and a json document's tree structure.
-func (c *JsonContext) String(del ...string) string {
- byteArr := make([]byte, 0, c.stringLen())
- buf := bytes.NewBuffer(byteArr)
- c.writeStringToBuffer(buf, del)
-
- return buf.String()
-}
-
-func (c *JsonContext) stringLen() int {
- length := 0
- if c.tail != nil {
- length = c.tail.stringLen() + 1 // add 1 for "."
- }
-
- length += len(c.head)
- return length
-}
-
-func (c *JsonContext) writeStringToBuffer(buf *bytes.Buffer, del []string) {
- if c.tail != nil {
- c.tail.writeStringToBuffer(buf, del)
-
- if len(del) > 0 {
- buf.WriteString(del[0])
- } else {
- buf.WriteString(".")
- }
- }
-
- buf.WriteString(c.head)
-}
diff --git a/vendor/github.com/xeipuuv/gojsonschema/jsonLoader.go b/vendor/github.com/xeipuuv/gojsonschema/jsonLoader.go
deleted file mode 100644
index 5d88af2..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/jsonLoader.go
+++ /dev/null
@@ -1,386 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Different strategies to load JSON files.
-// Includes References (file and HTTP), JSON strings and Go types.
-//
-// created 01-02-2015
-
-package gojsonschema
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "io"
- "io/ioutil"
- "net/http"
- "net/url"
- "os"
- "path/filepath"
- "runtime"
- "strings"
-
- "github.com/xeipuuv/gojsonreference"
-)
-
-var osFS = osFileSystem(os.Open)
-
-// JSONLoader defines the JSON loader interface
-type JSONLoader interface {
- JsonSource() interface{}
- LoadJSON() (interface{}, error)
- JsonReference() (gojsonreference.JsonReference, error)
- LoaderFactory() JSONLoaderFactory
-}
-
-// JSONLoaderFactory defines the JSON loader factory interface
-type JSONLoaderFactory interface {
- // New creates a new JSON loader for the given source
- New(source string) JSONLoader
-}
-
-// DefaultJSONLoaderFactory is the default JSON loader factory
-type DefaultJSONLoaderFactory struct {
-}
-
-// FileSystemJSONLoaderFactory is a JSON loader factory that uses http.FileSystem
-type FileSystemJSONLoaderFactory struct {
- fs http.FileSystem
-}
-
-// New creates a new JSON loader for the given source
-func (d DefaultJSONLoaderFactory) New(source string) JSONLoader {
- return &jsonReferenceLoader{
- fs: osFS,
- source: source,
- }
-}
-
-// New creates a new JSON loader for the given source
-func (f FileSystemJSONLoaderFactory) New(source string) JSONLoader {
- return &jsonReferenceLoader{
- fs: f.fs,
- source: source,
- }
-}
-
-// osFileSystem is a functional wrapper for os.Open that implements http.FileSystem.
-type osFileSystem func(string) (*os.File, error)
-
-// Opens a file with the given name
-func (o osFileSystem) Open(name string) (http.File, error) {
- return o(name)
-}
-
-// JSON Reference loader
-// references are used to load JSONs from files and HTTP
-
-type jsonReferenceLoader struct {
- fs http.FileSystem
- source string
-}
-
-func (l *jsonReferenceLoader) JsonSource() interface{} {
- return l.source
-}
-
-func (l *jsonReferenceLoader) JsonReference() (gojsonreference.JsonReference, error) {
- return gojsonreference.NewJsonReference(l.JsonSource().(string))
-}
-
-func (l *jsonReferenceLoader) LoaderFactory() JSONLoaderFactory {
- return &FileSystemJSONLoaderFactory{
- fs: l.fs,
- }
-}
-
-// NewReferenceLoader returns a JSON reference loader using the given source and the local OS file system.
-func NewReferenceLoader(source string) JSONLoader {
- return &jsonReferenceLoader{
- fs: osFS,
- source: source,
- }
-}
-
-// NewReferenceLoaderFileSystem returns a JSON reference loader using the given source and file system.
-func NewReferenceLoaderFileSystem(source string, fs http.FileSystem) JSONLoader {
- return &jsonReferenceLoader{
- fs: fs,
- source: source,
- }
-}
-
-func (l *jsonReferenceLoader) LoadJSON() (interface{}, error) {
-
- var err error
-
- reference, err := gojsonreference.NewJsonReference(l.JsonSource().(string))
- if err != nil {
- return nil, err
- }
-
- refToURL := reference
- refToURL.GetUrl().Fragment = ""
-
- var document interface{}
-
- if reference.HasFileScheme {
-
- filename := strings.TrimPrefix(refToURL.String(), "file://")
- filename, err = url.QueryUnescape(filename)
-
- if err != nil {
- return nil, err
- }
-
- if runtime.GOOS == "windows" {
- // on Windows, a file URL may have an extra leading slash, use slashes
- // instead of backslashes, and have spaces escaped
- filename = strings.TrimPrefix(filename, "/")
- filename = filepath.FromSlash(filename)
- }
-
- document, err = l.loadFromFile(filename)
- if err != nil {
- return nil, err
- }
-
- } else {
-
- document, err = l.loadFromHTTP(refToURL.String())
- if err != nil {
- return nil, err
- }
-
- }
-
- return document, nil
-
-}
-
-func (l *jsonReferenceLoader) loadFromHTTP(address string) (interface{}, error) {
-
-	// return cached versions of the metaschemas for drafts 4, 6 and 7
-	// for performance and to allow easier offline use
- if metaSchema := drafts.GetMetaSchema(address); metaSchema != "" {
- return decodeJSONUsingNumber(strings.NewReader(metaSchema))
- }
-
- resp, err := http.Get(address)
- if err != nil {
- return nil, err
- }
-
- // must return HTTP Status 200 OK
- if resp.StatusCode != http.StatusOK {
- return nil, errors.New(formatErrorDescription(Locale.HttpBadStatus(), ErrorDetails{"status": resp.Status}))
- }
-
- bodyBuff, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
-
- return decodeJSONUsingNumber(bytes.NewReader(bodyBuff))
-}
-
-func (l *jsonReferenceLoader) loadFromFile(path string) (interface{}, error) {
- f, err := l.fs.Open(path)
- if err != nil {
- return nil, err
- }
- defer f.Close()
-
- bodyBuff, err := ioutil.ReadAll(f)
- if err != nil {
- return nil, err
- }
-
- return decodeJSONUsingNumber(bytes.NewReader(bodyBuff))
-
-}
-
-// JSON string loader
-
-type jsonStringLoader struct {
- source string
-}
-
-func (l *jsonStringLoader) JsonSource() interface{} {
- return l.source
-}
-
-func (l *jsonStringLoader) JsonReference() (gojsonreference.JsonReference, error) {
- return gojsonreference.NewJsonReference("#")
-}
-
-func (l *jsonStringLoader) LoaderFactory() JSONLoaderFactory {
- return &DefaultJSONLoaderFactory{}
-}
-
-// NewStringLoader creates a new JSONLoader, taking a string as source
-func NewStringLoader(source string) JSONLoader {
- return &jsonStringLoader{source: source}
-}
-
-func (l *jsonStringLoader) LoadJSON() (interface{}, error) {
-
- return decodeJSONUsingNumber(strings.NewReader(l.JsonSource().(string)))
-
-}
-
-// JSON bytes loader
-
-type jsonBytesLoader struct {
- source []byte
-}
-
-func (l *jsonBytesLoader) JsonSource() interface{} {
- return l.source
-}
-
-func (l *jsonBytesLoader) JsonReference() (gojsonreference.JsonReference, error) {
- return gojsonreference.NewJsonReference("#")
-}
-
-func (l *jsonBytesLoader) LoaderFactory() JSONLoaderFactory {
- return &DefaultJSONLoaderFactory{}
-}
-
-// NewBytesLoader creates a new JSONLoader, taking a `[]byte` as source
-func NewBytesLoader(source []byte) JSONLoader {
- return &jsonBytesLoader{source: source}
-}
-
-func (l *jsonBytesLoader) LoadJSON() (interface{}, error) {
- return decodeJSONUsingNumber(bytes.NewReader(l.JsonSource().([]byte)))
-}
-
-// JSON Go (types) loader
-// used to load JSONs from the code as maps, interface{}, structs ...
-
-type jsonGoLoader struct {
- source interface{}
-}
-
-func (l *jsonGoLoader) JsonSource() interface{} {
- return l.source
-}
-
-func (l *jsonGoLoader) JsonReference() (gojsonreference.JsonReference, error) {
- return gojsonreference.NewJsonReference("#")
-}
-
-func (l *jsonGoLoader) LoaderFactory() JSONLoaderFactory {
- return &DefaultJSONLoaderFactory{}
-}
-
-// NewGoLoader creates a new JSONLoader from a given Go struct
-func NewGoLoader(source interface{}) JSONLoader {
- return &jsonGoLoader{source: source}
-}
-
-func (l *jsonGoLoader) LoadJSON() (interface{}, error) {
-
- // convert it to a compliant JSON first to avoid types "mismatches"
-
- jsonBytes, err := json.Marshal(l.JsonSource())
- if err != nil {
- return nil, err
- }
-
- return decodeJSONUsingNumber(bytes.NewReader(jsonBytes))
-
-}
-
-type jsonIOLoader struct {
- buf *bytes.Buffer
-}
-
-// NewReaderLoader creates a new JSON loader using the provided io.Reader
-func NewReaderLoader(source io.Reader) (JSONLoader, io.Reader) {
- buf := &bytes.Buffer{}
- return &jsonIOLoader{buf: buf}, io.TeeReader(source, buf)
-}
-
-// NewWriterLoader creates a new JSON loader using the provided io.Writer
-func NewWriterLoader(source io.Writer) (JSONLoader, io.Writer) {
- buf := &bytes.Buffer{}
- return &jsonIOLoader{buf: buf}, io.MultiWriter(source, buf)
-}
-
-func (l *jsonIOLoader) JsonSource() interface{} {
- return l.buf.String()
-}
-
-func (l *jsonIOLoader) LoadJSON() (interface{}, error) {
- return decodeJSONUsingNumber(l.buf)
-}
-
-func (l *jsonIOLoader) JsonReference() (gojsonreference.JsonReference, error) {
- return gojsonreference.NewJsonReference("#")
-}
-
-func (l *jsonIOLoader) LoaderFactory() JSONLoaderFactory {
- return &DefaultJSONLoaderFactory{}
-}
-
-// JSON raw loader
-// In case the JSON is already marshalled to interface{} use this loader
-// This is used for testing as otherwise there is no guarantee the JSON is marshalled
-// "properly" by using https://golang.org/pkg/encoding/json/#Decoder.UseNumber
-type jsonRawLoader struct {
- source interface{}
-}
-
-// NewRawLoader creates a new JSON raw loader for the given source
-func NewRawLoader(source interface{}) JSONLoader {
- return &jsonRawLoader{source: source}
-}
-func (l *jsonRawLoader) JsonSource() interface{} {
- return l.source
-}
-func (l *jsonRawLoader) LoadJSON() (interface{}, error) {
- return l.source, nil
-}
-func (l *jsonRawLoader) JsonReference() (gojsonreference.JsonReference, error) {
- return gojsonreference.NewJsonReference("#")
-}
-func (l *jsonRawLoader) LoaderFactory() JSONLoaderFactory {
- return &DefaultJSONLoaderFactory{}
-}
-
-func decodeJSONUsingNumber(r io.Reader) (interface{}, error) {
-
- var document interface{}
-
- decoder := json.NewDecoder(r)
- decoder.UseNumber()
-
- err := decoder.Decode(&document)
- if err != nil {
- return nil, err
- }
-
- return document, nil
-
-}
diff --git a/vendor/github.com/xeipuuv/gojsonschema/locales.go b/vendor/github.com/xeipuuv/gojsonschema/locales.go
deleted file mode 100644
index a416225..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/locales.go
+++ /dev/null
@@ -1,472 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Contains const string and messages.
-//
-// created 01-01-2015
-
-package gojsonschema
-
-type (
- // locale is an interface for defining custom error strings
- locale interface {
-
- // False returns a format-string for "false" schema validation errors
- False() string
-
- // Required returns a format-string for "required" schema validation errors
- Required() string
-
- // InvalidType returns a format-string for "invalid type" schema validation errors
- InvalidType() string
-
- // NumberAnyOf returns a format-string for "anyOf" schema validation errors
- NumberAnyOf() string
-
- // NumberOneOf returns a format-string for "oneOf" schema validation errors
- NumberOneOf() string
-
- // NumberAllOf returns a format-string for "allOf" schema validation errors
- NumberAllOf() string
-
- // NumberNot returns a format-string to format a NumberNotError
- NumberNot() string
-
- // MissingDependency returns a format-string for "missing dependency" schema validation errors
- MissingDependency() string
-
- // Internal returns a format-string for internal errors
- Internal() string
-
- // Const returns a format-string to format a ConstError
- Const() string
-
- // Enum returns a format-string to format an EnumError
- Enum() string
-
- // ArrayNotEnoughItems returns a format-string to format an error for arrays having not enough items to match positional list of schema
- ArrayNotEnoughItems() string
-
- // ArrayNoAdditionalItems returns a format-string to format an ArrayNoAdditionalItemsError
- ArrayNoAdditionalItems() string
-
- // ArrayMinItems returns a format-string to format an ArrayMinItemsError
- ArrayMinItems() string
-
- // ArrayMaxItems returns a format-string to format an ArrayMaxItemsError
- ArrayMaxItems() string
-
- // Unique returns a format-string to format an ItemsMustBeUniqueError
- Unique() string
-
- // ArrayContains returns a format-string to format an ArrayContainsError
- ArrayContains() string
-
- // ArrayMinProperties returns a format-string to format an ArrayMinPropertiesError
- ArrayMinProperties() string
-
- // ArrayMaxProperties returns a format-string to format an ArrayMaxPropertiesError
- ArrayMaxProperties() string
-
- // AdditionalPropertyNotAllowed returns a format-string to format an AdditionalPropertyNotAllowedError
- AdditionalPropertyNotAllowed() string
-
- // InvalidPropertyPattern returns a format-string to format an InvalidPropertyPatternError
- InvalidPropertyPattern() string
-
- // InvalidPropertyName returns a format-string to format an InvalidPropertyNameError
- InvalidPropertyName() string
-
- // StringGTE returns a format-string to format an StringLengthGTEError
- StringGTE() string
-
- // StringLTE returns a format-string to format an StringLengthLTEError
- StringLTE() string
-
- // DoesNotMatchPattern returns a format-string to format an DoesNotMatchPatternError
- DoesNotMatchPattern() string
-
- // DoesNotMatchFormat returns a format-string to format an DoesNotMatchFormatError
- DoesNotMatchFormat() string
-
- // MultipleOf returns a format-string to format an MultipleOfError
- MultipleOf() string
-
- // NumberGTE returns a format-string to format an NumberGTEError
- NumberGTE() string
-
- // NumberGT returns a format-string to format an NumberGTError
- NumberGT() string
-
- // NumberLTE returns a format-string to format an NumberLTEError
- NumberLTE() string
-
- // NumberLT returns a format-string to format an NumberLTError
- NumberLT() string
-
- // Schema validations
-
- // RegexPattern returns a format-string to format a regex-pattern error
- RegexPattern() string
-
- // GreaterThanZero returns a format-string to format an error where a number must be greater than zero
- GreaterThanZero() string
-
- // MustBeOfA returns a format-string to format an error where a value is of the wrong type
- MustBeOfA() string
-
- // MustBeOfAn returns a format-string to format an error where a value is of the wrong type
- MustBeOfAn() string
-
- // CannotBeUsedWithout returns a format-string to format a "cannot be used without" error
- CannotBeUsedWithout() string
-
-	// CannotBeGT returns a format-string to format an error where a value is greater than allowed
- CannotBeGT() string
-
- // MustBeOfType returns a format-string to format an error where a value does not match the required type
- MustBeOfType() string
-
- // MustBeValidRegex returns a format-string to format an error where a regex is invalid
- MustBeValidRegex() string
-
- // MustBeValidFormat returns a format-string to format an error where a value does not match the expected format
- MustBeValidFormat() string
-
- // MustBeGTEZero returns a format-string to format an error where a value must be greater or equal than 0
- MustBeGTEZero() string
-
- // KeyCannotBeGreaterThan returns a format-string to format an error where a key is greater than the maximum allowed
- KeyCannotBeGreaterThan() string
-
- // KeyItemsMustBeOfType returns a format-string to format an error where a key is of the wrong type
- KeyItemsMustBeOfType() string
-
- // KeyItemsMustBeUnique returns a format-string to format an error where keys are not unique
- KeyItemsMustBeUnique() string
-
- // ReferenceMustBeCanonical returns a format-string to format a "reference must be canonical" error
- ReferenceMustBeCanonical() string
-
- // NotAValidType returns a format-string to format an invalid type error
- NotAValidType() string
-
- // Duplicated returns a format-string to format an error where types are duplicated
- Duplicated() string
-
- // HttpBadStatus returns a format-string for errors when loading a schema using HTTP
- HttpBadStatus() string
-
- // ParseError returns a format-string for JSON parsing errors
- ParseError() string
-
- // ConditionThen returns a format-string for ConditionThenError errors
- ConditionThen() string
-
- // ConditionElse returns a format-string for ConditionElseError errors
- ConditionElse() string
-
- // ErrorFormat returns a format string for errors
- ErrorFormat() string
- }
-
- // DefaultLocale is the default locale for this package
- DefaultLocale struct{}
-)
-
-// False returns a format-string for "false" schema validation errors
-func (l DefaultLocale) False() string {
- return "False always fails validation"
-}
-
-// Required returns a format-string for "required" schema validation errors
-func (l DefaultLocale) Required() string {
- return `{{.property}} is required`
-}
-
-// InvalidType returns a format-string for "invalid type" schema validation errors
-func (l DefaultLocale) InvalidType() string {
- return `Invalid type. Expected: {{.expected}}, given: {{.given}}`
-}
-
-// NumberAnyOf returns a format-string for "anyOf" schema validation errors
-func (l DefaultLocale) NumberAnyOf() string {
- return `Must validate at least one schema (anyOf)`
-}
-
-// NumberOneOf returns a format-string for "oneOf" schema validation errors
-func (l DefaultLocale) NumberOneOf() string {
- return `Must validate one and only one schema (oneOf)`
-}
-
-// NumberAllOf returns a format-string for "allOf" schema validation errors
-func (l DefaultLocale) NumberAllOf() string {
- return `Must validate all the schemas (allOf)`
-}
-
-// NumberNot returns a format-string to format a NumberNotError
-func (l DefaultLocale) NumberNot() string {
- return `Must not validate the schema (not)`
-}
-
-// MissingDependency returns a format-string for "missing dependency" schema validation errors
-func (l DefaultLocale) MissingDependency() string {
- return `Has a dependency on {{.dependency}}`
-}
-
-// Internal returns a format-string for internal errors
-func (l DefaultLocale) Internal() string {
- return `Internal Error {{.error}}`
-}
-
-// Const returns a format-string to format a ConstError
-func (l DefaultLocale) Const() string {
- return `{{.field}} does not match: {{.allowed}}`
-}
-
-// Enum returns a format-string to format an EnumError
-func (l DefaultLocale) Enum() string {
- return `{{.field}} must be one of the following: {{.allowed}}`
-}
-
-// ArrayNoAdditionalItems returns a format-string to format an ArrayNoAdditionalItemsError
-func (l DefaultLocale) ArrayNoAdditionalItems() string {
- return `No additional items allowed on array`
-}
-
-// ArrayNotEnoughItems returns a format-string to format an error for arrays having not enough items to match positional list of schema
-func (l DefaultLocale) ArrayNotEnoughItems() string {
- return `Not enough items on array to match positional list of schema`
-}
-
-// ArrayMinItems returns a format-string to format an ArrayMinItemsError
-func (l DefaultLocale) ArrayMinItems() string {
- return `Array must have at least {{.min}} items`
-}
-
-// ArrayMaxItems returns a format-string to format an ArrayMaxItemsError
-func (l DefaultLocale) ArrayMaxItems() string {
- return `Array must have at most {{.max}} items`
-}
-
-// Unique returns a format-string to format an ItemsMustBeUniqueError
-func (l DefaultLocale) Unique() string {
- return `{{.type}} items[{{.i}},{{.j}}] must be unique`
-}
-
-// ArrayContains returns a format-string to format an ArrayContainsError
-func (l DefaultLocale) ArrayContains() string {
- return `At least one of the items must match`
-}
-
-// ArrayMinProperties returns a format-string to format an ArrayMinPropertiesError
-func (l DefaultLocale) ArrayMinProperties() string {
- return `Must have at least {{.min}} properties`
-}
-
-// ArrayMaxProperties returns a format-string to format an ArrayMaxPropertiesError
-func (l DefaultLocale) ArrayMaxProperties() string {
- return `Must have at most {{.max}} properties`
-}
-
-// AdditionalPropertyNotAllowed returns a format-string to format an AdditionalPropertyNotAllowedError
-func (l DefaultLocale) AdditionalPropertyNotAllowed() string {
- return `Additional property {{.property}} is not allowed`
-}
-
-// InvalidPropertyPattern returns a format-string to format an InvalidPropertyPatternError
-func (l DefaultLocale) InvalidPropertyPattern() string {
- return `Property "{{.property}}" does not match pattern {{.pattern}}`
-}
-
-// InvalidPropertyName returns a format-string to format an InvalidPropertyNameError
-func (l DefaultLocale) InvalidPropertyName() string {
- return `Property name of "{{.property}}" does not match`
-}
-
-// StringGTE returns a format-string to format an StringLengthGTEError
-func (l DefaultLocale) StringGTE() string {
- return `String length must be greater than or equal to {{.min}}`
-}
-
-// StringLTE returns a format-string to format an StringLengthLTEError
-func (l DefaultLocale) StringLTE() string {
- return `String length must be less than or equal to {{.max}}`
-}
-
-// DoesNotMatchPattern returns a format-string to format an DoesNotMatchPatternError
-func (l DefaultLocale) DoesNotMatchPattern() string {
- return `Does not match pattern '{{.pattern}}'`
-}
-
-// DoesNotMatchFormat returns a format-string to format an DoesNotMatchFormatError
-func (l DefaultLocale) DoesNotMatchFormat() string {
- return `Does not match format '{{.format}}'`
-}
-
-// MultipleOf returns a format-string to format an MultipleOfError
-func (l DefaultLocale) MultipleOf() string {
- return `Must be a multiple of {{.multiple}}`
-}
-
-// NumberGTE returns the format string to format a NumberGTEError
-func (l DefaultLocale) NumberGTE() string {
- return `Must be greater than or equal to {{.min}}`
-}
-
-// NumberGT returns the format string to format a NumberGTError
-func (l DefaultLocale) NumberGT() string {
- return `Must be greater than {{.min}}`
-}
-
-// NumberLTE returns the format string to format a NumberLTEError
-func (l DefaultLocale) NumberLTE() string {
- return `Must be less than or equal to {{.max}}`
-}
-
-// NumberLT returns the format string to format a NumberLTError
-func (l DefaultLocale) NumberLT() string {
- return `Must be less than {{.max}}`
-}
-
-// Schema validators
-
-// RegexPattern returns a format-string to format a regex-pattern error
-func (l DefaultLocale) RegexPattern() string {
- return `Invalid regex pattern '{{.pattern}}'`
-}
-
-// GreaterThanZero returns a format-string to format an error where a number must be greater than zero
-func (l DefaultLocale) GreaterThanZero() string {
- return `{{.number}} must be strictly greater than 0`
-}
-
-// MustBeOfA returns a format-string to format an error where a value is of the wrong type
-func (l DefaultLocale) MustBeOfA() string {
- return `{{.x}} must be of a {{.y}}`
-}
-
-// MustBeOfAn returns a format-string to format an error where a value is of the wrong type
-func (l DefaultLocale) MustBeOfAn() string {
- return `{{.x}} must be of an {{.y}}`
-}
-
-// CannotBeUsedWithout returns a format-string to format a "cannot be used without" error
-func (l DefaultLocale) CannotBeUsedWithout() string {
- return `{{.x}} cannot be used without {{.y}}`
-}
-
-// CannotBeGT returns a format-string to format an error where a value is greater than allowed
-func (l DefaultLocale) CannotBeGT() string {
- return `{{.x}} cannot be greater than {{.y}}`
-}
-
-// MustBeOfType returns a format-string to format an error where a value does not match the required type
-func (l DefaultLocale) MustBeOfType() string {
- return `{{.key}} must be of type {{.type}}`
-}
-
-// MustBeValidRegex returns a format-string to format an error where a regex is invalid
-func (l DefaultLocale) MustBeValidRegex() string {
- return `{{.key}} must be a valid regex`
-}
-
-// MustBeValidFormat returns a format-string to format an error where a value does not match the expected format
-func (l DefaultLocale) MustBeValidFormat() string {
- return `{{.key}} must be a valid format {{.given}}`
-}
-
-// MustBeGTEZero returns a format-string to format an error where a value must be greater or equal than 0
-func (l DefaultLocale) MustBeGTEZero() string {
- return `{{.key}} must be greater than or equal to 0`
-}
-
-// KeyCannotBeGreaterThan returns a format-string to format an error where a value is greater than the maximum allowed
-func (l DefaultLocale) KeyCannotBeGreaterThan() string {
- return `{{.key}} cannot be greater than {{.y}}`
-}
-
-// KeyItemsMustBeOfType returns a format-string to format an error where a key is of the wrong type
-func (l DefaultLocale) KeyItemsMustBeOfType() string {
- return `{{.key}} items must be {{.type}}`
-}
-
-// KeyItemsMustBeUnique returns a format-string to format an error where keys are not unique
-func (l DefaultLocale) KeyItemsMustBeUnique() string {
- return `{{.key}} items must be unique`
-}
-
-// ReferenceMustBeCanonical returns a format-string to format a "reference must be canonical" error
-func (l DefaultLocale) ReferenceMustBeCanonical() string {
- return `Reference {{.reference}} must be canonical`
-}
-
-// NotAValidType returns a format-string to format an invalid type error
-func (l DefaultLocale) NotAValidType() string {
- return `has a primitive type that is NOT VALID -- given: {{.given}} Expected valid values are:{{.expected}}`
-}
-
-// Duplicated returns a format-string to format an error where types are duplicated
-func (l DefaultLocale) Duplicated() string {
- return `{{.type}} type is duplicated`
-}
-
-// HttpBadStatus returns a format-string for errors when loading a schema using HTTP
-func (l DefaultLocale) HttpBadStatus() string {
- return `Could not read schema from HTTP, response status is {{.status}}`
-}
-
-// ErrorFormat returns a format string for errors
-// Replacement options: field, description, context, value
-func (l DefaultLocale) ErrorFormat() string {
- return `{{.field}}: {{.description}}`
-}
-
-// ParseError returns a format-string for JSON parsing errors
-func (l DefaultLocale) ParseError() string {
- return `Expected: {{.expected}}, given: Invalid JSON`
-}
-
-// ConditionThen returns a format-string for ConditionThenError errors
-// If/Else
-func (l DefaultLocale) ConditionThen() string {
- return `Must validate "then" as "if" was valid`
-}
-
-// ConditionElse returns a format-string for ConditionElseError errors
-func (l DefaultLocale) ConditionElse() string {
- return `Must validate "else" as "if" was not valid`
-}
-
-// constants
-const (
- STRING_NUMBER = "number"
- STRING_ARRAY_OF_STRINGS = "array of strings"
- STRING_ARRAY_OF_SCHEMAS = "array of schemas"
- STRING_SCHEMA = "valid schema"
- STRING_SCHEMA_OR_ARRAY_OF_STRINGS = "schema or array of strings"
- STRING_PROPERTIES = "properties"
- STRING_DEPENDENCY = "dependency"
- STRING_PROPERTY = "property"
- STRING_UNDEFINED = "undefined"
- STRING_CONTEXT_ROOT = "(root)"
- STRING_ROOT_SCHEMA_PROPERTY = "(root)"
-)
diff --git a/vendor/github.com/xeipuuv/gojsonschema/result.go b/vendor/github.com/xeipuuv/gojsonschema/result.go
deleted file mode 100644
index 0a01791..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/result.go
+++ /dev/null
@@ -1,220 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Result and ResultError implementations.
-//
-// created 01-01-2015
-
-package gojsonschema
-
-import (
- "fmt"
- "strings"
-)
-
-type (
- // ErrorDetails is a map of details specific to each error.
- // While the values will vary, every error will contain a "field" value
- ErrorDetails map[string]interface{}
-
- // ResultError is the interface that library errors must implement
- ResultError interface {
- // Field returns the field name without the root context
- // i.e. firstName or person.firstName instead of (root).firstName or (root).person.firstName
- Field() string
- // SetType sets the error-type
- SetType(string)
- // Type returns the error-type
- Type() string
- // SetContext sets the JSON-context for the error
- SetContext(*JsonContext)
- // Context returns the JSON-context of the error
- Context() *JsonContext
- // SetDescription sets a description for the error
- SetDescription(string)
- // Description returns the description of the error
- Description() string
- // SetDescriptionFormat sets the format for the description in the default text/template format
- SetDescriptionFormat(string)
- // DescriptionFormat returns the format for the description in the default text/template format
- DescriptionFormat() string
- // SetValue sets the value related to the error
- SetValue(interface{})
- // Value returns the value related to the error
- Value() interface{}
- // SetDetails sets the details specific to the error
- SetDetails(ErrorDetails)
- // Details returns details about the error
- Details() ErrorDetails
- // String returns a string representation of the error
- String() string
- }
-
- // ResultErrorFields holds the fields for each ResultError implementation.
- // ResultErrorFields implements the ResultError interface, so custom errors
- // can be defined by just embedding this type
- ResultErrorFields struct {
- errorType string // A string with the type of error (i.e. invalid_type)
- context *JsonContext // Tree like notation of the part that failed the validation. ex (root).a.b ...
- description string // A human readable error message
- descriptionFormat string // A format for human readable error message
- value interface{} // Value given by the JSON file that is the source of the error
- details ErrorDetails
- }
-
- // Result holds the result of a validation
- Result struct {
- errors []ResultError
- // Scores how well the validation matched. Useful in generating
- // better error messages for anyOf and oneOf.
- score int
- }
-)
-
-// Field returns the field name without the root context
-// i.e. firstName or person.firstName instead of (root).firstName or (root).person.firstName
-func (v *ResultErrorFields) Field() string {
- return strings.TrimPrefix(v.context.String(), STRING_ROOT_SCHEMA_PROPERTY+".")
-}
-
-// SetType sets the error-type
-func (v *ResultErrorFields) SetType(errorType string) {
- v.errorType = errorType
-}
-
-// Type returns the error-type
-func (v *ResultErrorFields) Type() string {
- return v.errorType
-}
-
-// SetContext sets the JSON-context for the error
-func (v *ResultErrorFields) SetContext(context *JsonContext) {
- v.context = context
-}
-
-// Context returns the JSON-context of the error
-func (v *ResultErrorFields) Context() *JsonContext {
- return v.context
-}
-
-// SetDescription sets a description for the error
-func (v *ResultErrorFields) SetDescription(description string) {
- v.description = description
-}
-
-// Description returns the description of the error
-func (v *ResultErrorFields) Description() string {
- return v.description
-}
-
-// SetDescriptionFormat sets the format for the description in the default text/template format
-func (v *ResultErrorFields) SetDescriptionFormat(descriptionFormat string) {
- v.descriptionFormat = descriptionFormat
-}
-
-// DescriptionFormat returns the format for the description in the default text/template format
-func (v *ResultErrorFields) DescriptionFormat() string {
- return v.descriptionFormat
-}
-
-// SetValue sets the value related to the error
-func (v *ResultErrorFields) SetValue(value interface{}) {
- v.value = value
-}
-
-// Value returns the value related to the error
-func (v *ResultErrorFields) Value() interface{} {
- return v.value
-}
-
-// SetDetails sets the details specific to the error
-func (v *ResultErrorFields) SetDetails(details ErrorDetails) {
- v.details = details
-}
-
-// Details returns details about the error
-func (v *ResultErrorFields) Details() ErrorDetails {
- return v.details
-}
-
-// String returns a string representation of the error
-func (v ResultErrorFields) String() string {
- // as a fallback, the value is displayed go style
- valueString := fmt.Sprintf("%v", v.value)
-
-	// marshal the go value to json
- if v.value == nil {
- valueString = TYPE_NULL
- } else {
- if vs, err := marshalToJSONString(v.value); err == nil {
- if vs == nil {
- valueString = TYPE_NULL
- } else {
- valueString = *vs
- }
- }
- }
-
- return formatErrorDescription(Locale.ErrorFormat(), ErrorDetails{
- "context": v.context.String(),
- "description": v.description,
- "value": valueString,
- "field": v.Field(),
- })
-}
-
-// Valid indicates if no errors were found
-func (v *Result) Valid() bool {
- return len(v.errors) == 0
-}
-
-// Errors returns the errors that were found
-func (v *Result) Errors() []ResultError {
- return v.errors
-}
-
-// AddError appends a fully filled error to the error set
-// SetDescription() will be called with the result of the parsed err.DescriptionFormat()
-func (v *Result) AddError(err ResultError, details ErrorDetails) {
- if _, exists := details["context"]; !exists && err.Context() != nil {
- details["context"] = err.Context().String()
- }
-
- err.SetDescription(formatErrorDescription(err.DescriptionFormat(), details))
-
- v.errors = append(v.errors, err)
-}
-
-func (v *Result) addInternalError(err ResultError, context *JsonContext, value interface{}, details ErrorDetails) {
- newError(err, context, value, Locale, details)
- v.errors = append(v.errors, err)
- v.score -= 2 // results in a net -1 when added to the +1 we get at the end of the validation function
-}
-
-// Used to copy errors from a sub-schema to the main one
-func (v *Result) mergeErrors(otherResult *Result) {
- v.errors = append(v.errors, otherResult.Errors()...)
- v.score += otherResult.score
-}
-
-func (v *Result) incrementScore() {
- v.score++
-}
diff --git a/vendor/github.com/xeipuuv/gojsonschema/schema.go b/vendor/github.com/xeipuuv/gojsonschema/schema.go
deleted file mode 100644
index 9e93cd7..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/schema.go
+++ /dev/null
@@ -1,1087 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Defines Schema, the main entry to every subSchema.
-// Contains the parsing logic and error checking.
-//
-// created 26-02-2013
-
-package gojsonschema
-
-import (
- "errors"
- "math/big"
- "reflect"
- "regexp"
- "text/template"
-
- "github.com/xeipuuv/gojsonreference"
-)
-
-var (
- // Locale is the default locale to use
- // Library users can overwrite with their own implementation
- Locale locale = DefaultLocale{}
-
- // ErrorTemplateFuncs allows you to define custom template funcs for use in localization.
- ErrorTemplateFuncs template.FuncMap
-)
-
-// NewSchema instances a schema using the given JSONLoader
-func NewSchema(l JSONLoader) (*Schema, error) {
- return NewSchemaLoader().Compile(l)
-}
-
-// Schema holds a schema
-type Schema struct {
- documentReference gojsonreference.JsonReference
- rootSchema *subSchema
- pool *schemaPool
- referencePool *schemaReferencePool
-}
-
-func (d *Schema) parse(document interface{}, draft Draft) error {
- d.rootSchema = &subSchema{property: STRING_ROOT_SCHEMA_PROPERTY, draft: &draft}
- return d.parseSchema(document, d.rootSchema)
-}
-
-// SetRootSchemaName sets the root-schema name
-func (d *Schema) SetRootSchemaName(name string) {
- d.rootSchema.property = name
-}
-
-// Parses a subSchema
-//
-// Pretty long function ( sorry :) )... but pretty straightforward, repetitive and boring
-// Not much magic involved here, most of the job is to validate the key names and their values,
-// then the values are copied into subSchema struct
-//
-func (d *Schema) parseSchema(documentNode interface{}, currentSchema *subSchema) error {
-
- if currentSchema.draft == nil {
- if currentSchema.parent == nil {
- return errors.New("Draft not set")
- }
- currentSchema.draft = currentSchema.parent.draft
- }
-
- // As of draft 6 "true" is equivalent to an empty schema "{}" and false equals "{"not":{}}"
- if *currentSchema.draft >= Draft6 && isKind(documentNode, reflect.Bool) {
- b := documentNode.(bool)
- currentSchema.pass = &b
- return nil
- }
-
- if !isKind(documentNode, reflect.Map) {
- return errors.New(formatErrorDescription(
- Locale.ParseError(),
- ErrorDetails{
- "expected": STRING_SCHEMA,
- },
- ))
- }
-
- m := documentNode.(map[string]interface{})
-
- if currentSchema.parent == nil {
- currentSchema.ref = &d.documentReference
- currentSchema.id = &d.documentReference
- }
-
- if currentSchema.id == nil && currentSchema.parent != nil {
- currentSchema.id = currentSchema.parent.id
- }
-
- // In draft 6 the id keyword was renamed to $id
- // Hybrid mode uses the old id by default
- var keyID string
-
- switch *currentSchema.draft {
- case Draft4:
- keyID = KEY_ID
- case Hybrid:
- keyID = KEY_ID_NEW
- if existsMapKey(m, KEY_ID) {
- keyID = KEY_ID
- }
- default:
- keyID = KEY_ID_NEW
- }
- if existsMapKey(m, keyID) && !isKind(m[keyID], reflect.String) {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_STRING,
- "given": keyID,
- },
- ))
- }
- if k, ok := m[keyID].(string); ok {
- jsonReference, err := gojsonreference.NewJsonReference(k)
- if err != nil {
- return err
- }
- if currentSchema == d.rootSchema {
- currentSchema.id = &jsonReference
- } else {
- ref, err := currentSchema.parent.id.Inherits(jsonReference)
- if err != nil {
- return err
- }
- currentSchema.id = ref
- }
- }
-
- // definitions
- if existsMapKey(m, KEY_DEFINITIONS) {
- if isKind(m[KEY_DEFINITIONS], reflect.Map, reflect.Bool) {
- for _, dv := range m[KEY_DEFINITIONS].(map[string]interface{}) {
- if isKind(dv, reflect.Map, reflect.Bool) {
-
- newSchema := &subSchema{property: KEY_DEFINITIONS, parent: currentSchema}
-
- err := d.parseSchema(dv, newSchema)
-
- if err != nil {
- return err
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": STRING_ARRAY_OF_SCHEMAS,
- "given": KEY_DEFINITIONS,
- },
- ))
- }
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": STRING_ARRAY_OF_SCHEMAS,
- "given": KEY_DEFINITIONS,
- },
- ))
- }
-
- }
-
- // title
- if existsMapKey(m, KEY_TITLE) && !isKind(m[KEY_TITLE], reflect.String) {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_STRING,
- "given": KEY_TITLE,
- },
- ))
- }
- if k, ok := m[KEY_TITLE].(string); ok {
- currentSchema.title = &k
- }
-
- // description
- if existsMapKey(m, KEY_DESCRIPTION) && !isKind(m[KEY_DESCRIPTION], reflect.String) {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_STRING,
- "given": KEY_DESCRIPTION,
- },
- ))
- }
- if k, ok := m[KEY_DESCRIPTION].(string); ok {
- currentSchema.description = &k
- }
-
- // $ref
- if existsMapKey(m, KEY_REF) && !isKind(m[KEY_REF], reflect.String) {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_STRING,
- "given": KEY_REF,
- },
- ))
- }
-
- if k, ok := m[KEY_REF].(string); ok {
-
- jsonReference, err := gojsonreference.NewJsonReference(k)
- if err != nil {
- return err
- }
-
- currentSchema.ref = &jsonReference
-
- if sch, ok := d.referencePool.Get(currentSchema.ref.String()); ok {
- currentSchema.refSchema = sch
- } else {
- err := d.parseReference(documentNode, currentSchema)
-
- if err != nil {
- return err
- }
-
- return nil
- }
- }
-
- // type
- if existsMapKey(m, KEY_TYPE) {
- if isKind(m[KEY_TYPE], reflect.String) {
- if k, ok := m[KEY_TYPE].(string); ok {
- err := currentSchema.types.Add(k)
- if err != nil {
- return err
- }
- }
- } else {
- if isKind(m[KEY_TYPE], reflect.Slice) {
- arrayOfTypes := m[KEY_TYPE].([]interface{})
- for _, typeInArray := range arrayOfTypes {
- if reflect.ValueOf(typeInArray).Kind() != reflect.String {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_STRING + "/" + STRING_ARRAY_OF_STRINGS,
- "given": KEY_TYPE,
- },
- ))
- }
- if err := currentSchema.types.Add(typeInArray.(string)); err != nil {
- return err
- }
- }
-
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_STRING + "/" + STRING_ARRAY_OF_STRINGS,
- "given": KEY_TYPE,
- },
- ))
- }
- }
- }
-
- // properties
- if existsMapKey(m, KEY_PROPERTIES) {
- err := d.parseProperties(m[KEY_PROPERTIES], currentSchema)
- if err != nil {
- return err
- }
- }
-
- // additionalProperties
- if existsMapKey(m, KEY_ADDITIONAL_PROPERTIES) {
- if isKind(m[KEY_ADDITIONAL_PROPERTIES], reflect.Bool) {
- currentSchema.additionalProperties = m[KEY_ADDITIONAL_PROPERTIES].(bool)
- } else if isKind(m[KEY_ADDITIONAL_PROPERTIES], reflect.Map) {
- newSchema := &subSchema{property: KEY_ADDITIONAL_PROPERTIES, parent: currentSchema, ref: currentSchema.ref}
- currentSchema.additionalProperties = newSchema
- err := d.parseSchema(m[KEY_ADDITIONAL_PROPERTIES], newSchema)
- if err != nil {
- return errors.New(err.Error())
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_BOOLEAN + "/" + STRING_SCHEMA,
- "given": KEY_ADDITIONAL_PROPERTIES,
- },
- ))
- }
- }
-
- // patternProperties
- if existsMapKey(m, KEY_PATTERN_PROPERTIES) {
- if isKind(m[KEY_PATTERN_PROPERTIES], reflect.Map) {
- patternPropertiesMap := m[KEY_PATTERN_PROPERTIES].(map[string]interface{})
- if len(patternPropertiesMap) > 0 {
- currentSchema.patternProperties = make(map[string]*subSchema)
- for k, v := range patternPropertiesMap {
- _, err := regexp.MatchString(k, "")
- if err != nil {
- return errors.New(formatErrorDescription(
- Locale.RegexPattern(),
- ErrorDetails{"pattern": k},
- ))
- }
- newSchema := &subSchema{property: k, parent: currentSchema, ref: currentSchema.ref}
- err = d.parseSchema(v, newSchema)
- if err != nil {
- return errors.New(err.Error())
- }
- currentSchema.patternProperties[k] = newSchema
- }
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": STRING_SCHEMA,
- "given": KEY_PATTERN_PROPERTIES,
- },
- ))
- }
- }
-
- // propertyNames
- if existsMapKey(m, KEY_PROPERTY_NAMES) && *currentSchema.draft >= Draft6 {
- if isKind(m[KEY_PROPERTY_NAMES], reflect.Map, reflect.Bool) {
- newSchema := &subSchema{property: KEY_PROPERTY_NAMES, parent: currentSchema, ref: currentSchema.ref}
- currentSchema.propertyNames = newSchema
- err := d.parseSchema(m[KEY_PROPERTY_NAMES], newSchema)
- if err != nil {
- return err
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": STRING_SCHEMA,
-					"given":    KEY_PROPERTY_NAMES,
- },
- ))
- }
- }
-
- // dependencies
- if existsMapKey(m, KEY_DEPENDENCIES) {
- err := d.parseDependencies(m[KEY_DEPENDENCIES], currentSchema)
- if err != nil {
- return err
- }
- }
-
- // items
- if existsMapKey(m, KEY_ITEMS) {
- if isKind(m[KEY_ITEMS], reflect.Slice) {
- for _, itemElement := range m[KEY_ITEMS].([]interface{}) {
- if isKind(itemElement, reflect.Map, reflect.Bool) {
- newSchema := &subSchema{parent: currentSchema, property: KEY_ITEMS}
- newSchema.ref = currentSchema.ref
- currentSchema.itemsChildren = append(currentSchema.itemsChildren, newSchema)
- err := d.parseSchema(itemElement, newSchema)
- if err != nil {
- return err
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": STRING_SCHEMA + "/" + STRING_ARRAY_OF_SCHEMAS,
- "given": KEY_ITEMS,
- },
- ))
- }
- currentSchema.itemsChildrenIsSingleSchema = false
- }
- } else if isKind(m[KEY_ITEMS], reflect.Map, reflect.Bool) {
- newSchema := &subSchema{parent: currentSchema, property: KEY_ITEMS}
- newSchema.ref = currentSchema.ref
- currentSchema.itemsChildren = append(currentSchema.itemsChildren, newSchema)
- err := d.parseSchema(m[KEY_ITEMS], newSchema)
- if err != nil {
- return err
- }
- currentSchema.itemsChildrenIsSingleSchema = true
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": STRING_SCHEMA + "/" + STRING_ARRAY_OF_SCHEMAS,
- "given": KEY_ITEMS,
- },
- ))
- }
- }
-
- // additionalItems
- if existsMapKey(m, KEY_ADDITIONAL_ITEMS) {
- if isKind(m[KEY_ADDITIONAL_ITEMS], reflect.Bool) {
- currentSchema.additionalItems = m[KEY_ADDITIONAL_ITEMS].(bool)
- } else if isKind(m[KEY_ADDITIONAL_ITEMS], reflect.Map) {
- newSchema := &subSchema{property: KEY_ADDITIONAL_ITEMS, parent: currentSchema, ref: currentSchema.ref}
- currentSchema.additionalItems = newSchema
- err := d.parseSchema(m[KEY_ADDITIONAL_ITEMS], newSchema)
- if err != nil {
- return errors.New(err.Error())
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_BOOLEAN + "/" + STRING_SCHEMA,
- "given": KEY_ADDITIONAL_ITEMS,
- },
- ))
- }
- }
-
- // validation : number / integer
-
- if existsMapKey(m, KEY_MULTIPLE_OF) {
- multipleOfValue := mustBeNumber(m[KEY_MULTIPLE_OF])
- if multipleOfValue == nil {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": STRING_NUMBER,
- "given": KEY_MULTIPLE_OF,
- },
- ))
- }
- if multipleOfValue.Cmp(big.NewRat(0, 1)) <= 0 {
- return errors.New(formatErrorDescription(
- Locale.GreaterThanZero(),
- ErrorDetails{"number": KEY_MULTIPLE_OF},
- ))
- }
- currentSchema.multipleOf = multipleOfValue
- }
-
- if existsMapKey(m, KEY_MINIMUM) {
- minimumValue := mustBeNumber(m[KEY_MINIMUM])
- if minimumValue == nil {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfA(),
- ErrorDetails{"x": KEY_MINIMUM, "y": STRING_NUMBER},
- ))
- }
- currentSchema.minimum = minimumValue
- }
-
- if existsMapKey(m, KEY_EXCLUSIVE_MINIMUM) {
- switch *currentSchema.draft {
- case Draft4:
- if !isKind(m[KEY_EXCLUSIVE_MINIMUM], reflect.Bool) {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_BOOLEAN,
- "given": KEY_EXCLUSIVE_MINIMUM,
- },
- ))
- }
- if currentSchema.minimum == nil {
- return errors.New(formatErrorDescription(
- Locale.CannotBeUsedWithout(),
- ErrorDetails{"x": KEY_EXCLUSIVE_MINIMUM, "y": KEY_MINIMUM},
- ))
- }
- if m[KEY_EXCLUSIVE_MINIMUM].(bool) {
- currentSchema.exclusiveMinimum = currentSchema.minimum
- currentSchema.minimum = nil
- }
- case Hybrid:
- if isKind(m[KEY_EXCLUSIVE_MINIMUM], reflect.Bool) {
- if currentSchema.minimum == nil {
- return errors.New(formatErrorDescription(
- Locale.CannotBeUsedWithout(),
- ErrorDetails{"x": KEY_EXCLUSIVE_MINIMUM, "y": KEY_MINIMUM},
- ))
- }
- if m[KEY_EXCLUSIVE_MINIMUM].(bool) {
- currentSchema.exclusiveMinimum = currentSchema.minimum
- currentSchema.minimum = nil
- }
- } else if isJSONNumber(m[KEY_EXCLUSIVE_MINIMUM]) {
- currentSchema.exclusiveMinimum = mustBeNumber(m[KEY_EXCLUSIVE_MINIMUM])
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_BOOLEAN + "/" + TYPE_NUMBER,
- "given": KEY_EXCLUSIVE_MINIMUM,
- },
- ))
- }
- default:
- if isJSONNumber(m[KEY_EXCLUSIVE_MINIMUM]) {
- currentSchema.exclusiveMinimum = mustBeNumber(m[KEY_EXCLUSIVE_MINIMUM])
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_NUMBER,
- "given": KEY_EXCLUSIVE_MINIMUM,
- },
- ))
- }
- }
- }
-
- if existsMapKey(m, KEY_MAXIMUM) {
- maximumValue := mustBeNumber(m[KEY_MAXIMUM])
- if maximumValue == nil {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfA(),
- ErrorDetails{"x": KEY_MAXIMUM, "y": STRING_NUMBER},
- ))
- }
- currentSchema.maximum = maximumValue
- }
-
- if existsMapKey(m, KEY_EXCLUSIVE_MAXIMUM) {
- switch *currentSchema.draft {
- case Draft4:
- if !isKind(m[KEY_EXCLUSIVE_MAXIMUM], reflect.Bool) {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_BOOLEAN,
- "given": KEY_EXCLUSIVE_MAXIMUM,
- },
- ))
- }
- if currentSchema.maximum == nil {
- return errors.New(formatErrorDescription(
- Locale.CannotBeUsedWithout(),
- ErrorDetails{"x": KEY_EXCLUSIVE_MAXIMUM, "y": KEY_MAXIMUM},
- ))
- }
- if m[KEY_EXCLUSIVE_MAXIMUM].(bool) {
- currentSchema.exclusiveMaximum = currentSchema.maximum
- currentSchema.maximum = nil
- }
- case Hybrid:
- if isKind(m[KEY_EXCLUSIVE_MAXIMUM], reflect.Bool) {
- if currentSchema.maximum == nil {
- return errors.New(formatErrorDescription(
- Locale.CannotBeUsedWithout(),
- ErrorDetails{"x": KEY_EXCLUSIVE_MAXIMUM, "y": KEY_MAXIMUM},
- ))
- }
- if m[KEY_EXCLUSIVE_MAXIMUM].(bool) {
- currentSchema.exclusiveMaximum = currentSchema.maximum
- currentSchema.maximum = nil
- }
- } else if isJSONNumber(m[KEY_EXCLUSIVE_MAXIMUM]) {
- currentSchema.exclusiveMaximum = mustBeNumber(m[KEY_EXCLUSIVE_MAXIMUM])
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_BOOLEAN + "/" + TYPE_NUMBER,
- "given": KEY_EXCLUSIVE_MAXIMUM,
- },
- ))
- }
- default:
- if isJSONNumber(m[KEY_EXCLUSIVE_MAXIMUM]) {
- currentSchema.exclusiveMaximum = mustBeNumber(m[KEY_EXCLUSIVE_MAXIMUM])
- } else {
- return errors.New(formatErrorDescription(
- Locale.InvalidType(),
- ErrorDetails{
- "expected": TYPE_NUMBER,
- "given": KEY_EXCLUSIVE_MAXIMUM,
- },
- ))
- }
- }
- }
-
- // validation : string
-
- if existsMapKey(m, KEY_MIN_LENGTH) {
- minLengthIntegerValue := mustBeInteger(m[KEY_MIN_LENGTH])
- if minLengthIntegerValue == nil {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_MIN_LENGTH, "y": TYPE_INTEGER},
- ))
- }
- if *minLengthIntegerValue < 0 {
- return errors.New(formatErrorDescription(
- Locale.MustBeGTEZero(),
- ErrorDetails{"key": KEY_MIN_LENGTH},
- ))
- }
- currentSchema.minLength = minLengthIntegerValue
- }
-
- if existsMapKey(m, KEY_MAX_LENGTH) {
- maxLengthIntegerValue := mustBeInteger(m[KEY_MAX_LENGTH])
- if maxLengthIntegerValue == nil {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_MAX_LENGTH, "y": TYPE_INTEGER},
- ))
- }
- if *maxLengthIntegerValue < 0 {
- return errors.New(formatErrorDescription(
- Locale.MustBeGTEZero(),
- ErrorDetails{"key": KEY_MAX_LENGTH},
- ))
- }
- currentSchema.maxLength = maxLengthIntegerValue
- }
-
- if currentSchema.minLength != nil && currentSchema.maxLength != nil {
- if *currentSchema.minLength > *currentSchema.maxLength {
- return errors.New(formatErrorDescription(
- Locale.CannotBeGT(),
- ErrorDetails{"x": KEY_MIN_LENGTH, "y": KEY_MAX_LENGTH},
- ))
- }
- }
-
- if existsMapKey(m, KEY_PATTERN) {
- if isKind(m[KEY_PATTERN], reflect.String) {
- regexpObject, err := regexp.Compile(m[KEY_PATTERN].(string))
- if err != nil {
- return errors.New(formatErrorDescription(
- Locale.MustBeValidRegex(),
- ErrorDetails{"key": KEY_PATTERN},
- ))
- }
- currentSchema.pattern = regexpObject
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfA(),
- ErrorDetails{"x": KEY_PATTERN, "y": TYPE_STRING},
- ))
- }
- }
-
- if existsMapKey(m, KEY_FORMAT) {
- formatString, ok := m[KEY_FORMAT].(string)
- if !ok {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfType(),
- ErrorDetails{"key": KEY_FORMAT, "type": TYPE_STRING},
- ))
- }
- currentSchema.format = formatString
- }
-
- // validation : object
-
- if existsMapKey(m, KEY_MIN_PROPERTIES) {
- minPropertiesIntegerValue := mustBeInteger(m[KEY_MIN_PROPERTIES])
- if minPropertiesIntegerValue == nil {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_MIN_PROPERTIES, "y": TYPE_INTEGER},
- ))
- }
- if *minPropertiesIntegerValue < 0 {
- return errors.New(formatErrorDescription(
- Locale.MustBeGTEZero(),
- ErrorDetails{"key": KEY_MIN_PROPERTIES},
- ))
- }
- currentSchema.minProperties = minPropertiesIntegerValue
- }
-
- if existsMapKey(m, KEY_MAX_PROPERTIES) {
- maxPropertiesIntegerValue := mustBeInteger(m[KEY_MAX_PROPERTIES])
- if maxPropertiesIntegerValue == nil {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_MAX_PROPERTIES, "y": TYPE_INTEGER},
- ))
- }
- if *maxPropertiesIntegerValue < 0 {
- return errors.New(formatErrorDescription(
- Locale.MustBeGTEZero(),
- ErrorDetails{"key": KEY_MAX_PROPERTIES},
- ))
- }
- currentSchema.maxProperties = maxPropertiesIntegerValue
- }
-
- if currentSchema.minProperties != nil && currentSchema.maxProperties != nil {
- if *currentSchema.minProperties > *currentSchema.maxProperties {
- return errors.New(formatErrorDescription(
- Locale.KeyCannotBeGreaterThan(),
- ErrorDetails{"key": KEY_MIN_PROPERTIES, "y": KEY_MAX_PROPERTIES},
- ))
- }
- }
-
- if existsMapKey(m, KEY_REQUIRED) {
- if isKind(m[KEY_REQUIRED], reflect.Slice) {
- requiredValues := m[KEY_REQUIRED].([]interface{})
- for _, requiredValue := range requiredValues {
- if isKind(requiredValue, reflect.String) {
- if isStringInSlice(currentSchema.required, requiredValue.(string)) {
- return errors.New(formatErrorDescription(
- Locale.KeyItemsMustBeUnique(),
- ErrorDetails{"key": KEY_REQUIRED},
- ))
- }
- currentSchema.required = append(currentSchema.required, requiredValue.(string))
- } else {
- return errors.New(formatErrorDescription(
- Locale.KeyItemsMustBeOfType(),
- ErrorDetails{"key": KEY_REQUIRED, "type": TYPE_STRING},
- ))
- }
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_REQUIRED, "y": TYPE_ARRAY},
- ))
- }
- }
-
- // validation : array
-
- if existsMapKey(m, KEY_MIN_ITEMS) {
- minItemsIntegerValue := mustBeInteger(m[KEY_MIN_ITEMS])
- if minItemsIntegerValue == nil {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_MIN_ITEMS, "y": TYPE_INTEGER},
- ))
- }
- if *minItemsIntegerValue < 0 {
- return errors.New(formatErrorDescription(
- Locale.MustBeGTEZero(),
- ErrorDetails{"key": KEY_MIN_ITEMS},
- ))
- }
- currentSchema.minItems = minItemsIntegerValue
- }
-
- if existsMapKey(m, KEY_MAX_ITEMS) {
- maxItemsIntegerValue := mustBeInteger(m[KEY_MAX_ITEMS])
- if maxItemsIntegerValue == nil {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_MAX_ITEMS, "y": TYPE_INTEGER},
- ))
- }
- if *maxItemsIntegerValue < 0 {
- return errors.New(formatErrorDescription(
- Locale.MustBeGTEZero(),
- ErrorDetails{"key": KEY_MAX_ITEMS},
- ))
- }
- currentSchema.maxItems = maxItemsIntegerValue
- }
-
- if existsMapKey(m, KEY_UNIQUE_ITEMS) {
- if isKind(m[KEY_UNIQUE_ITEMS], reflect.Bool) {
- currentSchema.uniqueItems = m[KEY_UNIQUE_ITEMS].(bool)
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfA(),
- ErrorDetails{"x": KEY_UNIQUE_ITEMS, "y": TYPE_BOOLEAN},
- ))
- }
- }
-
- if existsMapKey(m, KEY_CONTAINS) && *currentSchema.draft >= Draft6 {
- newSchema := &subSchema{property: KEY_CONTAINS, parent: currentSchema, ref: currentSchema.ref}
- currentSchema.contains = newSchema
- err := d.parseSchema(m[KEY_CONTAINS], newSchema)
- if err != nil {
- return err
- }
- }
-
- // validation : all
-
- if existsMapKey(m, KEY_CONST) && *currentSchema.draft >= Draft6 {
- is, err := marshalWithoutNumber(m[KEY_CONST])
- if err != nil {
- return err
- }
- currentSchema._const = is
- }
-
- if existsMapKey(m, KEY_ENUM) {
- if isKind(m[KEY_ENUM], reflect.Slice) {
- for _, v := range m[KEY_ENUM].([]interface{}) {
- is, err := marshalWithoutNumber(v)
- if err != nil {
- return err
- }
- if isStringInSlice(currentSchema.enum, *is) {
- return errors.New(formatErrorDescription(
- Locale.KeyItemsMustBeUnique(),
- ErrorDetails{"key": KEY_ENUM},
- ))
- }
- currentSchema.enum = append(currentSchema.enum, *is)
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_ENUM, "y": TYPE_ARRAY},
- ))
- }
- }
-
- // validation : subSchema
-
- if existsMapKey(m, KEY_ONE_OF) {
- if isKind(m[KEY_ONE_OF], reflect.Slice) {
- for _, v := range m[KEY_ONE_OF].([]interface{}) {
- newSchema := &subSchema{property: KEY_ONE_OF, parent: currentSchema, ref: currentSchema.ref}
- currentSchema.oneOf = append(currentSchema.oneOf, newSchema)
- err := d.parseSchema(v, newSchema)
- if err != nil {
- return err
- }
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_ONE_OF, "y": TYPE_ARRAY},
- ))
- }
- }
-
- if existsMapKey(m, KEY_ANY_OF) {
- if isKind(m[KEY_ANY_OF], reflect.Slice) {
- for _, v := range m[KEY_ANY_OF].([]interface{}) {
- newSchema := &subSchema{property: KEY_ANY_OF, parent: currentSchema, ref: currentSchema.ref}
- currentSchema.anyOf = append(currentSchema.anyOf, newSchema)
- err := d.parseSchema(v, newSchema)
- if err != nil {
- return err
- }
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_ANY_OF, "y": TYPE_ARRAY},
- ))
- }
- }
-
- if existsMapKey(m, KEY_ALL_OF) {
- if isKind(m[KEY_ALL_OF], reflect.Slice) {
- for _, v := range m[KEY_ALL_OF].([]interface{}) {
- newSchema := &subSchema{property: KEY_ALL_OF, parent: currentSchema, ref: currentSchema.ref}
- currentSchema.allOf = append(currentSchema.allOf, newSchema)
- err := d.parseSchema(v, newSchema)
- if err != nil {
- return err
- }
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
-				ErrorDetails{"x": KEY_ALL_OF, "y": TYPE_ARRAY},
- ))
- }
- }
-
- if existsMapKey(m, KEY_NOT) {
- if isKind(m[KEY_NOT], reflect.Map, reflect.Bool) {
- newSchema := &subSchema{property: KEY_NOT, parent: currentSchema, ref: currentSchema.ref}
- currentSchema.not = newSchema
- err := d.parseSchema(m[KEY_NOT], newSchema)
- if err != nil {
- return err
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_NOT, "y": TYPE_OBJECT},
- ))
- }
- }
-
- if *currentSchema.draft >= Draft7 {
- if existsMapKey(m, KEY_IF) {
- if isKind(m[KEY_IF], reflect.Map, reflect.Bool) {
- newSchema := &subSchema{property: KEY_IF, parent: currentSchema, ref: currentSchema.ref}
- currentSchema._if = newSchema
- err := d.parseSchema(m[KEY_IF], newSchema)
- if err != nil {
- return err
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_IF, "y": TYPE_OBJECT},
- ))
- }
- }
-
- if existsMapKey(m, KEY_THEN) {
- if isKind(m[KEY_THEN], reflect.Map, reflect.Bool) {
- newSchema := &subSchema{property: KEY_THEN, parent: currentSchema, ref: currentSchema.ref}
- currentSchema._then = newSchema
- err := d.parseSchema(m[KEY_THEN], newSchema)
- if err != nil {
- return err
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_THEN, "y": TYPE_OBJECT},
- ))
- }
- }
-
- if existsMapKey(m, KEY_ELSE) {
- if isKind(m[KEY_ELSE], reflect.Map, reflect.Bool) {
- newSchema := &subSchema{property: KEY_ELSE, parent: currentSchema, ref: currentSchema.ref}
- currentSchema._else = newSchema
- err := d.parseSchema(m[KEY_ELSE], newSchema)
- if err != nil {
- return err
- }
- } else {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfAn(),
- ErrorDetails{"x": KEY_ELSE, "y": TYPE_OBJECT},
- ))
- }
- }
- }
-
- return nil
-}
-
-func (d *Schema) parseReference(documentNode interface{}, currentSchema *subSchema) error {
- var (
- refdDocumentNode interface{}
- dsp *schemaPoolDocument
- err error
- )
-
- newSchema := &subSchema{property: KEY_REF, parent: currentSchema, ref: currentSchema.ref}
-
- d.referencePool.Add(currentSchema.ref.String(), newSchema)
-
- dsp, err = d.pool.GetDocument(*currentSchema.ref)
- if err != nil {
- return err
- }
- newSchema.id = currentSchema.ref
-
- refdDocumentNode = dsp.Document
- newSchema.draft = dsp.Draft
-
- if err != nil {
- return err
- }
-
- if !isKind(refdDocumentNode, reflect.Map, reflect.Bool) {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfType(),
- ErrorDetails{"key": STRING_SCHEMA, "type": TYPE_OBJECT},
- ))
- }
-
- err = d.parseSchema(refdDocumentNode, newSchema)
- if err != nil {
- return err
- }
-
- currentSchema.refSchema = newSchema
-
- return nil
-
-}
-
-func (d *Schema) parseProperties(documentNode interface{}, currentSchema *subSchema) error {
-
- if !isKind(documentNode, reflect.Map) {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfType(),
- ErrorDetails{"key": STRING_PROPERTIES, "type": TYPE_OBJECT},
- ))
- }
-
- m := documentNode.(map[string]interface{})
- for k := range m {
- schemaProperty := k
- newSchema := &subSchema{property: schemaProperty, parent: currentSchema, ref: currentSchema.ref}
- currentSchema.propertiesChildren = append(currentSchema.propertiesChildren, newSchema)
- err := d.parseSchema(m[k], newSchema)
- if err != nil {
- return err
- }
- }
-
- return nil
-}
-
-func (d *Schema) parseDependencies(documentNode interface{}, currentSchema *subSchema) error {
-
- if !isKind(documentNode, reflect.Map) {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfType(),
- ErrorDetails{"key": KEY_DEPENDENCIES, "type": TYPE_OBJECT},
- ))
- }
-
- m := documentNode.(map[string]interface{})
- currentSchema.dependencies = make(map[string]interface{})
-
- for k := range m {
- switch reflect.ValueOf(m[k]).Kind() {
-
- case reflect.Slice:
- values := m[k].([]interface{})
- var valuesToRegister []string
-
- for _, value := range values {
- if !isKind(value, reflect.String) {
- return errors.New(formatErrorDescription(
- Locale.MustBeOfType(),
- ErrorDetails{
- "key": STRING_DEPENDENCY,
- "type": STRING_SCHEMA_OR_ARRAY_OF_STRINGS,
- },
- ))
- }
- valuesToRegister = append(valuesToRegister, value.(string))
- currentSchema.dependencies[k] = valuesToRegister
- }
-
- case reflect.Map, reflect.Bool:
- depSchema := &subSchema{property: k, parent: currentSchema, ref: currentSchema.ref}
- err := d.parseSchema(m[k], depSchema)
- if err != nil {
- return err
- }
- currentSchema.dependencies[k] = depSchema
-
- default:
- return errors.New(formatErrorDescription(
- Locale.MustBeOfType(),
- ErrorDetails{
- "key": STRING_DEPENDENCY,
- "type": STRING_SCHEMA_OR_ARRAY_OF_STRINGS,
- },
- ))
- }
-
- }
-
- return nil
-}
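
The deleted parseSchema above is where draft differences get reconciled; notably, exclusiveMinimum/exclusiveMaximum are booleans qualifying minimum/maximum in draft 4 but standalone numbers from draft 6 on, and the default Hybrid mode accepts either spelling. A minimal sketch of that behaviour against the upstream github.com/xeipuuv/gojsonschema package; NewStringLoader is assumed from the library's public loader API, which this diff does not show.

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	// Draft-4 spelling: a boolean that qualifies "minimum".
	draft4Style := gojsonschema.NewStringLoader(`{"minimum": 5, "exclusiveMinimum": true}`)
	// Draft-6+ spelling: a standalone number.
	draft6Style := gojsonschema.NewStringLoader(`{"exclusiveMinimum": 5}`)
	doc := gojsonschema.NewStringLoader(`5`)

	for _, schemaLoader := range []gojsonschema.JSONLoader{draft4Style, draft6Style} {
		result, err := gojsonschema.Validate(schemaLoader, doc)
		if err != nil {
			panic(err)
		}
		// 5 is not strictly greater than 5, so both forms should report invalid.
		fmt.Println(result.Valid())
	}
}
```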
diff --git a/vendor/github.com/xeipuuv/gojsonschema/schemaLoader.go b/vendor/github.com/xeipuuv/gojsonschema/schemaLoader.go
deleted file mode 100644
index 20db0c1..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/schemaLoader.go
+++ /dev/null
@@ -1,206 +0,0 @@
-// Copyright 2018 johandorland ( /~https://github.com/johandorland )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package gojsonschema
-
-import (
- "bytes"
- "errors"
-
- "github.com/xeipuuv/gojsonreference"
-)
-
-// SchemaLoader is used to load schemas
-type SchemaLoader struct {
- pool *schemaPool
- AutoDetect bool
- Validate bool
- Draft Draft
-}
-
-// NewSchemaLoader creates a new SchemaLoader
-func NewSchemaLoader() *SchemaLoader {
-
- ps := &SchemaLoader{
- pool: &schemaPool{
- schemaPoolDocuments: make(map[string]*schemaPoolDocument),
- },
- AutoDetect: true,
- Validate: false,
- Draft: Hybrid,
- }
- ps.pool.autoDetect = &ps.AutoDetect
-
- return ps
-}
-
-func (sl *SchemaLoader) validateMetaschema(documentNode interface{}) error {
-
- var (
- schema string
- err error
- )
- if sl.AutoDetect {
- schema, _, err = parseSchemaURL(documentNode)
- if err != nil {
- return err
- }
- }
-
- // If no explicit "$schema" is used, use the default metaschema associated with the draft used
- if schema == "" {
- if sl.Draft == Hybrid {
- return nil
- }
- schema = drafts.GetSchemaURL(sl.Draft)
- }
-
-	// Disable validation when loading the metaschema to prevent an infinite recursive loop
- sl.Validate = false
-
- metaSchema, err := sl.Compile(NewReferenceLoader(schema))
-
- if err != nil {
- return err
- }
-
- sl.Validate = true
-
- result := metaSchema.validateDocument(documentNode)
-
- if !result.Valid() {
- var res bytes.Buffer
- for _, err := range result.Errors() {
- res.WriteString(err.String())
- res.WriteString("\n")
- }
- return errors.New(res.String())
- }
-
- return nil
-}
-
-// AddSchemas adds an arbitrary number of schemas to the schema cache. As this function does not require
-// an explicit URL, every schema should contain an $id so that it can be referenced by the main schema
-func (sl *SchemaLoader) AddSchemas(loaders ...JSONLoader) error {
- emptyRef, _ := gojsonreference.NewJsonReference("")
-
- for _, loader := range loaders {
- doc, err := loader.LoadJSON()
-
- if err != nil {
- return err
- }
-
- if sl.Validate {
- if err := sl.validateMetaschema(doc); err != nil {
- return err
- }
- }
-
-		// Use the recursive function directly so that the schema only gets added to the pool by its $id,
-		// and not by the document's ref, which is empty
- if err = sl.pool.parseReferences(doc, emptyRef, false); err != nil {
- return err
- }
- }
-
- return nil
-}
-
-// AddSchema adds a schema under the provided URL to the schema cache
-func (sl *SchemaLoader) AddSchema(url string, loader JSONLoader) error {
-
- ref, err := gojsonreference.NewJsonReference(url)
-
- if err != nil {
- return err
- }
-
- doc, err := loader.LoadJSON()
-
- if err != nil {
- return err
- }
-
- if sl.Validate {
- if err := sl.validateMetaschema(doc); err != nil {
- return err
- }
- }
-
- return sl.pool.parseReferences(doc, ref, true)
-}
-
-// Compile loads and compiles a schema
-func (sl *SchemaLoader) Compile(rootSchema JSONLoader) (*Schema, error) {
-
- ref, err := rootSchema.JsonReference()
-
- if err != nil {
- return nil, err
- }
-
- d := Schema{}
- d.pool = sl.pool
- d.pool.jsonLoaderFactory = rootSchema.LoaderFactory()
- d.documentReference = ref
- d.referencePool = newSchemaReferencePool()
-
- var doc interface{}
- if ref.String() != "" {
- // Get document from schema pool
- spd, err := d.pool.GetDocument(d.documentReference)
- if err != nil {
- return nil, err
- }
- doc = spd.Document
- } else {
- // Load JSON directly
- doc, err = rootSchema.LoadJSON()
- if err != nil {
- return nil, err
- }
- // References need only be parsed if loading JSON directly
- // as pool.GetDocument already does this for us if loading by reference
- err = sl.pool.parseReferences(doc, ref, true)
- if err != nil {
- return nil, err
- }
- }
-
- if sl.Validate {
- if err := sl.validateMetaschema(doc); err != nil {
- return nil, err
- }
- }
-
- draft := sl.Draft
- if sl.AutoDetect {
- _, detectedDraft, err := parseSchemaURL(doc)
- if err != nil {
- return nil, err
- }
- if detectedDraft != nil {
- draft = *detectedDraft
- }
- }
-
- err = d.parse(doc, draft)
- if err != nil {
- return nil, err
- }
-
- return &d, nil
-}
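
As the AddSchemas comment notes, schemas registered without a URL are pooled purely by their $id, so a root schema can $ref them later. A rough usage sketch of the SchemaLoader API removed above, under the same NewStringLoader assumption:

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	sl := gojsonschema.NewSchemaLoader()

	// Pooled by its $id; there is no explicit URL to register it under.
	shared := gojsonschema.NewStringLoader(`{
		"$id": "https://example.com/name.json",
		"type": "string",
		"minLength": 1
	}`)
	if err := sl.AddSchemas(shared); err != nil {
		panic(err)
	}

	root := gojsonschema.NewStringLoader(`{
		"type": "object",
		"properties": {"name": {"$ref": "https://example.com/name.json"}}
	}`)
	// The $ref resolves from the pool, so nothing is fetched over the network.
	schema, err := sl.Compile(root)
	if err != nil {
		panic(err)
	}

	result, err := schema.Validate(gojsonschema.NewStringLoader(`{"name": "bob"}`))
	if err != nil {
		panic(err)
	}
	fmt.Println(result.Valid()) // expected: true
}
```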
diff --git a/vendor/github.com/xeipuuv/gojsonschema/schemaPool.go b/vendor/github.com/xeipuuv/gojsonschema/schemaPool.go
deleted file mode 100644
index 35b1cc6..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/schemaPool.go
+++ /dev/null
@@ -1,215 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Defines resources pooling.
-// Eases referencing and avoids downloading the same resource twice.
-//
-// created 26-02-2013
-
-package gojsonschema
-
-import (
- "errors"
- "fmt"
- "reflect"
-
- "github.com/xeipuuv/gojsonreference"
-)
-
-type schemaPoolDocument struct {
- Document interface{}
- Draft *Draft
-}
-
-type schemaPool struct {
- schemaPoolDocuments map[string]*schemaPoolDocument
- jsonLoaderFactory JSONLoaderFactory
- autoDetect *bool
-}
-
-func (p *schemaPool) parseReferences(document interface{}, ref gojsonreference.JsonReference, pooled bool) error {
-
- var (
- draft *Draft
- err error
- reference = ref.String()
- )
- // Only the root document should be added to the schema pool if pooled is true
- if _, ok := p.schemaPoolDocuments[reference]; pooled && ok {
- return fmt.Errorf("Reference already exists: \"%s\"", reference)
- }
-
- if *p.autoDetect {
- _, draft, err = parseSchemaURL(document)
- if err != nil {
- return err
- }
- }
-
- err = p.parseReferencesRecursive(document, ref, draft)
-
- if pooled {
- p.schemaPoolDocuments[reference] = &schemaPoolDocument{Document: document, Draft: draft}
- }
-
- return err
-}
-
-func (p *schemaPool) parseReferencesRecursive(document interface{}, ref gojsonreference.JsonReference, draft *Draft) error {
- // parseReferencesRecursive parses a JSON document and resolves all $id and $ref references.
- // For $ref references it takes into account the $id scope it is in and replaces
-	// For $ref references it takes the enclosing $id scope into account and replaces
-	// the reference with the absolute, resolved reference
- // When encountering errors it fails silently. Error handling is done when the schema
- // is syntactically parsed and any error encountered here should also come up there.
- switch m := document.(type) {
- case []interface{}:
- for _, v := range m {
- p.parseReferencesRecursive(v, ref, draft)
- }
- case map[string]interface{}:
- localRef := &ref
-
- keyID := KEY_ID_NEW
- if existsMapKey(m, KEY_ID) {
- keyID = KEY_ID
- }
- if existsMapKey(m, keyID) && isKind(m[keyID], reflect.String) {
- jsonReference, err := gojsonreference.NewJsonReference(m[keyID].(string))
- if err == nil {
- localRef, err = ref.Inherits(jsonReference)
- if err == nil {
- if _, ok := p.schemaPoolDocuments[localRef.String()]; ok {
- return fmt.Errorf("Reference already exists: \"%s\"", localRef.String())
- }
- p.schemaPoolDocuments[localRef.String()] = &schemaPoolDocument{Document: document, Draft: draft}
- }
- }
- }
-
- if existsMapKey(m, KEY_REF) && isKind(m[KEY_REF], reflect.String) {
- jsonReference, err := gojsonreference.NewJsonReference(m[KEY_REF].(string))
- if err == nil {
- absoluteRef, err := localRef.Inherits(jsonReference)
- if err == nil {
- m[KEY_REF] = absoluteRef.String()
- }
- }
- }
-
- for k, v := range m {
- // const and enums should be interpreted literally, so ignore them
- if k == KEY_CONST || k == KEY_ENUM {
- continue
- }
- // Something like a property or a dependency is not a valid schema, as it might describe properties named "$ref", "$id" or "const", etc
- // Therefore don't treat it like a schema.
- if k == KEY_PROPERTIES || k == KEY_DEPENDENCIES || k == KEY_PATTERN_PROPERTIES {
- if child, ok := v.(map[string]interface{}); ok {
- for _, v := range child {
- p.parseReferencesRecursive(v, *localRef, draft)
- }
- }
- } else {
- p.parseReferencesRecursive(v, *localRef, draft)
- }
- }
- }
- return nil
-}
-
-func (p *schemaPool) GetDocument(reference gojsonreference.JsonReference) (*schemaPoolDocument, error) {
-
- var (
- spd *schemaPoolDocument
- draft *Draft
- ok bool
- err error
- )
-
- if internalLogEnabled {
- internalLog("Get Document ( %s )", reference.String())
- }
-
- // Create a deep copy, so we can remove the fragment part later on without altering the original
- refToURL, _ := gojsonreference.NewJsonReference(reference.String())
-
- // First check if the given fragment is a location independent identifier
- // http://json-schema.org/latest/json-schema-core.html#rfc.section.8.2.3
-
- if spd, ok = p.schemaPoolDocuments[refToURL.String()]; ok {
- if internalLogEnabled {
- internalLog(" From pool")
- }
- return spd, nil
- }
-
- // If the given reference is not a location independent identifier,
-	// strip the fragment and look for a document with its base URI
-
- refToURL.GetUrl().Fragment = ""
-
- if cachedSpd, ok := p.schemaPoolDocuments[refToURL.String()]; ok {
- document, _, err := reference.GetPointer().Get(cachedSpd.Document)
-
- if err != nil {
- return nil, err
- }
-
- if internalLogEnabled {
- internalLog(" From pool")
- }
-
- spd = &schemaPoolDocument{Document: document, Draft: cachedSpd.Draft}
- p.schemaPoolDocuments[reference.String()] = spd
-
- return spd, nil
- }
-
- // It is not possible to load anything remotely that is not canonical...
- if !reference.IsCanonical() {
- return nil, errors.New(formatErrorDescription(
- Locale.ReferenceMustBeCanonical(),
- ErrorDetails{"reference": reference.String()},
- ))
- }
-
- jsonReferenceLoader := p.jsonLoaderFactory.New(reference.String())
- document, err := jsonReferenceLoader.LoadJSON()
-
- if err != nil {
- return nil, err
- }
-
- // add the whole document to the pool for potential re-use
- p.parseReferences(document, refToURL, true)
-
- _, draft, _ = parseSchemaURL(document)
-
- // resolve the potential fragment and also cache it
- document, _, err = reference.GetPointer().Get(document)
-
- if err != nil {
- return nil, err
- }
-
- return &schemaPoolDocument{Document: document, Draft: draft}, nil
-}
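
GetDocument above first looks the reference up verbatim, then strips the fragment and resolves the JSON pointer against the cached base document; the everyday "#/definitions/..." reference exercises exactly that second path. A small sketch under the same assumptions as the earlier examples:

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	schema := gojsonschema.NewStringLoader(`{
		"type": "object",
		"properties": {"port": {"$ref": "#/definitions/port"}},
		"definitions": {"port": {"type": "integer", "minimum": 1, "maximum": 65535}}
	}`)

	// "#/definitions/port" misses the verbatim lookup, so the pool strips the
	// fragment, finds the cached root document, and resolves the pointer against it.
	result, err := gojsonschema.Validate(schema, gojsonschema.NewStringLoader(`{"port": 8080}`))
	if err != nil {
		panic(err)
	}
	fmt.Println(result.Valid()) // expected: true
}
```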
diff --git a/vendor/github.com/xeipuuv/gojsonschema/schemaReferencePool.go b/vendor/github.com/xeipuuv/gojsonschema/schemaReferencePool.go
deleted file mode 100644
index 6e5e1b5..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/schemaReferencePool.go
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Pool of referenced schemas.
-//
-// created 25-06-2013
-
-package gojsonschema
-
-import (
- "fmt"
-)
-
-type schemaReferencePool struct {
- documents map[string]*subSchema
-}
-
-func newSchemaReferencePool() *schemaReferencePool {
-
- p := &schemaReferencePool{}
- p.documents = make(map[string]*subSchema)
-
- return p
-}
-
-func (p *schemaReferencePool) Get(ref string) (r *subSchema, o bool) {
-
- if internalLogEnabled {
- internalLog(fmt.Sprintf("Schema Reference ( %s )", ref))
- }
-
- if sch, ok := p.documents[ref]; ok {
- if internalLogEnabled {
- internalLog(fmt.Sprintf(" From pool"))
- }
- return sch, true
- }
-
- return nil, false
-}
-
-func (p *schemaReferencePool) Add(ref string, sch *subSchema) {
-
- if internalLogEnabled {
- internalLog(fmt.Sprintf("Add Schema Reference %s to pool", ref))
- }
- if _, ok := p.documents[ref]; !ok {
- p.documents[ref] = sch
- }
-}
diff --git a/vendor/github.com/xeipuuv/gojsonschema/schemaType.go b/vendor/github.com/xeipuuv/gojsonschema/schemaType.go
deleted file mode 100644
index 36b447a..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/schemaType.go
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Helper structure to handle schema types, and the combination of them.
-//
-// created 28-02-2013
-
-package gojsonschema
-
-import (
- "errors"
- "fmt"
- "strings"
-)
-
-type jsonSchemaType struct {
- types []string
-}
-
-// IsTyped reports whether the schema is typed, i.e. whether it declares at least one type.
-// When not typed, the schema does not need any type validation
-func (t *jsonSchemaType) IsTyped() bool {
- return len(t.types) > 0
-}
-
-func (t *jsonSchemaType) Add(etype string) error {
-
- if !isStringInSlice(JSON_TYPES, etype) {
- return errors.New(formatErrorDescription(Locale.NotAValidType(), ErrorDetails{"given": "/" + etype + "/", "expected": JSON_TYPES}))
- }
-
- if t.Contains(etype) {
- return errors.New(formatErrorDescription(Locale.Duplicated(), ErrorDetails{"type": etype}))
- }
-
- t.types = append(t.types, etype)
-
- return nil
-}
-
-func (t *jsonSchemaType) Contains(etype string) bool {
-
- for _, v := range t.types {
- if v == etype {
- return true
- }
- }
-
- return false
-}
-
-func (t *jsonSchemaType) String() string {
-
- if len(t.types) == 0 {
- return STRING_UNDEFINED // should never happen
- }
-
- // Displayed as a list [type1,type2,...]
- if len(t.types) > 1 {
- return fmt.Sprintf("[%s]", strings.Join(t.types, ","))
- }
-
- // Only one type: name only
- return t.types[0]
-}
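
jsonSchemaType is what lets a schema declare a union such as ["integer", "null"], and its String() form is what appears in type-mismatch error details. A quick sketch, with the same assumed public API:

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	schema := gojsonschema.NewStringLoader(`{"type": ["integer", "null"]}`)

	for _, doc := range []string{`3`, `null`, `"three"`} {
		result, err := gojsonschema.Validate(schema, gojsonschema.NewStringLoader(doc))
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s -> valid=%v\n", doc, result.Valid())
	}
	// Expected: 3 and null validate; "three" fails with an "expected" detail
	// rendered by String() as [integer,null].
}
```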
diff --git a/vendor/github.com/xeipuuv/gojsonschema/subSchema.go b/vendor/github.com/xeipuuv/gojsonschema/subSchema.go
deleted file mode 100644
index ec77981..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/subSchema.go
+++ /dev/null
@@ -1,149 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Defines the structure of a sub-subSchema.
-// A sub-subSchema can contain other sub-schemas.
-//
-// created 27-02-2013
-
-package gojsonschema
-
-import (
- "github.com/xeipuuv/gojsonreference"
- "math/big"
- "regexp"
-)
-
-// Constants
-const (
- KEY_SCHEMA = "$schema"
- KEY_ID = "id"
- KEY_ID_NEW = "$id"
- KEY_REF = "$ref"
- KEY_TITLE = "title"
- KEY_DESCRIPTION = "description"
- KEY_TYPE = "type"
- KEY_ITEMS = "items"
- KEY_ADDITIONAL_ITEMS = "additionalItems"
- KEY_PROPERTIES = "properties"
- KEY_PATTERN_PROPERTIES = "patternProperties"
- KEY_ADDITIONAL_PROPERTIES = "additionalProperties"
- KEY_PROPERTY_NAMES = "propertyNames"
- KEY_DEFINITIONS = "definitions"
- KEY_MULTIPLE_OF = "multipleOf"
- KEY_MINIMUM = "minimum"
- KEY_MAXIMUM = "maximum"
- KEY_EXCLUSIVE_MINIMUM = "exclusiveMinimum"
- KEY_EXCLUSIVE_MAXIMUM = "exclusiveMaximum"
- KEY_MIN_LENGTH = "minLength"
- KEY_MAX_LENGTH = "maxLength"
- KEY_PATTERN = "pattern"
- KEY_FORMAT = "format"
- KEY_MIN_PROPERTIES = "minProperties"
- KEY_MAX_PROPERTIES = "maxProperties"
- KEY_DEPENDENCIES = "dependencies"
- KEY_REQUIRED = "required"
- KEY_MIN_ITEMS = "minItems"
- KEY_MAX_ITEMS = "maxItems"
- KEY_UNIQUE_ITEMS = "uniqueItems"
- KEY_CONTAINS = "contains"
- KEY_CONST = "const"
- KEY_ENUM = "enum"
- KEY_ONE_OF = "oneOf"
- KEY_ANY_OF = "anyOf"
- KEY_ALL_OF = "allOf"
- KEY_NOT = "not"
- KEY_IF = "if"
- KEY_THEN = "then"
- KEY_ELSE = "else"
-)
-
-type subSchema struct {
- draft *Draft
-
- // basic subSchema meta properties
- id *gojsonreference.JsonReference
- title *string
- description *string
-
- property string
-
- // Quick pass/fail for boolean schemas
- pass *bool
-
- // Types associated with the subSchema
- types jsonSchemaType
-
- // Reference url
- ref *gojsonreference.JsonReference
- // Schema referenced
- refSchema *subSchema
-
- // hierarchy
- parent *subSchema
- itemsChildren []*subSchema
- itemsChildrenIsSingleSchema bool
- propertiesChildren []*subSchema
-
- // validation : number / integer
- multipleOf *big.Rat
- maximum *big.Rat
- exclusiveMaximum *big.Rat
- minimum *big.Rat
- exclusiveMinimum *big.Rat
-
- // validation : string
- minLength *int
- maxLength *int
- pattern *regexp.Regexp
- format string
-
- // validation : object
- minProperties *int
- maxProperties *int
- required []string
-
- dependencies map[string]interface{}
- additionalProperties interface{}
- patternProperties map[string]*subSchema
- propertyNames *subSchema
-
- // validation : array
- minItems *int
- maxItems *int
- uniqueItems bool
- contains *subSchema
-
- additionalItems interface{}
-
- // validation : all
-	_const *string // const is a golang keyword
- enum []string
-
- // validation : subSchema
- oneOf []*subSchema
- anyOf []*subSchema
- allOf []*subSchema
- not *subSchema
- _if *subSchema // if/else are golang keywords
- _then *subSchema
- _else *subSchema
-}
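
The pass field near the top of subSchema backs draft-6+ boolean schemas, where true behaves like an empty schema {} and false rejects everything; one practical use is forbidding a property outright. A short sketch, same assumptions as before:

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	// "false" as a property schema rejects any document carrying that property;
	// the property name here is purely illustrative.
	schema := gojsonschema.NewStringLoader(`{"properties": {"deprecatedField": false}}`)

	for _, doc := range []string{`{}`, `{"deprecatedField": 1}`} {
		result, err := gojsonschema.Validate(schema, gojsonschema.NewStringLoader(doc))
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s -> valid=%v\n", doc, result.Valid())
	}
	// Expected: {} validates; {"deprecatedField": 1} fails via the false schema.
}
```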
diff --git a/vendor/github.com/xeipuuv/gojsonschema/types.go b/vendor/github.com/xeipuuv/gojsonschema/types.go
deleted file mode 100644
index 0e6fd51..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/types.go
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Contains const types for schema and JSON.
-//
-// created 28-02-2013
-
-package gojsonschema
-
-// Type constants
-const (
- TYPE_ARRAY = `array`
- TYPE_BOOLEAN = `boolean`
- TYPE_INTEGER = `integer`
- TYPE_NUMBER = `number`
- TYPE_NULL = `null`
- TYPE_OBJECT = `object`
- TYPE_STRING = `string`
-)
-
-// JSON_TYPES hosts the list of types that are supported in JSON
-var JSON_TYPES []string
-
-// SCHEMA_TYPES hosts the list of types that are supported in schemas
-var SCHEMA_TYPES []string
-
-func init() {
- JSON_TYPES = []string{
- TYPE_ARRAY,
- TYPE_BOOLEAN,
- TYPE_INTEGER,
- TYPE_NUMBER,
- TYPE_NULL,
- TYPE_OBJECT,
- TYPE_STRING}
-
- SCHEMA_TYPES = []string{
- TYPE_ARRAY,
- TYPE_BOOLEAN,
- TYPE_INTEGER,
- TYPE_NUMBER,
- TYPE_OBJECT,
- TYPE_STRING}
-}
diff --git a/vendor/github.com/xeipuuv/gojsonschema/utils.go b/vendor/github.com/xeipuuv/gojsonschema/utils.go
deleted file mode 100644
index a17d22e..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/utils.go
+++ /dev/null
@@ -1,197 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Various utility functions.
-//
-// created 26-02-2013
-
-package gojsonschema
-
-import (
- "encoding/json"
- "math/big"
- "reflect"
-)
-
-func isKind(what interface{}, kinds ...reflect.Kind) bool {
- target := what
- if isJSONNumber(what) {
- // JSON Numbers are strings!
- target = *mustBeNumber(what)
- }
- targetKind := reflect.ValueOf(target).Kind()
- for _, kind := range kinds {
- if targetKind == kind {
- return true
- }
- }
- return false
-}
-
-func existsMapKey(m map[string]interface{}, k string) bool {
- _, ok := m[k]
- return ok
-}
-
-func isStringInSlice(s []string, what string) bool {
- for i := range s {
- if s[i] == what {
- return true
- }
- }
- return false
-}
-
-// indexStringInSlice returns the index of the first instance of 'what' in s or -1 if it is not found in s.
-func indexStringInSlice(s []string, what string) int {
- for i := range s {
- if s[i] == what {
- return i
- }
- }
- return -1
-}
-
-func marshalToJSONString(value interface{}) (*string, error) {
-
- mBytes, err := json.Marshal(value)
- if err != nil {
- return nil, err
- }
-
- sBytes := string(mBytes)
- return &sBytes, nil
-}
-
-func marshalWithoutNumber(value interface{}) (*string, error) {
-
- // The JSON is decoded using https://golang.org/pkg/encoding/json/#Decoder.UseNumber
- // This means the numbers are internally still represented as strings and therefore 1.00 is unequal to 1
- // One way to eliminate these differences is to decode and encode the JSON one more time without Decoder.UseNumber
- // so that these differences in representation are removed
-
- jsonString, err := marshalToJSONString(value)
- if err != nil {
- return nil, err
- }
-
- var document interface{}
-
- err = json.Unmarshal([]byte(*jsonString), &document)
- if err != nil {
- return nil, err
- }
-
- return marshalToJSONString(document)
-}
-
-func isJSONNumber(what interface{}) bool {
-
- switch what.(type) {
-
- case json.Number:
- return true
- }
-
- return false
-}
-
-func checkJSONInteger(what interface{}) (isInt bool) {
-
- jsonNumber := what.(json.Number)
-
- bigFloat, isValidNumber := new(big.Rat).SetString(string(jsonNumber))
-
- return isValidNumber && bigFloat.IsInt()
-
-}
-
-// same as ECMA Number.MAX_SAFE_INTEGER and Number.MIN_SAFE_INTEGER
-const (
- maxJSONFloat = float64(1<<53 - 1) // 9007199254740991.0 2^53 - 1
-	minJSONFloat = -float64(1<<53 - 1) // -9007199254740991.0  -(2^53 - 1)
-)
-
-func mustBeInteger(what interface{}) *int {
-
- if isJSONNumber(what) {
-
- number := what.(json.Number)
-
- isInt := checkJSONInteger(number)
-
- if isInt {
-
- int64Value, err := number.Int64()
- if err != nil {
- return nil
- }
-
- int32Value := int(int64Value)
- return &int32Value
- }
-
- }
-
- return nil
-}
-
-func mustBeNumber(what interface{}) *big.Rat {
-
- if isJSONNumber(what) {
- number := what.(json.Number)
- float64Value, success := new(big.Rat).SetString(string(number))
- if success {
- return float64Value
- }
- }
-
- return nil
-
-}
-
-func convertDocumentNode(val interface{}) interface{} {
-
- if lval, ok := val.([]interface{}); ok {
-
- res := []interface{}{}
- for _, v := range lval {
- res = append(res, convertDocumentNode(v))
- }
-
- return res
-
- }
-
- if mval, ok := val.(map[interface{}]interface{}); ok {
-
- res := map[string]interface{}{}
-
- for k, v := range mval {
- res[k.(string)] = convertDocumentNode(v)
- }
-
- return res
-
- }
-
- return val
-}
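
marshalWithoutNumber exists because documents are decoded with json.Decoder.UseNumber, so 1 and 1.0 would otherwise survive as distinct strings and break const/enum comparison; the round trip through plain encoding/json collapses them. A standalone sketch of the same normalization (not the library's unexported helper itself):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// normalize re-encodes a value without json.Number so that numeric
// spellings such as 1 and 1.0 collapse to the same canonical string.
func normalize(value interface{}) (string, error) {
	raw, err := json.Marshal(value)
	if err != nil {
		return "", err
	}
	var plain interface{}
	if err := json.Unmarshal(raw, &plain); err != nil {
		return "", err
	}
	out, err := json.Marshal(plain)
	return string(out), err
}

func main() {
	a, _ := normalize(json.Number("1"))
	b, _ := normalize(json.Number("1.0"))
	fmt.Println(a, b, a == b) // expected: 1 1 true
}
```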
diff --git a/vendor/github.com/xeipuuv/gojsonschema/validation.go b/vendor/github.com/xeipuuv/gojsonschema/validation.go
deleted file mode 100644
index 74091bc..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/validation.go
+++ /dev/null
@@ -1,858 +0,0 @@
-// Copyright 2015 xeipuuv ( /~https://github.com/xeipuuv )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author xeipuuv
-// author-github /~https://github.com/xeipuuv
-// author-mail xeipuuv@gmail.com
-//
-// repository-name gojsonschema
-// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language.
-//
-// description Extends Schema and subSchema, implements the validation phase.
-//
-// created 28-02-2013
-
-package gojsonschema
-
-import (
- "encoding/json"
- "math/big"
- "reflect"
- "regexp"
- "strconv"
- "strings"
- "unicode/utf8"
-)
-
-// Validate loads and validates a JSON schema
-func Validate(ls JSONLoader, ld JSONLoader) (*Result, error) {
- // load schema
- schema, err := NewSchema(ls)
- if err != nil {
- return nil, err
- }
- return schema.Validate(ld)
-}
-
-// Validate loads and validates a JSON document
-func (v *Schema) Validate(l JSONLoader) (*Result, error) {
- root, err := l.LoadJSON()
- if err != nil {
- return nil, err
- }
- return v.validateDocument(root), nil
-}
-
-func (v *Schema) validateDocument(root interface{}) *Result {
- result := &Result{}
- context := NewJsonContext(STRING_CONTEXT_ROOT, nil)
- v.rootSchema.validateRecursive(v.rootSchema, root, result, context)
- return result
-}
-
-func (v *subSchema) subValidateWithContext(document interface{}, context *JsonContext) *Result {
- result := &Result{}
- v.validateRecursive(v, document, result, context)
- return result
-}
-
-// Walker function to validate the json recursively against the subSchema
-func (v *subSchema) validateRecursive(currentSubSchema *subSchema, currentNode interface{}, result *Result, context *JsonContext) {
-
- if internalLogEnabled {
- internalLog("validateRecursive %s", context.String())
- internalLog(" %v", currentNode)
- }
-
- // Handle true/false schema as early as possible as all other fields will be nil
- if currentSubSchema.pass != nil {
- if !*currentSubSchema.pass {
- result.addInternalError(
- new(FalseError),
- context,
- currentNode,
- ErrorDetails{},
- )
- }
- return
- }
-
- // Handle referenced schemas, returns directly when a $ref is found
- if currentSubSchema.refSchema != nil {
- v.validateRecursive(currentSubSchema.refSchema, currentNode, result, context)
- return
- }
-
- // Check for null value
- if currentNode == nil {
- if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_NULL) {
- result.addInternalError(
- new(InvalidTypeError),
- context,
- currentNode,
- ErrorDetails{
- "expected": currentSubSchema.types.String(),
- "given": TYPE_NULL,
- },
- )
- return
- }
-
- currentSubSchema.validateSchema(currentSubSchema, currentNode, result, context)
- v.validateCommon(currentSubSchema, currentNode, result, context)
-
- } else { // Not a null value
-
- if isJSONNumber(currentNode) {
-
- value := currentNode.(json.Number)
-
- isInt := checkJSONInteger(value)
-
- validType := currentSubSchema.types.Contains(TYPE_NUMBER) || (isInt && currentSubSchema.types.Contains(TYPE_INTEGER))
-
- if currentSubSchema.types.IsTyped() && !validType {
-
- givenType := TYPE_INTEGER
- if !isInt {
- givenType = TYPE_NUMBER
- }
-
- result.addInternalError(
- new(InvalidTypeError),
- context,
- currentNode,
- ErrorDetails{
- "expected": currentSubSchema.types.String(),
- "given": givenType,
- },
- )
- return
- }
-
- currentSubSchema.validateSchema(currentSubSchema, value, result, context)
- v.validateNumber(currentSubSchema, value, result, context)
- v.validateCommon(currentSubSchema, value, result, context)
- v.validateString(currentSubSchema, value, result, context)
-
- } else {
-
- rValue := reflect.ValueOf(currentNode)
- rKind := rValue.Kind()
-
- switch rKind {
-
- // Slice => JSON array
-
- case reflect.Slice:
-
- if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_ARRAY) {
- result.addInternalError(
- new(InvalidTypeError),
- context,
- currentNode,
- ErrorDetails{
- "expected": currentSubSchema.types.String(),
- "given": TYPE_ARRAY,
- },
- )
- return
- }
-
- castCurrentNode := currentNode.([]interface{})
-
- currentSubSchema.validateSchema(currentSubSchema, castCurrentNode, result, context)
-
- v.validateArray(currentSubSchema, castCurrentNode, result, context)
- v.validateCommon(currentSubSchema, castCurrentNode, result, context)
-
- // Map => JSON object
-
- case reflect.Map:
- if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_OBJECT) {
- result.addInternalError(
- new(InvalidTypeError),
- context,
- currentNode,
- ErrorDetails{
- "expected": currentSubSchema.types.String(),
- "given": TYPE_OBJECT,
- },
- )
- return
- }
-
- castCurrentNode, ok := currentNode.(map[string]interface{})
- if !ok {
- castCurrentNode = convertDocumentNode(currentNode).(map[string]interface{})
- }
-
- currentSubSchema.validateSchema(currentSubSchema, castCurrentNode, result, context)
-
- v.validateObject(currentSubSchema, castCurrentNode, result, context)
- v.validateCommon(currentSubSchema, castCurrentNode, result, context)
-
- for _, pSchema := range currentSubSchema.propertiesChildren {
- nextNode, ok := castCurrentNode[pSchema.property]
- if ok {
- subContext := NewJsonContext(pSchema.property, context)
- v.validateRecursive(pSchema, nextNode, result, subContext)
- }
- }
-
- // Simple JSON values : string, number, boolean
-
- case reflect.Bool:
-
- if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_BOOLEAN) {
- result.addInternalError(
- new(InvalidTypeError),
- context,
- currentNode,
- ErrorDetails{
- "expected": currentSubSchema.types.String(),
- "given": TYPE_BOOLEAN,
- },
- )
- return
- }
-
- value := currentNode.(bool)
-
- currentSubSchema.validateSchema(currentSubSchema, value, result, context)
- v.validateNumber(currentSubSchema, value, result, context)
- v.validateCommon(currentSubSchema, value, result, context)
- v.validateString(currentSubSchema, value, result, context)
-
- case reflect.String:
-
- if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_STRING) {
- result.addInternalError(
- new(InvalidTypeError),
- context,
- currentNode,
- ErrorDetails{
- "expected": currentSubSchema.types.String(),
- "given": TYPE_STRING,
- },
- )
- return
- }
-
- value := currentNode.(string)
-
- currentSubSchema.validateSchema(currentSubSchema, value, result, context)
- v.validateNumber(currentSubSchema, value, result, context)
- v.validateCommon(currentSubSchema, value, result, context)
- v.validateString(currentSubSchema, value, result, context)
-
- }
-
- }
-
- }
-
- result.incrementScore()
-}
-
-// Different kinds of validation there, subSchema / common / array / object / string...
-func (v *subSchema) validateSchema(currentSubSchema *subSchema, currentNode interface{}, result *Result, context *JsonContext) {
-
- if internalLogEnabled {
- internalLog("validateSchema %s", context.String())
- internalLog(" %v", currentNode)
- }
-
- if len(currentSubSchema.anyOf) > 0 {
-
- validatedAnyOf := false
- var bestValidationResult *Result
-
- for _, anyOfSchema := range currentSubSchema.anyOf {
- if !validatedAnyOf {
- validationResult := anyOfSchema.subValidateWithContext(currentNode, context)
- validatedAnyOf = validationResult.Valid()
-
- if !validatedAnyOf && (bestValidationResult == nil || validationResult.score > bestValidationResult.score) {
- bestValidationResult = validationResult
- }
- }
- }
- if !validatedAnyOf {
-
- result.addInternalError(new(NumberAnyOfError), context, currentNode, ErrorDetails{})
-
- if bestValidationResult != nil {
- // add error messages of closest matching subSchema as
- // that's probably the one the user was trying to match
- result.mergeErrors(bestValidationResult)
- }
- }
- }
-
- if len(currentSubSchema.oneOf) > 0 {
-
- nbValidated := 0
- var bestValidationResult *Result
-
- for _, oneOfSchema := range currentSubSchema.oneOf {
- validationResult := oneOfSchema.subValidateWithContext(currentNode, context)
- if validationResult.Valid() {
- nbValidated++
- } else if nbValidated == 0 && (bestValidationResult == nil || validationResult.score > bestValidationResult.score) {
- bestValidationResult = validationResult
- }
- }
-
- if nbValidated != 1 {
-
- result.addInternalError(new(NumberOneOfError), context, currentNode, ErrorDetails{})
-
- if nbValidated == 0 {
- // add error messages of closest matching subSchema as
- // that's probably the one the user was trying to match
- result.mergeErrors(bestValidationResult)
- }
- }
-
- }
-
- if len(currentSubSchema.allOf) > 0 {
- nbValidated := 0
-
- for _, allOfSchema := range currentSubSchema.allOf {
- validationResult := allOfSchema.subValidateWithContext(currentNode, context)
- if validationResult.Valid() {
- nbValidated++
- }
- result.mergeErrors(validationResult)
- }
-
- if nbValidated != len(currentSubSchema.allOf) {
- result.addInternalError(new(NumberAllOfError), context, currentNode, ErrorDetails{})
- }
- }
-
- if currentSubSchema.not != nil {
- validationResult := currentSubSchema.not.subValidateWithContext(currentNode, context)
- if validationResult.Valid() {
- result.addInternalError(new(NumberNotError), context, currentNode, ErrorDetails{})
- }
- }
-
- if currentSubSchema.dependencies != nil && len(currentSubSchema.dependencies) > 0 {
- if isKind(currentNode, reflect.Map) {
- for elementKey := range currentNode.(map[string]interface{}) {
- if dependency, ok := currentSubSchema.dependencies[elementKey]; ok {
- switch dependency := dependency.(type) {
-
- case []string:
- for _, dependOnKey := range dependency {
- if _, dependencyResolved := currentNode.(map[string]interface{})[dependOnKey]; !dependencyResolved {
- result.addInternalError(
- new(MissingDependencyError),
- context,
- currentNode,
- ErrorDetails{"dependency": dependOnKey},
- )
- }
- }
-
- case *subSchema:
- dependency.validateRecursive(dependency, currentNode, result, context)
- }
- }
- }
- }
- }
-
- if currentSubSchema._if != nil {
- validationResultIf := currentSubSchema._if.subValidateWithContext(currentNode, context)
- if currentSubSchema._then != nil && validationResultIf.Valid() {
- validationResultThen := currentSubSchema._then.subValidateWithContext(currentNode, context)
- if !validationResultThen.Valid() {
- result.addInternalError(new(ConditionThenError), context, currentNode, ErrorDetails{})
- result.mergeErrors(validationResultThen)
- }
- }
- if currentSubSchema._else != nil && !validationResultIf.Valid() {
- validationResultElse := currentSubSchema._else.subValidateWithContext(currentNode, context)
- if !validationResultElse.Valid() {
- result.addInternalError(new(ConditionElseError), context, currentNode, ErrorDetails{})
- result.mergeErrors(validationResultElse)
- }
- }
- }
-
- result.incrementScore()
-}
-
-func (v *subSchema) validateCommon(currentSubSchema *subSchema, value interface{}, result *Result, context *JsonContext) {
-
- if internalLogEnabled {
- internalLog("validateCommon %s", context.String())
- internalLog(" %v", value)
- }
-
- // const:
- if currentSubSchema._const != nil {
- vString, err := marshalWithoutNumber(value)
- if err != nil {
- result.addInternalError(new(InternalError), context, value, ErrorDetails{"error": err})
- }
- if *vString != *currentSubSchema._const {
- result.addInternalError(new(ConstError),
- context,
- value,
- ErrorDetails{
- "allowed": *currentSubSchema._const,
- },
- )
- }
- }
-
- // enum:
- if len(currentSubSchema.enum) > 0 {
- vString, err := marshalWithoutNumber(value)
- if err != nil {
- result.addInternalError(new(InternalError), context, value, ErrorDetails{"error": err})
- }
- if !isStringInSlice(currentSubSchema.enum, *vString) {
- result.addInternalError(
- new(EnumError),
- context,
- value,
- ErrorDetails{
- "allowed": strings.Join(currentSubSchema.enum, ", "),
- },
- )
- }
- }
-
- result.incrementScore()
-}
-
-func (v *subSchema) validateArray(currentSubSchema *subSchema, value []interface{}, result *Result, context *JsonContext) {
-
- if internalLogEnabled {
- internalLog("validateArray %s", context.String())
- internalLog(" %v", value)
- }
-
- nbValues := len(value)
-
- // TODO explain
- if currentSubSchema.itemsChildrenIsSingleSchema {
- for i := range value {
- subContext := NewJsonContext(strconv.Itoa(i), context)
- validationResult := currentSubSchema.itemsChildren[0].subValidateWithContext(value[i], subContext)
- result.mergeErrors(validationResult)
- }
- } else {
- if currentSubSchema.itemsChildren != nil && len(currentSubSchema.itemsChildren) > 0 {
-
- nbItems := len(currentSubSchema.itemsChildren)
-
- // while we have both schemas and values, check them against each other
- for i := 0; i != nbItems && i != nbValues; i++ {
- subContext := NewJsonContext(strconv.Itoa(i), context)
- validationResult := currentSubSchema.itemsChildren[i].subValidateWithContext(value[i], subContext)
- result.mergeErrors(validationResult)
- }
-
- if nbItems < nbValues {
- // we have less schemas than elements in the instance array,
- // but that might be ok if "additionalItems" is specified.
-
- switch currentSubSchema.additionalItems.(type) {
- case bool:
- if !currentSubSchema.additionalItems.(bool) {
- result.addInternalError(new(ArrayNoAdditionalItemsError), context, value, ErrorDetails{})
- }
- case *subSchema:
- additionalItemSchema := currentSubSchema.additionalItems.(*subSchema)
- for i := nbItems; i != nbValues; i++ {
- subContext := NewJsonContext(strconv.Itoa(i), context)
- validationResult := additionalItemSchema.subValidateWithContext(value[i], subContext)
- result.mergeErrors(validationResult)
- }
- }
- }
- }
- }
-
- // minItems & maxItems
- if currentSubSchema.minItems != nil {
- if nbValues < int(*currentSubSchema.minItems) {
- result.addInternalError(
- new(ArrayMinItemsError),
- context,
- value,
- ErrorDetails{"min": *currentSubSchema.minItems},
- )
- }
- }
- if currentSubSchema.maxItems != nil {
- if nbValues > int(*currentSubSchema.maxItems) {
- result.addInternalError(
- new(ArrayMaxItemsError),
- context,
- value,
- ErrorDetails{"max": *currentSubSchema.maxItems},
- )
- }
- }
-
- // uniqueItems:
- if currentSubSchema.uniqueItems {
- var stringifiedItems = make(map[string]int)
- for j, v := range value {
- vString, err := marshalWithoutNumber(v)
- if err != nil {
- result.addInternalError(new(InternalError), context, value, ErrorDetails{"err": err})
- }
- if i, ok := stringifiedItems[*vString]; ok {
- result.addInternalError(
- new(ItemsMustBeUniqueError),
- context,
- value,
- ErrorDetails{"type": TYPE_ARRAY, "i": i, "j": j},
- )
- }
- stringifiedItems[*vString] = j
- }
- }
-
- // contains:
-
- if currentSubSchema.contains != nil {
- validatedOne := false
- var bestValidationResult *Result
-
- for i, v := range value {
- subContext := NewJsonContext(strconv.Itoa(i), context)
-
- validationResult := currentSubSchema.contains.subValidateWithContext(v, subContext)
- if validationResult.Valid() {
- validatedOne = true
- break
- } else {
- if bestValidationResult == nil || validationResult.score > bestValidationResult.score {
- bestValidationResult = validationResult
- }
- }
- }
- if !validatedOne {
- result.addInternalError(
- new(ArrayContainsError),
- context,
- value,
- ErrorDetails{},
- )
- if bestValidationResult != nil {
- result.mergeErrors(bestValidationResult)
- }
- }
- }
-
- result.incrementScore()
-}
-
-func (v *subSchema) validateObject(currentSubSchema *subSchema, value map[string]interface{}, result *Result, context *JsonContext) {
-
- if internalLogEnabled {
- internalLog("validateObject %s", context.String())
- internalLog(" %v", value)
- }
-
- // minProperties & maxProperties:
- if currentSubSchema.minProperties != nil {
- if len(value) < int(*currentSubSchema.minProperties) {
- result.addInternalError(
- new(ArrayMinPropertiesError),
- context,
- value,
- ErrorDetails{"min": *currentSubSchema.minProperties},
- )
- }
- }
- if currentSubSchema.maxProperties != nil {
- if len(value) > int(*currentSubSchema.maxProperties) {
- result.addInternalError(
- new(ArrayMaxPropertiesError),
- context,
- value,
- ErrorDetails{"max": *currentSubSchema.maxProperties},
- )
- }
- }
-
- // required:
- for _, requiredProperty := range currentSubSchema.required {
- _, ok := value[requiredProperty]
- if ok {
- result.incrementScore()
- } else {
- result.addInternalError(
- new(RequiredError),
- context,
- value,
- ErrorDetails{"property": requiredProperty},
- )
- }
- }
-
- // additionalProperty & patternProperty:
- for pk := range value {
-
- // Check whether this property is described by "properties"
- found := false
- for _, spValue := range currentSubSchema.propertiesChildren {
- if pk == spValue.property {
- found = true
- }
- }
-
- // Check whether this property is described by "patternProperties"
- ppMatch := v.validatePatternProperty(currentSubSchema, pk, value[pk], result, context)
-
- // If it is not described by neither "properties" nor "patternProperties" it must pass "additionalProperties"
- if !found && !ppMatch {
- switch ap := currentSubSchema.additionalProperties.(type) {
- case bool:
- // Handle the boolean case separately as it's cleaner to return a specific error than failing to pass the false schema
- if !ap {
- result.addInternalError(
- new(AdditionalPropertyNotAllowedError),
- context,
- value[pk],
- ErrorDetails{"property": pk},
- )
-
- }
- case *subSchema:
- validationResult := ap.subValidateWithContext(value[pk], NewJsonContext(pk, context))
- result.mergeErrors(validationResult)
- }
- }
- }
-
- // propertyNames:
- if currentSubSchema.propertyNames != nil {
- for pk := range value {
- validationResult := currentSubSchema.propertyNames.subValidateWithContext(pk, context)
- if !validationResult.Valid() {
- result.addInternalError(new(InvalidPropertyNameError),
- context,
- value, ErrorDetails{
- "property": pk,
- })
- result.mergeErrors(validationResult)
- }
- }
- }
-
- result.incrementScore()
-}
-
-func (v *subSchema) validatePatternProperty(currentSubSchema *subSchema, key string, value interface{}, result *Result, context *JsonContext) bool {
-
- if internalLogEnabled {
- internalLog("validatePatternProperty %s", context.String())
- internalLog(" %s %v", key, value)
- }
-
- validated := false
-
- for pk, pv := range currentSubSchema.patternProperties {
- if matches, _ := regexp.MatchString(pk, key); matches {
- validated = true
- subContext := NewJsonContext(key, context)
- validationResult := pv.subValidateWithContext(value, subContext)
- result.mergeErrors(validationResult)
- }
- }
-
- if !validated {
- return false
- }
-
- result.incrementScore()
- return true
-}
-
-func (v *subSchema) validateString(currentSubSchema *subSchema, value interface{}, result *Result, context *JsonContext) {
-
- // Ignore JSON numbers
- if isJSONNumber(value) {
- return
- }
-
- // Ignore non strings
- if !isKind(value, reflect.String) {
- return
- }
-
- if internalLogEnabled {
- internalLog("validateString %s", context.String())
- internalLog(" %v", value)
- }
-
- stringValue := value.(string)
-
- // minLength & maxLength:
- if currentSubSchema.minLength != nil {
- if utf8.RuneCount([]byte(stringValue)) < int(*currentSubSchema.minLength) {
- result.addInternalError(
- new(StringLengthGTEError),
- context,
- value,
- ErrorDetails{"min": *currentSubSchema.minLength},
- )
- }
- }
- if currentSubSchema.maxLength != nil {
- if utf8.RuneCount([]byte(stringValue)) > int(*currentSubSchema.maxLength) {
- result.addInternalError(
- new(StringLengthLTEError),
- context,
- value,
- ErrorDetails{"max": *currentSubSchema.maxLength},
- )
- }
- }
-
- // pattern:
- if currentSubSchema.pattern != nil {
- if !currentSubSchema.pattern.MatchString(stringValue) {
- result.addInternalError(
- new(DoesNotMatchPatternError),
- context,
- value,
- ErrorDetails{"pattern": currentSubSchema.pattern},
- )
-
- }
- }
-
- // format
- if currentSubSchema.format != "" {
- if !FormatCheckers.IsFormat(currentSubSchema.format, stringValue) {
- result.addInternalError(
- new(DoesNotMatchFormatError),
- context,
- value,
- ErrorDetails{"format": currentSubSchema.format},
- )
- }
- }
-
- result.incrementScore()
-}
-
-func (v *subSchema) validateNumber(currentSubSchema *subSchema, value interface{}, result *Result, context *JsonContext) {
-
- // Ignore non numbers
- if !isJSONNumber(value) {
- return
- }
-
- if internalLogEnabled {
- internalLog("validateNumber %s", context.String())
- internalLog(" %v", value)
- }
-
- number := value.(json.Number)
- float64Value, _ := new(big.Rat).SetString(string(number))
-
- // multipleOf:
- if currentSubSchema.multipleOf != nil {
- if q := new(big.Rat).Quo(float64Value, currentSubSchema.multipleOf); !q.IsInt() {
- result.addInternalError(
- new(MultipleOfError),
- context,
- number,
- ErrorDetails{
- "multiple": new(big.Float).SetRat(currentSubSchema.multipleOf),
- },
- )
- }
- }
-
- //maximum & exclusiveMaximum:
- if currentSubSchema.maximum != nil {
- if float64Value.Cmp(currentSubSchema.maximum) == 1 {
- result.addInternalError(
- new(NumberLTEError),
- context,
- number,
- ErrorDetails{
- "max": new(big.Float).SetRat(currentSubSchema.maximum),
- },
- )
- }
- }
- if currentSubSchema.exclusiveMaximum != nil {
- if float64Value.Cmp(currentSubSchema.exclusiveMaximum) >= 0 {
- result.addInternalError(
- new(NumberLTError),
- context,
- number,
- ErrorDetails{
- "max": new(big.Float).SetRat(currentSubSchema.exclusiveMaximum),
- },
- )
- }
- }
-
- //minimum & exclusiveMinimum:
- if currentSubSchema.minimum != nil {
- if float64Value.Cmp(currentSubSchema.minimum) == -1 {
- result.addInternalError(
- new(NumberGTEError),
- context,
- number,
- ErrorDetails{
- "min": new(big.Float).SetRat(currentSubSchema.minimum),
- },
- )
- }
- }
- if currentSubSchema.exclusiveMinimum != nil {
- if float64Value.Cmp(currentSubSchema.exclusiveMinimum) <= 0 {
- result.addInternalError(
- new(NumberGTError),
- context,
- number,
- ErrorDetails{
- "min": new(big.Float).SetRat(currentSubSchema.exclusiveMinimum),
- },
- )
- }
- }
-
- // format
- if currentSubSchema.format != "" {
- if !FormatCheckers.IsFormat(currentSubSchema.format, float64Value) {
- result.addInternalError(
- new(DoesNotMatchFormatError),
- context,
- value,
- ErrorDetails{"format": currentSubSchema.format},
- )
- }
- }
-
- result.incrementScore()
-}
diff --git a/vendor/gopkg.in/yaml.v2/go.mod b/vendor/gopkg.in/yaml.v2/go.mod
deleted file mode 100644
index 2cbb85a..0000000
--- a/vendor/gopkg.in/yaml.v2/go.mod
+++ /dev/null
@@ -1,5 +0,0 @@
-module gopkg.in/yaml.v2
-
-go 1.15
-
-require gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 440e8b6..706d0bb 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -1,18 +1,10 @@
-# github.com/beevik/etree v1.1.0
-## explicit
-github.com/beevik/etree
-# github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb
-## explicit
-github.com/xeipuuv/gojsonpointer
-# github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415
-## explicit
-github.com/xeipuuv/gojsonreference
-# github.com/xeipuuv/gojsonschema v1.2.0
-## explicit
-github.com/xeipuuv/gojsonschema
+# github.com/santhosh-tekuri/jsonschema/v5 v5.1.1
+## explicit; go 1.15
+github.com/santhosh-tekuri/jsonschema/v5
+github.com/santhosh-tekuri/jsonschema/v5/httploader
# gopkg.in/yaml.v2 v2.4.0
-## explicit
+## explicit; go 1.15
gopkg.in/yaml.v2
# sigs.k8s.io/yaml v1.2.0
-## explicit
+## explicit; go 1.12
sigs.k8s.io/yaml
diff --git a/vendor/sigs.k8s.io/yaml/go.mod b/vendor/sigs.k8s.io/yaml/go.mod
deleted file mode 100644
index 7224f34..0000000
--- a/vendor/sigs.k8s.io/yaml/go.mod
+++ /dev/null
@@ -1,8 +0,0 @@
-module sigs.k8s.io/yaml
-
-go 1.12
-
-require (
- github.com/davecgh/go-spew v1.1.1
- gopkg.in/yaml.v2 v2.2.8
-)
diff --git a/vendor/sigs.k8s.io/yaml/go.sum b/vendor/sigs.k8s.io/yaml/go.sum
deleted file mode 100644
index 76e4948..0000000
--- a/vendor/sigs.k8s.io/yaml/go.sum
+++ /dev/null
@@ -1,9 +0,0 @@
-github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
-gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
-gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
-gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=