Merge branch 'master' into track-patterns

This commit is contained in:
risk danger olson 2017-02-13 14:05:46 -07:00
commit f8b2f12d3e
57 changed files with 1996 additions and 1054 deletions

@ -146,6 +146,9 @@ release.
* rpm
* apt
* Bump homebrew version and generate the homebrew hash with `curl --location https://github.com/git-lfs/git-lfs/archive/vx.y.z.tar.gz | shasum -a 256` ([example](https://github.com/Homebrew/homebrew-core/pull/413/commits/dc0eb1f62514f48f3f5a8d01ad3bea06f78bd566))
* Create release branch for bug fixes, such as `release-1.5`.
* Increment version in `config/version.go` to the next expected version. If
v1.5 just shipped, set the version in master to `1.6-pre`, for example.
## Resources

@ -1,7 +1,15 @@
# Git Large File Storage
[![Build Status](https://travis-ci.org/git-lfs/git-lfs.svg?branch=master)](https://travis-ci.org/git-lfs/git-lfs)
[![Build status](https://ci.appveyor.com/api/projects/status/46a5yoqc3hk59bl5/branch/master?svg=true)](https://ci.appveyor.com/project/git-lfs/git-lfs/branch/master)
| Linux | macOS | Windows |
| :---- | :------ | :---- |
| [![Linux build status][1]][2] | [![macOS build status][3]][4] | [![Windows build status][5]][6] |
[1]: https://travis-ci.org/git-lfs/git-lfs.svg?branch=master
[2]: https://travis-ci.org/git-lfs/git-lfs
[3]: https://circleci.com/gh/git-lfs/git-lfs.svg?style=shield&circle-token=856152c2b02bfd236f54d21e1f581f3e4ebf47ad
[4]: https://circleci.com/gh/git-lfs/git-lfs
[5]: https://ci.appveyor.com/api/projects/status/46a5yoqc3hk59bl5/branch/master?svg=true
[6]: https://ci.appveyor.com/project/git-lfs/git-lfs/branch/master
Git LFS is a command line extension and [specification](docs/spec.md) for
managing large files with Git. The client is written in Go, with pre-compiled

@ -72,15 +72,15 @@ func lockPath(file string) (string, error) {
abs := filepath.Join(wd, file)
path := strings.TrimPrefix(abs, repo)
path = strings.TrimPrefix(path, string(os.PathSeparator))
if stat, err := os.Stat(abs); err != nil {
return "", err
return path, err
} else {
if stat.IsDir() {
return "", fmt.Errorf("lfs: cannot lock directory: %s", file)
return path, fmt.Errorf("lfs: cannot lock directory: %s", file)
}
return path[1:], nil
return path, nil
}
}

@ -32,7 +32,7 @@ func locksCommand(cmd *cobra.Command, args []string) {
}
for _, lock := range locks {
Print("%s\t%s", lock.Path, lock.Committer)
Print("%s\t%s", lock.Path, lock.Owner)
lockCount++
}

@ -116,6 +116,10 @@ ArgsLoop:
for scanner.Scan() {
line := scanner.Text()
fields := strings.Fields(line)
if len(fields) < 1 {
continue
}
pattern := fields[0]
if newline, ok := changedAttribLines[pattern]; ok {
// Replace this line (newline already embedded)

@ -4,6 +4,8 @@ import (
"encoding/json"
"os"
"github.com/git-lfs/git-lfs/git"
"github.com/git-lfs/git-lfs/locking"
"github.com/spf13/cobra"
)
@ -27,15 +29,22 @@ func unlockCommand(cmd *cobra.Command, args []string) {
if len(args) != 0 {
path, err := lockPath(args[0])
if err != nil {
if err != nil && !unlockCmdFlags.Force {
Exit("Unable to determine path: %v", err.Error())
}
// This call can early-out
unlockAbortIfFileModified(path)
err = lockClient.UnlockFile(path, unlockCmdFlags.Force)
if err != nil {
Exit("Unable to unlock: %v", err.Error())
}
} else if unlockCmdFlags.Id != "" {
// This call can early-out
unlockAbortIfFileModifiedById(unlockCmdFlags.Id, lockClient)
err := lockClient.UnlockFileById(unlockCmdFlags.Id, unlockCmdFlags.Force)
if err != nil {
Exit("Unable to unlock %v: %v", unlockCmdFlags.Id, err.Error())
@ -55,6 +64,43 @@ func unlockCommand(cmd *cobra.Command, args []string) {
Print("'%s' was unlocked", args[0])
}
func unlockAbortIfFileModified(path string) {
modified, err := git.IsFileModified(path)
if err != nil {
Exit(err.Error())
}
if modified {
if unlockCmdFlags.Force {
// Only a warning
Error("Warning: unlocking with uncommitted changes because --force")
} else {
Exit("Cannot unlock file with uncommitted changes")
}
}
}
func unlockAbortIfFileModifiedById(id string, lockClient *locking.Client) {
// Get the path so we can check the status
filter := map[string]string{"id": id}
// try local cache first
locks, _ := lockClient.SearchLocks(filter, 0, true)
if len(locks) == 0 {
// Fall back on calling server
locks, _ = lockClient.SearchLocks(filter, 0, false)
}
if len(locks) == 0 {
// Don't block if we can't determine the path, may be cleaning up old data
return
}
unlockAbortIfFileModified(locks[0].Path)
}
func init() {
if !isCommandEnabled(cfg, "locks") {
return

@ -27,14 +27,14 @@ type uploadContext struct {
trackedLocksMu *sync.Mutex
// ALL verifiable locks
ourLocks map[string]locking.Lock
theirLocks map[string]locking.Lock
ourLocks map[string]*locking.Lock
theirLocks map[string]*locking.Lock
// locks from ourLocks that were modified in this push
ownedLocks []locking.Lock
ownedLocks []*locking.Lock
// locks from theirLocks that were modified in this push
unownedLocks []locking.Lock
unownedLocks []*locking.Lock
}
func newUploadContext(remote string, dryRun bool) *uploadContext {
@ -45,8 +45,8 @@ func newUploadContext(remote string, dryRun bool) *uploadContext {
Manifest: getTransferManifest(),
DryRun: dryRun,
uploadedOids: tools.NewStringSet(),
ourLocks: make(map[string]locking.Lock),
theirLocks: make(map[string]locking.Lock),
ourLocks: make(map[string]*locking.Lock),
theirLocks: make(map[string]*locking.Lock),
trackedLocksMu: new(sync.Mutex),
}
@ -61,10 +61,10 @@ func newUploadContext(remote string, dryRun bool) *uploadContext {
Error(" Temporarily skipping check ...")
} else {
for _, l := range theirLocks {
ctx.theirLocks[l.Path] = l
l := l // copy; taking &l of the range variable would alias every map entry
ctx.theirLocks[l.Path] = &l
}
for _, l := range ourLocks {
ctx.ourLocks[l.Path] = l
l := l // copy; taking &l of the range variable would alias every map entry
ctx.ourLocks[l.Path] = &l
}
}
@ -178,7 +178,7 @@ func (c *uploadContext) Await() {
Print("Unable to push %d locked file(s):", ul)
for _, unowned := range c.unownedLocks {
Print("* %s - %s", unowned.Path, unowned.Committer)
Print("* %s - %s", unowned.Path, unowned.Owner)
}
} else if len(c.ownedLocks) > 0 {
Print("Consider unlocking your own locked file(s): (`git lfs unlock <path>`)")

@ -14,7 +14,7 @@ var (
)
const (
Version = "1.5.0"
Version = "2.0-pre"
)
func init() {

@ -6,8 +6,15 @@ goes through looks like this:
1. [Discover the LFS Server to use](./server-discovery.md).
2. [Apply Authentication](./authentication.md).
3. [Request the Batch API](./batch.md) to upload or download objects.
4. The Batch API's response dictates how the client will transfer the objects.
3. Make the request. See the Batch and File Locking API sections.
## Batch API
The Batch API is used to request the ability to transfer LFS objects with the
LFS server.
API Specification:
* [Batch API](./batch.md)
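For orientation, the request body this spec describes (mirrored by the
`http-batch-request-schema.json` file removed further down in this diff) can be
modeled with a couple of Go structs. This is an illustrative sketch, not code
from this commit:

```go
// Sketch only: Go types mirroring the Batch API request body. Field names
// follow the JSON schema; these are not the client's real types.
type batchObject struct {
	Oid  string `json:"oid"`
	Size int64  `json:"size"` // must be >= 0 per the schema
}

type batchRequest struct {
	Operation string        `json:"operation"`           // "upload" or "download"
	Transfers []string      `json:"transfers,omitempty"` // e.g. ["basic"]
	Objects   []batchObject `json:"objects"`
}
```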
Current transfer adapters include:
* [Basic](./basic-transfers.md)
@ -15,3 +22,11 @@ Current transfer adapters include:
Experimental transfer adapters include:
* Tus.io (upload only)
* [Custom](../custom-transfers.md)
## File Locking API
The File Locking API is used to create, list, and delete locks, as well as
verify that locks are respected in Git pushes.
API Specification:
* [File Locking API](./locking.md)

@ -1,5 +1,7 @@
# Git LFS Batch API
Added: v0.6
The Batch API is used to request the ability to transfer LFS objects with the
LFS server. The Batch URL is built by adding `/objects/batch` to the LFS server
URL.
@ -172,7 +174,7 @@ errors.
{
"message": "Not found",
"documentation_url": "https://git-lfs-server.com/docs/errors",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
@ -189,7 +191,7 @@ a custom header key so it does not trigger password prompts in browsers.
{
"message": "Credentials needed",
"documentation_url": "https://git-lfs-server.com/docs/errors",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```

docs/api/locking.md (new file, 436 lines)

@ -0,0 +1,436 @@
# Git LFS File Locking API
Added: v2.0
The File Locking API is used to create, list, and delete locks, as well as
verify that locks are respected in Git pushes. The locking URLs are built
by adding a suffix to the LFS Server URL.
Git remote: https://git-server.com/foo/bar
LFS server: https://git-server.com/foo/bar.git/info/lfs
Locks API: https://git-server.com/foo/bar.git/info/lfs/locks
See the [Server Discovery doc](./server-discovery.md) for more info on how LFS
builds the LFS server URL.
All File Locking requests require the following HTTP headers:
Accept: application/vnd.git-lfs+json
Content-Type: application/vnd.git-lfs+json
See the [Authentication doc](./authentication.md) for more info on how LFS
authorizes these API requests.
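As a minimal sketch (not part of this commit), attaching these headers to a
lock request in Go could look like the following; the package name,
`lfsServerURL`, and `payload` are assumptions for illustration:

```go
package lockclient // hypothetical package, for illustration only

import (
	"bytes"
	"net/http"
)

// newLockRequest builds a POST /locks request carrying the required
// File Locking media type headers described above.
func newLockRequest(lfsServerURL string, payload []byte) (*http.Request, error) {
	req, err := http.NewRequest("POST", lfsServerURL+"/locks", bytes.NewReader(payload))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Accept", "application/vnd.git-lfs+json")
	req.Header.Set("Content-Type", "application/vnd.git-lfs+json")
	return req, nil
}
```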
Note: This is the first version of the File Locking API, supporting only the
simplest use case: single branch locking. The API is designed to be extensible
as we experiment with more advanced locking scenarios, as defined in the
[original proposal](/docs/proposals/locking.md).
## Create Lock
The client creates a lock by sending the following in a `POST` to `/locks`
(appended to the LFS server url, as described above):
* `path` - String path name of the file that is locked. This should be
relative to the root of the repository working directory.
```js
// POST https://lfs-server.com/locks
// Accept: application/vnd.git-lfs+json
// Content-Type: application/vnd.git-lfs+json
// Authorization: Basic ...
{
"path": "foo/bar.zip"
}
```
### Successful Response
Successful responses return the created lock:
* `id` - String ID of the Lock. Git LFS doesn't enforce what type of ID is used,
as long as it's returned as a string.
* `path` - String path name of the locked file. This should be relative to the
root of the repository working directory.
* `locked_at` - The string ISO 8601 formatted timestamp the lock was created.
* `owner` - The name of the user that created the Lock. This should be set from
the user credentials posted when creating the lock.
```js
// HTTP/1.1 201 Created
// Content-Type: application/vnd.git-lfs+json
{
"lock": {
"id": "some-uuid",
"path": "/path/to/file",
"locked_at": "2016-05-17T15:49:06+00:00",
"owner": {
"name": "Jane Doe",
}
}
}
```
### Bad Response: Lock Exists
Lock services should reject lock creations if one already exists for the given
path on the current repository.
* `lock` - The existing Lock that clashes with the request.
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 409 Conflict
// Content-Type: application/vnd.git-lfs+json
{
"lock": {
// details of existing lock
},
"message": "already created lock",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
### Unauthorized Response
Lock servers should require that users have push access to the repository before
they can create locks.
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 403 Forbidden
// Content-Type: application/vnd.git-lfs+json
{
"message": "You must have push access to create a lock",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
### Error Response
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 500 Internal server error
// Content-Type: application/vnd.git-lfs+json
{
"message": "already created lock",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
## List Locks
The client can request the current active locks for a repository by sending a
`GET` to `/locks` (appended to the LFS server url, as described above). The
properties are sent as URI query values, instead of through a JSON body:
* `path` - Optional string path to match against locks on the server.
* `id` - Optional string ID to match against a lock on the server.
* `cursor` - The optional string value to continue listing locks. This value
should be the `next_cursor` from a previous request.
* `limit` - The integer limit of the number of locks to return. The server
should have its own upper and lower bounds on the supported limits.
```js
// GET https://lfs-server.com/locks?path=&id=&cursor=&limit=
// Accept: application/vnd.git-lfs+json
// Authorization: Basic ... (if needed)
```
### Successful Response
A successful response will list the matching locks:
* `locks` - Array of matching Lock objects. See the "Create Lock" successful
response section to see what Lock properties are possible.
* `next_cursor` - Optional string cursor that the server can return if there
are more locks matching the given filters. The client will repeat the request,
setting the `?cursor` query value with this `next_cursor` value (a paging
sketch follows the example below).
Note: If the server has no locks, it must return an empty `locks` array.
```js
// HTTP/1.1 200 Ok
// Content-Type: application/vnd.git-lfs+json
{
"locks": [
{
"id": "some-uuid",
"path": "/path/to/file",
"locked_at": "2016-05-17T15:49:06+00:00",
"owner": {
"name": "Jane Doe"
}
}
],
"next_cursor": "optional next ID",
}
```
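The cursor flow above lends itself to a simple loop. A hedged Go sketch, where
`listLocksPage` is a hypothetical helper performing one `GET /locks` request
and `Lock` is the client type shown elsewhere in this diff:

```go
// lockPage mirrors the List Locks response fields needed for paging.
type lockPage struct {
	Locks      []Lock `json:"locks"`
	NextCursor string `json:"next_cursor,omitempty"`
}

// listAllLocks keeps requesting pages until next_cursor comes back empty.
func listAllLocks(path string, limit int) ([]Lock, error) {
	var all []Lock
	cursor := ""
	for {
		page, err := listLocksPage(path, cursor, limit) // hypothetical helper
		if err != nil {
			return nil, err
		}
		all = append(all, page.Locks...)
		if page.NextCursor == "" {
			return all, nil
		}
		cursor = page.NextCursor // re-sent as the ?cursor query value
	}
}
```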
### Unauthorized Response
Lock servers should require that users have pull access to the repository before
they can list locks.
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 403 Forbidden
// Content-Type: application/vnd.git-lfs+json
{
"message": "You must have pull access to list locks",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
### Error Response
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 500 Internal server error
// Content-Type: application/vnd.git-lfs+json
{
"message": "unable to list locks",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
## List Locks for Verification
The client can use the Lock Verification endpoint to check for active locks
that can affect a Git push. For a caller, this endpoint is very similar to the
"List Locks" endpoint above, except:
* Verification requires a `POST` request.
* The `cursor` and `limit` values are sent as properties in the json request
body.
* The response includes locks partitioned into `ours` and `theirs` properties.
Clients list locks for verification by sending the following in a `POST`
to `/locks/verify` (appended to the LFS server url, as described above):
* `cursor`
* `limit`
```js
// POST https://lfs-server.com/locks/verify
// Accept: application/vnd.git-lfs+json
// Content-Type: application/vnd.git-lfs+json
// Authorization: Basic ...
{
"cursor": "optional cursor",
"limit": 100 // also optional
}
```
Note: As more advanced locking workflows are implemented, more details will
likely be added to this request body in future iterations.
### Successful Response
A successful response will list the relevant locks:
* `ours` - Array of Lock objects currently owned by the authenticated user.
* `theirs` - Array of Lock objects currently owned by other users.
* `next_cursor` - Optional string cursor that the server can return if there
are more locks matching the given filters. The client will repeat the request,
setting the `cursor` property with this `next_cursor` value.
If a Git push updates any files matching any of "our" locks, Git LFS will list
them in the push output, in case the user wants to unlock them after the push.
However, any updated files matching one of "their" locks will halt the push.
At this point, it is up to the user to resolve the lock conflict with their
team. (A sketch of this client-side check follows the example below.)
Note: If the server has no locks, it must return an empty array in the `ours` or
`theirs` properties.
```js
// HTTP/1.1 200 Ok
// Content-Type: application/vnd.git-lfs+json
{
"ours": [
{
"id": "some-uuid",
"path": "/path/to/file",
"locked_at": "2016-05-17T15:49:06+00:00",
"owner": {
"name": "Jane Doe"
}
}
],
"theirs": [],
"next_cursor": "optional next ID",
}
```
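The push behavior described above is essentially what the `uploadContext`
changes earlier in this diff implement with their `ourLocks`/`theirLocks` maps
keyed by path. A condensed, hedged sketch (`Print` is the commands-package
helper used throughout this diff; the inputs are assumptions):

```go
// checkLocks reports locks owned by the pusher and blocks on anyone else's.
func checkLocks(pushedPaths []string, ours, theirs map[string]*locking.Lock) (blocked bool) {
	for _, p := range pushedPaths {
		if l, ok := theirs[p]; ok {
			Print("* %s - %s", l.Path, l.Owner) // their lock: halt the push
			blocked = true
		} else if _, ok := ours[p]; ok {
			Print("Consider unlocking: `git lfs unlock %s`", p) // ours: informational only
		}
	}
	return blocked
}
```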
### Not Found Response
By default, an LFS server that doesn't implement any locking endpoints should
return 404. This response will not halt any Git pushes.
Any 404 will do, but Git LFS will show a better error message with a json
response.
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 404 Not found
// Content-Type: application/vnd.git-lfs+json
{
"message": "Not found",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
### Unauthorized Response
Lock servers should require that users have push access to the repository before
they can get a list of locks to verify a Git push.
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 403 Forbidden
// Content-Type: application/vnd.git-lfs+json
{
"message": "You must have push access to verify locks",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
### Error Response
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 500 Internal server error
// Content-Type: application/vnd.git-lfs+json
{
"message": "unable to list locks",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
## Delete Lock
The client can delete a lock, given its ID, by sending a `POST` to
`/locks/:id/unlock` (appended to the LFS server url, as described above):
* `force` - Optional boolean specifying that the user is deleting another user's
lock.
```js
// POST https://lfs-server.com/locks/:id/unlock
// Accept: application/vnd.git-lfs+json
// Content-Type: application/vnd.git-lfs+json
// Authorization: Basic ...
{
"force": true
}
```
### Successful Response
Successful deletions return the deleted lock. See the "Create Lock" successful
response section to see what Lock properties are possible.
```js
// HTTP/1.1 200 Ok
// Content-Type: application/vnd.git-lfs+json
{
"lock": {
"id": "some-uuid",
"path": "/path/to/file",
"locked_at": "2016-05-17T15:49:06+00:00",
"owner": {
"name": "Jane Doe"
}
}
}
```
### Unauthorized Response
Lock servers should require that users have push access to the repository before
they can delete locks. Also, if the `force` parameter is omitted, or false,
the user should only be allowed to delete locks that they created.
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 403 Forbidden
// Content-Type: application/vnd.git-lfs+json
{
"message": "You must have push access to verify locks",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```
### Error Response
* `message` - String error message.
* `request_id` - Optional String unique identifier for the request. Useful for
debugging.
* `documentation_url` - Optional String to give the user a place to report
errors.
```js
// HTTP/1.1 500 Internal server error
// Content-Type: application/vnd.git-lfs+json
{
"message": "already deleting lock",
"documentation_url": "https://lfs-server.com/docs/errors",
"request_id": "123"
}
```

@ -1,37 +0,0 @@
{
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Git LFS HTTPS Batch API Request",
"type": "object",
"properties": {
"transfers": {
"type": "array",
"items": {
"type": "string"
},
},
"operation": {
"type": "string"
},
"objects": {
"type": "array",
"items": {
"type": "object",
"properties": {
"oid": {
"type": "string"
},
"size": {
"type": "number",
"minimum": 0
},
"authenticated": {
"type": "boolean"
},
},
"required": ["oid", "size"],
"additionalProperties": false
}
}
},
"required": ["objects", "operation"]
}

@ -0,0 +1 @@
../../../tq/schemas/http-batch-request-schema.json

@ -1,83 +0,0 @@
{
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Git LFS HTTPS Batch API Response",
"type": "object",
"definitions": {
"action": {
"type": "object",
"properties": {
"href": {
"type": "string"
},
"header": {
"type": "object",
"additionalProperties": true
},
"expires_at": {
"type": "string"
}
},
"required": ["href"],
"additionalProperties": false
}
},
"properties": {
"transfer": {
"type": "string"
},
"objects": {
"type": "array",
"items": {
"type": "object",
"properties": {
"oid": {
"type": "string"
},
"size": {
"type": "number",
"minimum": 0
},
"authenticated": {
"type": "boolean"
},
"actions": {
"type": "object",
"properties": {
"download": { "$ref": "#/definitions/action" },
"upload": { "$ref": "#/definitions/action" },
"verify": { "$ref": "#/definitions/action" }
},
"additionalProperties": false
},
"error": {
"type": "object",
"properties": {
"code": {
"type": "number"
},
"message": {
"type": "string"
}
},
"required": ["code", "message"],
"additionalProperties": false
}
},
"required": ["oid", "size"],
"additionalProperties": false
}
},
"message": {
"type": "string"
},
"request_id": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
},
"required": ["objects"]
}

@ -0,0 +1 @@
../../../tq/schemas/http-batch-response-schema.json

@ -0,0 +1 @@
../../../locking/schemas/http-lock-create-request-schema.json

@ -0,0 +1 @@
../../../locking/schemas/http-lock-create-response-schema.json

@ -0,0 +1 @@
../../../locking/schemas/http-lock-delete-request-schema.json

@ -0,0 +1 @@
../../../locking/schemas/http-lock-list-response-schema.json

@ -0,0 +1 @@
../../../locking/schemas/http-lock-verify-response-schema.json

@ -17,6 +17,7 @@ import (
"sync"
"time"
lfserrors "github.com/git-lfs/git-lfs/errors"
"github.com/git-lfs/git-lfs/subprocess"
"github.com/rubyist/tracerx"
)
@ -1092,3 +1093,44 @@ func GetFilesChanged(from, to string) ([]string, error) {
return files, err
}
// IsFileModified returns whether the filepath specified is modified according
// to `git status`. A file is modified if it has uncommitted changes in the
// working copy or the index. This includes being untracked.
func IsFileModified(filepath string) (bool, error) {
args := []string{
"-c", "core.quotepath=false", // handle special chars in filenames
"status",
"--porcelain",
"--", // separator in case filename ambiguous
filepath,
}
cmd := subprocess.ExecCommand("git", args...)
outp, err := cmd.StdoutPipe()
if err != nil {
return false, lfserrors.Wrap(err, "Failed to call git status")
}
if err := cmd.Start(); err != nil {
return false, lfserrors.Wrap(err, "Failed to start git status")
}
matched := false
for scanner := bufio.NewScanner(outp); scanner.Scan(); {
line := scanner.Text()
// Porcelain format is "<I><W> <filename>"
// Where <I> = index status, <W> = working copy status
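// e.g. " M foo.dat" (modified in working copy), "M  foo.dat" (staged),
// "?? new.dat" (untracked); in each case the path begins at index 3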
if len(line) > 3 {
// Double-check even though should be only match
if strings.TrimSpace(line[3:]) == filepath {
matched = true
// keep consuming output to exit cleanly
// will typically fall straight through anyway due to 1 line output
}
}
}
if err := cmd.Wait(); err != nil {
return false, lfserrors.Wrap(err, "Git status failed")
}
return matched, nil
}

glide.lock (generated, 13 lines changed)

@ -1,5 +1,5 @@
hash: f2f29e58b092d821d790461bd12a16239d6b8ca15db6c092fd884d82d3ef2010
updated: 2016-11-22T09:41:32.528134176-07:00
hash: 53affc5afb75c731ebb7be5683c55d635afefd093f90015ae2c62ea88130a4cc
updated: 2017-02-13T11:32:39.724259135-07:00
imports:
- name: github.com/bgentry/go-netrc
version: 9fd32a8b3d3d3f9d43c341bfe098430e07609480
@ -29,7 +29,6 @@ imports:
version: 6cb3b85ef5a0efef77caef88363ec4d4b5c0976d
subpackages:
- assert
- mock
- require
- name: github.com/ThomsonReutersEikon/go-ntlm
version: b00ec39bbdd04f845950f4dbb4fd0a2c3155e830
@ -41,15 +40,13 @@ imports:
- name: github.com/xeipuuv/gojsonreference
version: e02fc20de94c78484cd5ffb007f8af96be030a45
- name: github.com/xeipuuv/gojsonschema
version: d5336c75940ef31c9ceeb0ae64cf92944bccb4ee
version: 6b67b3fab74d992bd07f72550006ab2c6907c416
testImports:
- name: github.com/davecgh/go-spew
version: 346938d642f2ec3594ed81d874461961cd0faa76
version: 5215b55f46b2b919f50a1df0eaa5886afe4e3b3d
subpackages:
- spew
- name: github.com/pmezard/go-difflib
version: 792786c7400a136282c1664665ae0a8db921c6c2
version: d8ed2627bdf02c080bf22230dbb337003b7aba2d
subpackages:
- difflib
- name: github.com/stretchr/objx
version: 1a9d0bb9f541897e62256577b352fdbc1fb4fd94

@ -33,6 +33,6 @@ import:
- package: github.com/xeipuuv/gojsonreference
version: e02fc20de94c78484cd5ffb007f8af96be030a45
- package: github.com/xeipuuv/gojsonschema
version: d5336c75940ef31c9ceeb0ae64cf92944bccb4ee
version: 6b67b3fab74d992bd07f72550006ab2c6907c416
- package: github.com/pkg/errors
version: 01fa4104b9c248c8945d14d9f128454d5b28d595

@ -20,6 +20,16 @@ func IsHTTP(err error) (*http.Response, bool) {
return nil, false
}
func ClientErrorMessage(msg, docURL, reqID string) string {
if len(docURL) > 0 {
msg += "\nDocs: " + docURL
}
if len(reqID) > 0 {
msg += "\nRequest ID: " + reqID
}
return msg
}
type ClientError struct {
Message string `json:"message"`
DocumentationUrl string `json:"documentation_url,omitempty"`
@ -32,14 +42,7 @@ func (e *ClientError) HTTPResponse() *http.Response {
}
func (e *ClientError) Error() string {
msg := e.Message
if len(e.DocumentationUrl) > 0 {
msg += "\nDocs: " + e.DocumentationUrl
}
if len(e.RequestId) > 0 {
msg += "\nRequest ID: " + e.RequestId
}
return msg
return ClientErrorMessage(e.Message, e.DocumentationUrl, e.RequestId)
}
func (c *Client) handleResponse(res *http.Response) error {

@ -1,5 +0,0 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "not-a-type"
}

@ -1,7 +0,0 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "number",
"minimum": 0,
"exclusiveMinimum": false
}

@ -1,103 +0,0 @@
package schema
import (
"bytes"
"io"
"strings"
"sync/atomic"
"github.com/git-lfs/git-lfs/errors"
"github.com/xeipuuv/gojsonschema"
)
var (
// errValidationIncomplete is an error returned when `ValidationErr()`
// is called while the reader is still processing data.
errValidationIncomplete = errors.New("lfsapi/schema: validation incomplete")
)
// state represents the set of valid states a `*Reader` (see below) can be in
type state uint32
const (
// stateNotStarted means the `*Reader` has not processed any data
stateNotStarted state = iota
// stateProcessing means the `*Reader` has received a `Read()` call at
// least once, but has not received an `io.EOF` yet.
stateProcessing
// stateProcessed means the `*Reader` has received a `Read()` call at
// least once and has gotten an `io.EOF`, meaning there is no more data
// to process.
stateProcessed
)
type Reader struct {
// r is the underlying io.Reader this one is wrapping.
r io.Reader
// buf is the buffer of data read from the underlying reader
buf *bytes.Buffer
// schema is the *gojsonschema.Schema to validate the buffer against
schema *gojsonschema.Schema
// state is the current state that this `*Reader` is in, and is updated
// atomically through functions like `atomic.CompareAndSwapUint32`.
state uint32
// result stores the result of the schema validation
result *gojsonschema.Result
// resultErr stores the (optional) error returned from the schema
// validation
resultErr error
}
var _ io.Reader = (*Reader)(nil)
// Read implements io.Reader.Read, and returns exactly the data received from
// the underlying reader.
//
// Read also advances state according to the valid `state` transitions defined
// above. When transitioning into the `stateProcessed` state, the schema is
// validated.
func (r *Reader) Read(p []byte) (n int, err error) {
atomic.CompareAndSwapUint32(&r.state, uint32(stateNotStarted), uint32(stateProcessing))
n, err = r.r.Read(p)
if err == io.EOF {
got := gojsonschema.NewStringLoader(r.buf.String())
r.result, r.resultErr = r.schema.Validate(got)
atomic.CompareAndSwapUint32(&r.state, uint32(stateProcessing), uint32(stateProcessed))
}
return
}
// ValidationErr returns an error associated with validating the data. If
// there was an error performing the validation itself, that error will be
// returned with priority. If the validation has not started, or is incomplete,
// an appropriate error will be returned.
//
// Otherwise, if any validation errors were present, an error will be returned
// containing all of the validation errors. If the data passed validation, a
// value of 'nil' will be returned instead.
func (r *Reader) ValidationErr() error {
if r.resultErr != nil {
return r.resultErr
} else {
switch state(atomic.LoadUint32(&r.state)) {
case stateNotStarted, stateProcessing:
return errValidationIncomplete
}
}
if r.result.Valid() {
return nil
}
msg := "Validation errors:\n"
for _, e := range r.result.Errors() {
msg = strings.Join([]string{msg, e.Description()}, "\n")
}
return errors.New(msg)
}

@ -1,55 +0,0 @@
package schema
import (
"io"
"io/ioutil"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestSchemaReaderWithValidPayload(t *testing.T) {
schema, err := FromJSON(ValidSchemaPath)
require.Nil(t, err)
r := schema.Reader(strings.NewReader("1"))
io.Copy(ioutil.Discard, r)
assert.Nil(t, r.ValidationErr())
}
func TestSchemaReaderWithInvalidPayload(t *testing.T) {
schema, err := FromJSON(ValidSchemaPath)
require.Nil(t, err)
r := schema.Reader(strings.NewReader("-1"))
io.Copy(ioutil.Discard, r)
assert.NotNil(t, r.ValidationErr())
}
func TestSchemaReaderBeforeValidation(t *testing.T) {
schema, err := FromJSON(ValidSchemaPath)
require.Nil(t, err)
r := schema.Reader(strings.NewReader("1"))
assert.Equal(t, errValidationIncomplete, r.ValidationErr())
}
func TestSchemaReaderDuringValidation(t *testing.T) {
schema, err := FromJSON(ValidSchemaPath)
require.Nil(t, err)
r := schema.Reader(strings.NewReader("12"))
var b [1]byte
n, err := r.Read(b[:])
assert.Equal(t, 1, n)
assert.Nil(t, err)
assert.Equal(t, errValidationIncomplete, r.ValidationErr())
}

@ -1,68 +0,0 @@
package schema
import (
"bytes"
"fmt"
"io"
"os"
"path/filepath"
"github.com/xeipuuv/gojsonschema"
)
// Schema holds a JSON schema to be used for validation against various
// payloads.
type Schema struct {
// s is the internal handle on the implementation of the JSON schema
// specification.
s *gojsonschema.Schema
}
// FromJSON constructs a new `*Schema` instance from the JSON schema at
// `schemaPath` relative to the package this code was called from.
//
// If the file could not be accessed, or was unable to be parsed as a valid JSON
// schema, an appropriate error will be returned. Otherwise, the `*Schema` will
// be returned with a nil error.
func FromJSON(schemaPath string) (*Schema, error) {
dir, err := os.Getwd()
if err != nil {
return nil, err
}
dir = filepath.ToSlash(dir)
schemaPath = filepath.Join(dir, schemaPath)
if _, err := os.Stat(schemaPath); err != nil {
return nil, err
}
schema, err := gojsonschema.NewSchema(gojsonschema.NewReferenceLoader(
// Platform compatibility: use "/" separators always for file://
fmt.Sprintf("file:///%s", filepath.ToSlash(schemaPath)),
))
if err != nil {
return nil, err
}
return &Schema{schema}, nil
}
// Reader wraps the given `io.Reader`, "r" as a `*schema.Reader`, allowing the
// contents passed through the reader to be inspected as conforming to the JSON
// schema or not.
//
// If the reader "r" already _is_ a `*schema.Reader`, it will be returned as-is.
func (s *Schema) Reader(r io.Reader) *Reader {
if sr, ok := r.(*Reader); ok {
return sr
}
rdr := &Reader{
buf: new(bytes.Buffer),
schema: s.s,
}
rdr.r = io.TeeReader(r, rdr.buf)
return rdr
}

@ -1,46 +0,0 @@
package schema
import (
"bytes"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
const (
ValidSchemaPath = "fixture/valid.json"
InvalidSchemaPath = "fixture/invalid.json"
MissingSchemaPath = "fixture/missing.json"
)
func TestCreatingAValidSchema(t *testing.T) {
_, err := FromJSON(ValidSchemaPath)
assert.Nil(t, err)
}
func TestCreatingAMissingSchema(t *testing.T) {
_, err := FromJSON(MissingSchemaPath)
assert.NotNil(t, err)
assert.True(t, os.IsNotExist(err))
}
func TestCreatingAnInvalidSchema(t *testing.T) {
_, err := FromJSON(InvalidSchemaPath)
assert.NotNil(t, err)
assert.Contains(t, err.Error(), "not-a-type is not a valid type")
}
func TestWrappingASchemaReader(t *testing.T) {
s, err := FromJSON(ValidSchemaPath)
require.Nil(t, err)
sr := s.Reader(new(bytes.Buffer))
wrapped := s.Reader(sr)
assert.Equal(t, sr, wrapped)
}

@ -17,12 +17,6 @@ type lockClient struct {
type lockRequest struct {
// Path is the path that the client would like to obtain a lock against.
Path string `json:"path"`
// LatestRemoteCommit is the SHA of the last known commit from the
// remote that we are trying to create the lock against, as found in
// `.git/refs/origin/<name>`.
LatestRemoteCommit string `json:"latest_remote_commit"`
// Committer is the individual that wishes to obtain the lock.
Committer *Committer `json:"committer"`
}
// LockResponse encapsulates the information sent over the API in response to
@ -41,12 +35,12 @@ type lockResponse struct {
// If an error was experienced in creating this lock, then the
// zero-value of Lock should be sent here instead.
Lock *Lock `json:"lock"`
// CommitNeeded holds the minimum commit SHA that client must have to
// obtain the lock.
CommitNeeded string `json:"commit_needed,omitempty"`
// Err is the optional error that was encountered while trying to create
// Message is the optional error that was encountered while trying to create
// the above lock.
Err string `json:"error,omitempty"`
Message string `json:"message,omitempty"`
DocumentationURL string `json:"documentation_url,omitempty"`
RequestID string `json:"request_id,omitempty"`
}
func (c *lockClient) Lock(remote string, lockReq *lockRequest) (*lockResponse, *http.Response, error) {
@ -67,9 +61,6 @@ func (c *lockClient) Lock(remote string, lockReq *lockRequest) (*lockResponse, *
// UnlockRequest encapsulates the data sent in an API request to remove a lock.
type unlockRequest struct {
// Id is the Id of the lock that the user wishes to unlock.
Id string `json:"id"`
// Force determines whether or not the lock should be "forcibly"
// unlocked; that is to say whether or not a given individual should be
// able to break a different individual's lock.
@ -83,15 +74,18 @@ type unlockResponse struct {
// `UnlockPayload` (see above). If no matching lock was found, this
// field will take the zero-value of Lock, and Err will be non-nil.
Lock *Lock `json:"lock"`
// Err is an optional field which holds any error that was experienced
// Message is an optional field which holds any error that was experienced
// while removing the lock.
Err string `json:"error,omitempty"`
Message string `json:"message,omitempty"`
DocumentationURL string `json:"documentation_url,omitempty"`
RequestID string `json:"request_id,omitempty"`
}
func (c *lockClient) Unlock(remote, id string, force bool) (*unlockResponse, *http.Response, error) {
e := c.Endpoints.Endpoint("upload", remote)
suffix := fmt.Sprintf("locks/%s/unlock", id)
req, err := c.NewRequest("POST", e, suffix, &unlockRequest{Id: id, Force: force})
req, err := c.NewRequest("POST", e, suffix, &unlockRequest{Force: force})
if err != nil {
return nil, nil, err
}
@ -158,10 +152,12 @@ type lockList struct {
// cursor to, if there are multiple pages of results for a particular
// `LockListRequest`.
NextCursor string `json:"next_cursor,omitempty"`
// Err populates any error that was encountered during the search. If no
// Message populates any error that was encountered during the search. If no
// error was encountered and the operation was successful, then a value
// of nil will be passed here.
Err string `json:"error,omitempty"`
Message string `json:"message,omitempty"`
DocumentationURL string `json:"documentation_url,omitempty"`
RequestID string `json:"request_id,omitempty"`
}
func (c *lockClient) Search(remote string, searchReq *lockSearchRequest) (*lockList, *http.Response, error) {
@ -219,14 +215,16 @@ type lockVerifiableList struct {
// cursor to, if there are multiple pages of results for a particular
// `LockListRequest`.
NextCursor string `json:"next_cursor,omitempty"`
// Err populates any error that was encountered during the search. If no
// Message populates any error that was encountered during the search. If no
// error was encountered and the operation was successful, then a value
// of nil will be passed here.
Err string `json:"error,omitempty"`
Message string `json:"message,omitempty"`
DocumentationURL string `json:"documentation_url,omitempty"`
RequestID string `json:"request_id,omitempty"`
}
func (c *lockClient) SearchVerifiable(remote string, vreq *lockVerifiableRequest) (*lockVerifiableList, *http.Response, error) {
e := c.Endpoints.Endpoint("download", remote)
e := c.Endpoints.Endpoint("upload", remote)
req, err := c.NewRequest("POST", e, "locks/verify", vreq)
if err != nil {
return nil, nil, err
@ -245,22 +243,18 @@ func (c *lockClient) SearchVerifiable(remote string, vreq *lockVerifiableRequest
return locks, res, err
}
// Committer represents a "First Last <email@domain.com>" pair.
type Committer struct {
// User represents the owner of a lock.
type User struct {
// Name is the name of the individual who would like to obtain the
// lock, for instance: "Rick Olson".
// lock, for instance: "Rick Sanchez".
Name string `json:"name"`
// Email is the email associated with the individual who would
// like to obtain the lock, for instance: "rick@github.com".
Email string `json:"email"`
}
func NewCommitter(name, email string) *Committer {
return &Committer{Name: name, Email: email}
func NewUser(name string) *User {
return &User{Name: name}
}
// String implements the fmt.Stringer interface by returning a string
// representation of the Committer in the format "First Last <email>".
func (c *Committer) String() string {
return fmt.Sprintf("%s <%s>", c.Name, c.Email)
// String implements the fmt.Stringer interface.
func (u *User) String() string {
return u.Name
}

@ -2,16 +2,24 @@ package locking
import (
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"strings"
"testing"
"github.com/git-lfs/git-lfs/lfsapi"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/xeipuuv/gojsonschema"
)
func TestAPILock(t *testing.T) {
require.NotNil(t, createReqSchema)
require.NotNil(t, createResSchema)
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/api/locks" {
w.WriteHeader(404)
@ -21,22 +29,26 @@ func TestAPILock(t *testing.T) {
assert.Equal(t, "POST", r.Method)
assert.Equal(t, lfsapi.MediaType, r.Header.Get("Accept"))
assert.Equal(t, lfsapi.MediaType, r.Header.Get("Content-Type"))
assert.Equal(t, "61", r.Header.Get("Content-Length"))
assert.Equal(t, "18", r.Header.Get("Content-Length"))
reqLoader, body := gojsonschema.NewReaderLoader(r.Body)
lockReq := &lockRequest{}
err := json.NewDecoder(r.Body).Decode(lockReq)
err := json.NewDecoder(body).Decode(lockReq)
r.Body.Close()
assert.Nil(t, err)
assert.Equal(t, "request", lockReq.Path)
assertSchema(t, createReqSchema, reqLoader)
w.Header().Set("Content-Type", "application/json")
err = json.NewEncoder(w).Encode(&lockResponse{
resLoader, resWriter := gojsonschema.NewWriterLoader(w)
err = json.NewEncoder(resWriter).Encode(&lockResponse{
Lock: &Lock{
Id: "1",
Path: "response",
},
})
assert.Nil(t, err)
assertSchema(t, createResSchema, resLoader)
}))
defer srv.Close()
@ -54,6 +66,9 @@ func TestAPILock(t *testing.T) {
}
func TestAPIUnlock(t *testing.T) {
require.NotNil(t, delReqSchema)
require.NotNil(t, createResSchema)
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/api/locks/123/unlock" {
w.WriteHeader(404)
@ -64,21 +79,24 @@ func TestAPIUnlock(t *testing.T) {
assert.Equal(t, lfsapi.MediaType, r.Header.Get("Accept"))
assert.Equal(t, lfsapi.MediaType, r.Header.Get("Content-Type"))
reqLoader, body := gojsonschema.NewReaderLoader(r.Body)
unlockReq := &unlockRequest{}
err := json.NewDecoder(r.Body).Decode(unlockReq)
err := json.NewDecoder(body).Decode(unlockReq)
r.Body.Close()
assert.Nil(t, err)
assert.Equal(t, "123", unlockReq.Id)
assert.True(t, unlockReq.Force)
assertSchema(t, delReqSchema, reqLoader)
w.Header().Set("Content-Type", "application/json")
err = json.NewEncoder(w).Encode(&unlockResponse{
resLoader, resWriter := gojsonschema.NewWriterLoader(w)
err = json.NewEncoder(resWriter).Encode(&unlockResponse{
Lock: &Lock{
Id: "123",
Path: "response",
},
})
assert.Nil(t, err)
assertSchema(t, createResSchema, resLoader)
}))
defer srv.Close()
@ -96,6 +114,8 @@ func TestAPIUnlock(t *testing.T) {
}
func TestAPISearch(t *testing.T) {
require.NotNil(t, listResSchema)
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/api/locks" {
w.WriteHeader(404)
@ -112,13 +132,15 @@ func TestAPISearch(t *testing.T) {
assert.Equal(t, "5", q.Get("limit"))
w.Header().Set("Content-Type", "application/json")
err := json.NewEncoder(w).Encode(&lockList{
resLoader, resWriter := gojsonschema.NewWriterLoader(w)
err := json.NewEncoder(resWriter).Encode(&lockList{
Locks: []Lock{
{Id: "1"},
{Id: "2"},
},
})
assert.Nil(t, err)
assertSchema(t, listResSchema, resLoader)
}))
defer srv.Close()
@ -143,6 +165,8 @@ func TestAPISearch(t *testing.T) {
}
func TestAPIVerifiableLocks(t *testing.T) {
require.NotNil(t, verifyResSchema)
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/api/locks/verify" {
w.WriteHeader(404)
@ -160,7 +184,8 @@ func TestAPIVerifiableLocks(t *testing.T) {
}
w.Header().Set("Content-Type", "application/json")
err := json.NewEncoder(w).Encode(&lockVerifiableList{
resLoader, resWriter := gojsonschema.NewWriterLoader(w)
err := json.NewEncoder(resWriter).Encode(&lockVerifiableList{
Ours: []Lock{
{Id: "1"},
{Id: "2"},
@ -170,6 +195,7 @@ func TestAPIVerifiableLocks(t *testing.T) {
},
})
assert.Nil(t, err)
assertSchema(t, verifyResSchema, resLoader)
}))
defer srv.Close()
@ -191,3 +217,55 @@ func TestAPIVerifiableLocks(t *testing.T) {
assert.Equal(t, 1, len(locks.Theirs))
assert.Equal(t, "3", locks.Theirs[0].Id)
}
var (
createReqSchema *sourcedSchema
createResSchema *sourcedSchema
delReqSchema *sourcedSchema
listResSchema *sourcedSchema
verifyResSchema *sourcedSchema
)
func init() {
wd, err := os.Getwd()
if err != nil {
fmt.Println("getwd error:", err)
return
}
createReqSchema = getSchema(wd, "schemas/http-lock-create-request-schema.json")
createResSchema = getSchema(wd, "schemas/http-lock-create-response-schema.json")
delReqSchema = getSchema(wd, "schemas/http-lock-delete-request-schema.json")
listResSchema = getSchema(wd, "schemas/http-lock-list-response-schema.json")
verifyResSchema = getSchema(wd, "schemas/http-lock-verify-response-schema.json")
}
type sourcedSchema struct {
Source string
*gojsonschema.Schema
}
func getSchema(wd, relpath string) *sourcedSchema {
abspath := filepath.ToSlash(filepath.Join(wd, relpath))
s, err := gojsonschema.NewSchema(gojsonschema.NewReferenceLoader(fmt.Sprintf("file:///%s", abspath)))
if err != nil {
fmt.Printf("schema load error for %q: %+v\n", relpath, err)
}
return &sourcedSchema{Source: relpath, Schema: s}
}
func assertSchema(t *testing.T, schema *sourcedSchema, dataLoader gojsonschema.JSONLoader) {
res, err := schema.Validate(dataLoader)
if assert.Nil(t, err) {
if res.Valid() {
return
}
resErrors := res.Errors()
valErrors := make([]string, 0, len(resErrors))
for _, resErr := range resErrors {
valErrors = append(valErrors, resErr.String())
}
t.Errorf("Schema: %s\n%s", schema.Source, strings.Join(valErrors, "\n"))
}
}

@ -9,7 +9,6 @@ import (
"github.com/git-lfs/git-lfs/errors"
"github.com/git-lfs/git-lfs/filepathfilter"
"github.com/git-lfs/git-lfs/git"
"github.com/git-lfs/git-lfs/lfsapi"
"github.com/git-lfs/git-lfs/tools"
"github.com/git-lfs/git-lfs/tools/kv"
@ -89,25 +88,18 @@ func (c *Client) Close() error {
// path must be relative to the root of the repository
// Returns the lock id if successful, or an error
func (c *Client) LockFile(path string) (Lock, error) {
// TODO: this is not really the constraint we need to avoid merges, improve as per proposal
latest, err := git.CurrentRemoteRef()
if err != nil {
return Lock{}, err
}
lockReq := &lockRequest{
Path: path,
LatestRemoteCommit: latest.Sha,
Committer: NewCommitter(c.client.CurrentUser()),
}
lockRes, _, err := c.client.Lock(c.Remote, lockReq)
lockRes, _, err := c.client.Lock(c.Remote, &lockRequest{Path: path})
if err != nil {
return Lock{}, errors.Wrap(err, "api")
}
if len(lockRes.Err) > 0 {
return Lock{}, fmt.Errorf("Server unable to create lock: %v", lockRes.Err)
if len(lockRes.Message) > 0 {
return Lock{}, fmt.Errorf("Server unable to create lock: %s",
lfsapi.ClientErrorMessage(
lockRes.Message,
lockRes.DocumentationURL,
lockRes.RequestID,
))
}
lock := *lockRes.Lock
@ -153,8 +145,13 @@ func (c *Client) UnlockFileById(id string, force bool) error {
return errors.Wrap(err, "api")
}
if len(unlockRes.Err) > 0 {
return fmt.Errorf("Server unable to unlock: %s", unlockRes.Err)
if len(unlockRes.Message) > 0 {
return fmt.Errorf("Server unable to unlock: %s",
lfsapi.ClientErrorMessage(
unlockRes.Message,
unlockRes.DocumentationURL,
unlockRes.RequestID,
))
}
if err := c.cache.RemoveById(id); err != nil {
@ -172,9 +169,8 @@ type Lock struct {
// Path is an absolute path to the file that is locked as a part of this
// lock.
Path string `json:"path"`
// Committer is the identity of the person who holds the ownership of
// this lock.
Committer *Committer `json:"committer"`
// Owner is the identity of the user that created this lock.
Owner *User `json:"owner,omitempty"`
// LockedAt is the time at which this lock was acquired.
LockedAt time.Time `json:"locked_at"`
}
@ -203,8 +199,13 @@ func (c *Client) VerifiableLocks(limit int) (ourLocks, theirLocks []Lock, err er
return ourLocks, theirLocks, err
}
if list.Err != "" {
return ourLocks, theirLocks, errors.New(list.Err)
if list.Message != "" {
return ourLocks, theirLocks, fmt.Errorf("Server error searching locks: %s",
lfsapi.ClientErrorMessage(
list.Message,
list.DocumentationURL,
list.RequestID,
))
}
for _, l := range list.Ours {
@ -266,8 +267,13 @@ func (c *Client) searchRemoteLocks(filter map[string]string, limit int) ([]Lock,
return locks, errors.Wrap(err, "locking")
}
if list.Err != "" {
return locks, errors.Wrap(err, "locking")
if list.Message != "" {
return locks, fmt.Errorf("Server error searching for locks: %s",
lfsapi.ClientErrorMessage(
list.Message,
list.DocumentationURL,
list.RequestID,
))
}
for _, l := range list.Locks {
@ -318,33 +324,26 @@ func (c *Client) lockIdFromPath(path string) (string, error) {
}
}
// Fetch locked files for the current committer and cache them locally
// Fetch locked files for the current user and cache them locally
// This can be used to sync up locked files when moving machines
func (c *Client) refreshLockCache() error {
// TODO: filters don't seem to currently define how to search for a
// committer's email. Is it "committer.email"? For now, just iterate
locks, err := c.SearchLocks(nil, 0, false)
ourLocks, _, err := c.VerifiableLocks(0)
if err != nil {
return err
}
// We're going to overwrite the entire local cache
c.cache.Clear()
_, email := c.client.CurrentUser()
for _, l := range locks {
if l.Committer.Email == email {
c.cache.Add(l)
}
for _, l := range ourLocks {
c.cache.Add(l)
}
return nil
}
// IsFileLockedByCurrentCommitter returns whether a file is locked by the
// current committer, as cached locally
// current user, as cached locally
func (c *Client) IsFileLockedByCurrentCommitter(path string) bool {
filter := map[string]string{"path": path}
locks, err := c.searchCachedLocks(filter, 1)
if err != nil {

@ -26,17 +26,19 @@ func TestRefreshCache(t *testing.T) {
assert.Nil(t, err)
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "GET", r.Method)
assert.Equal(t, "/api/locks", r.URL.Path)
assert.Equal(t, "POST", r.Method)
assert.Equal(t, "/api/locks/verify", r.URL.Path)
w.Header().Set("Content-Type", "application/json")
err = json.NewEncoder(w).Encode(lockList{
Locks: []Lock{
Lock{Id: "99", Path: "folder/test3.dat", Committer: &Committer{Name: "Alice", Email: "alice@wonderland.com"}},
Lock{Id: "101", Path: "folder/test1.dat", Committer: &Committer{Name: "Fred", Email: "fred@bloggs.com"}},
Lock{Id: "102", Path: "folder/test2.dat", Committer: &Committer{Name: "Fred", Email: "fred@bloggs.com"}},
Lock{Id: "103", Path: "root.dat", Committer: &Committer{Name: "Fred", Email: "fred@bloggs.com"}},
Lock{Id: "199", Path: "other/test1.dat", Committer: &Committer{Name: "Charles", Email: "charles@incharge.com"}},
err = json.NewEncoder(w).Encode(lockVerifiableList{
Theirs: []Lock{
Lock{Id: "99", Path: "folder/test3.dat", Owner: &User{Name: "Alice"}},
Lock{Id: "199", Path: "other/test1.dat", Owner: &User{Name: "Charles"}},
},
Ours: []Lock{
Lock{Id: "101", Path: "folder/test1.dat", Owner: &User{Name: "Fred"}},
Lock{Id: "102", Path: "folder/test2.dat", Owner: &User{Name: "Fred"}},
Lock{Id: "103", Path: "root.dat", Owner: &User{Name: "Fred"}},
},
})
assert.Nil(t, err)
@ -74,9 +76,9 @@ func TestRefreshCache(t *testing.T) {
// Sort locks for stable comparison
sort.Sort(LocksById(locks))
assert.Equal(t, []Lock{
Lock{Path: "folder/test1.dat", Id: "101", Committer: &Committer{Name: "Fred", Email: "fred@bloggs.com"}, LockedAt: zeroTime},
Lock{Path: "folder/test2.dat", Id: "102", Committer: &Committer{Name: "Fred", Email: "fred@bloggs.com"}, LockedAt: zeroTime},
Lock{Path: "root.dat", Id: "103", Committer: &Committer{Name: "Fred", Email: "fred@bloggs.com"}, LockedAt: zeroTime},
Lock{Path: "folder/test1.dat", Id: "101", Owner: &User{Name: "Fred"}, LockedAt: zeroTime},
Lock{Path: "folder/test2.dat", Id: "102", Owner: &User{Name: "Fred"}, LockedAt: zeroTime},
Lock{Path: "root.dat", Id: "103", Owner: &User{Name: "Fred"}, LockedAt: zeroTime},
}, locks)
}

@ -0,0 +1,11 @@
{
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Git LFS HTTPS Lock Creation API Request",
"type": "object",
"properties": {
"path": {
"type": "string"
}
},
"required": ["path"]
}

@ -0,0 +1,40 @@
{
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Git LFS HTTPS Lock Creation API Response",
"type": "object",
"properties": {
"lock": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"path": {
"type": "string"
},
"locked_at": {
"type": "string"
},
"owner": {
"type": "object",
"properties": {
"name": {
"type": "string"
}
}
}
},
"required": ["id", "path"]
},
"message": {
"type": "string"
},
"request_id": {
"type": "string"
},
"documentation_url": {
"type": "string"
}
},
"required": ["lock"]
}

@ -0,0 +1,10 @@
{
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Git LFS HTTPS Lock Deletion API Request",
"type": "object",
"properties": {
"force": {
"type": "boolean"
}
}
}

@ -0,0 +1,36 @@
{
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Git LFS HTTPS Lock List API Response",
"type": "object",
"properties": {
"locks": {
"type": "array",
"items": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"path": {
"type": "string"
},
"locked_at": {
"type": "string"
},
"owner": {
"type": "object",
"properties": {
"name": {
"type": "string"
}
}
}
}
}
},
"next_cursor": {
"type": "string"
}
},
"required": ["locks"]
}

@ -0,0 +1,50 @@
{
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Git LFS HTTPS Lock Verify API Response",
"type": "object",
"definitions": {
"lock": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"path": {
"type": "string"
},
"locked_at": {
"type": "string"
},
"owner": {
"type": "object",
"properties": {
"name": {
"type": "string"
}
}
}
},
"required": ["id", "path"]
}
},
"properties": {
"ours": {
"type": "array",
"items": {
"$ref": "#/definitions/lock"
}
},
"theirs": {
"type": "array",
"items": {
"$ref": "#/definitions/lock"
}
},
"next_cursor": {
"type": "string"
}
},
"required": ["ours", "theirs"]
}

@ -757,46 +757,39 @@ func redirect307Handler(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(307)
}
type Committer struct {
Name string `json:"name"`
Email string `json:"email"`
type User struct {
Name string `json:"name"`
}
type Lock struct {
Id string `json:"id"`
Path string `json:"path"`
Committer Committer `json:"committer"`
CommitSHA string `json:"commit_sha"`
LockedAt time.Time `json:"locked_at"`
UnlockedAt time.Time `json:"unlocked_at,omitempty"`
Id string `json:"id"`
Path string `json:"path"`
Owner User `json:"owner"`
LockedAt time.Time `json:"locked_at"`
}
type LockRequest struct {
Path string `json:"path"`
LatestRemoteCommit string `json:"latest_remote_commit"`
Committer Committer `json:"committer"`
Path string `json:"path"`
}
type LockResponse struct {
Lock *Lock `json:"lock"`
CommitNeeded string `json:"commit_needed,omitempty"`
Err string `json:"error,omitempty"`
Lock *Lock `json:"lock"`
Message string `json:"message,omitempty"`
}
type UnlockRequest struct {
Id string `json:"id"`
Force bool `json:"force"`
Force bool `json:"force"`
}
type UnlockResponse struct {
Lock *Lock `json:"lock"`
Err string `json:"error,omitempty"`
Lock *Lock `json:"lock"`
Message string `json:"message,omitempty"`
}
type LockList struct {
Locks []Lock `json:"locks"`
NextCursor string `json:"next_cursor,omitempty"`
Err string `json:"error,omitempty"`
Message string `json:"message,omitempty"`
}
type VerifiableLockRequest struct {
@ -808,7 +801,7 @@ type VerifiableLockList struct {
Ours []Lock `json:"ours"`
Theirs []Lock `json:"theirs"`
NextCursor string `json:"next_cursor,omitempty"`
Err string `json:"error,omitempty"`
Message string `json:"message,omitempty"`
}
var (
@ -908,7 +901,10 @@ func (c LocksByCreatedAt) Len() int { return len(c) }
func (c LocksByCreatedAt) Less(i, j int) bool { return c[i].LockedAt.Before(c[j].LockedAt) }
func (c LocksByCreatedAt) Swap(i, j int) { c[i], c[j] = c[j], c[i] }
var lockRe = regexp.MustCompile(`/locks/?$`)
var (
lockRe = regexp.MustCompile(`/locks/?$`)
unlockRe = regexp.MustCompile(`locks/([^/]+)/unlock\z`)
)
func locksHandler(w http.ResponseWriter, r *http.Request, repo string) {
dec := json.NewDecoder(r.Body)
@ -936,7 +932,7 @@ func locksHandler(w http.ResponseWriter, r *http.Request, repo string) {
r.FormValue("limit"))
if err != nil {
ll.Err = err.Error()
ll.Message = err.Error()
} else {
ll.Locks = locks
ll.NextCursor = nextCursor
@ -948,21 +944,25 @@ func locksHandler(w http.ResponseWriter, r *http.Request, repo string) {
w.Header().Set("Content-Type", "application/json")
if strings.HasSuffix(r.URL.Path, "unlock") {
var unlockRequest UnlockRequest
var lockId string
if matches := unlockRe.FindStringSubmatch(r.URL.Path); len(matches) > 1 {
lockId = matches[1]
}
if len(lockId) == 0 {
enc.Encode(&UnlockResponse{Message: "Invalid lock"})
return
}
if err := dec.Decode(&unlockRequest); err != nil {
enc.Encode(&UnlockResponse{
Err: err.Error(),
})
enc.Encode(&UnlockResponse{Message: err.Error()})
return
}
if l := delLock(repo, unlockRequest.Id); l != nil {
enc.Encode(&UnlockResponse{
Lock: l,
})
if l := delLock(repo, lockId); l != nil {
enc.Encode(&UnlockResponse{Lock: l})
} else {
enc.Encode(&UnlockResponse{
Err: "unable to find lock",
})
enc.Encode(&UnlockResponse{Message: "unable to find lock"})
}
return
}
@ -993,7 +993,7 @@ func locksHandler(w http.ResponseWriter, r *http.Request, repo string) {
reqBody.Cursor,
strconv.Itoa(reqBody.Limit))
if err != nil {
ll.Err = err.Error()
ll.Message = err.Error()
} else {
ll.NextCursor = nextCursor
@ -1013,16 +1013,12 @@ func locksHandler(w http.ResponseWriter, r *http.Request, repo string) {
if strings.HasSuffix(r.URL.Path, "/locks") {
var lockRequest LockRequest
if err := dec.Decode(&lockRequest); err != nil {
enc.Encode(&LockResponse{
Err: err.Error(),
})
enc.Encode(&LockResponse{Message: err.Error()})
return
}
for _, l := range getLocks(repo) {
if l.Path == lockRequest.Path {
enc.Encode(&LockResponse{
Err: "lock already created",
})
enc.Encode(&LockResponse{Message: "lock already created"})
return
}
}
@ -1031,11 +1027,10 @@ func locksHandler(w http.ResponseWriter, r *http.Request, repo string) {
rand.Read(id[:])
lock := &Lock{
Id: fmt.Sprintf("%x", id[:]),
Path: lockRequest.Path,
Committer: lockRequest.Committer,
CommitSHA: lockRequest.LatestRemoteCommit,
LockedAt: time.Now(),
Id: fmt.Sprintf("%x", id[:]),
Path: lockRequest.Path,
Owner: User{Name: "Git LFS Tests"},
LockedAt: time.Now(),
}
addLocks(repo, *lock)

@ -17,7 +17,7 @@ begin_test "list a single lock"
GITLFSLOCKSENABLED=1 git lfs locks --path "f.dat" | tee locks.log
grep "1 lock(s) matched query" locks.log
grep "f.dat" locks.log
grep "Git LFS Tests <git-lfs@example.com>" locks.log
grep "Git LFS Tests" locks.log
)
end_test
@ -35,7 +35,7 @@ begin_test "list a single lock (--json)"
GITLFSLOCKSENABLED=1 git lfs locks --json --path "f_json.dat" | tee locks.log
grep "\"path\":\"f_json.dat\"" locks.log
grep "\"committer\":{\"name\":\"Git LFS Tests\",\"email\":\"git-lfs@example.com\"}" locks.log
grep "\"owner\":{\"name\":\"Git LFS Tests\"}" locks.log
)
end_test

@ -529,10 +529,6 @@ begin_test "pre-push with their lock"
setup_remote_repo "$reponame"
clone_repo "$reponame" "$reponame"
# Use a different Git persona so the locks are owned by a different person
git config --local user.name "Example Locker"
git config --local user.email "locker@example.com"
git lfs track "*.dat"
git add .gitattributes
git commit -m "initial commit"
@ -563,7 +559,7 @@ begin_test "pre-push with their lock"
git push origin master 2>&1 | tee push.log
grep "Unable to push 1 locked file(s)" push.log
grep "* locked_theirs.dat - Example Locker <locker@example.com>" push.log
grep "* locked_theirs.dat - Git LFS Tests" push.log
popd >/dev/null
)
end_test

@ -12,6 +12,13 @@ begin_test "track"
cd track
git init
echo "###############################################################################
# Set default behavior to automatically normalize line endings.
###############################################################################
* text=auto
#*.cs diff=csharp" > .gitattributes
# track *.jpg once
git lfs track "*.jpg" | grep "Tracking \*.jpg"
assert_attributes_count "jpg" "filter=lfs" 1
@ -26,12 +33,18 @@ begin_test "track"
echo "*.gif filter=lfs -text" > a/.gitattributes
echo "*.png filter=lfs -text" > a/b/.gitattributes
out=$(git lfs track)
echo "$out" | grep "Listing tracked patterns"
echo "$out" | grep "*.mov ($(native_path_escaped ".git/info/attributes"))"
echo "$out" | grep "*.jpg (.gitattributes)"
echo "$out" | grep "*.gif ($(native_path_escaped "a/.gitattributes"))"
echo "$out" | grep "*.png ($(native_path_escaped "a/b/.gitattributes"))"
git lfs track | tee track.log
grep "Listing tracked patterns" track.log
grep "*.mov ($(native_path_escaped ".git/info/attributes"))" track.log
grep "*.jpg (.gitattributes)" track.log
grep "*.gif ($(native_path_escaped "a/.gitattributes"))" track.log
grep "*.png ($(native_path_escaped "a/b/.gitattributes"))" track.log
grep "Set default behavior" .gitattributes
grep "############" .gitattributes
grep "* text=auto" .gitattributes
grep "diff=csharp" .gitattributes
grep "*.jpg" .gitattributes
)
end_test

@ -19,6 +19,32 @@ begin_test "unlocking a lock by path"
)
end_test
begin_test "force unlocking lock with missing file"
(
set -e
reponame="force-unlock-missing-file"
setup_remote_repo_with_file "$reponame" "a.dat"
GITLFSLOCKSENABLED=1 git lfs lock "a.dat" | tee lock.log
id=$(grep -oh "\((.*)\)" lock.log | tr -d "()")
assert_server_lock "$reponame" "$id"
git rm a.dat
git commit -m "a.dat"
rm *.log *.json # ensure clean git status
git status
GITLFSLOCKSENABLED=1 git lfs unlock "a.dat" 2>&1 | tee unlock.log
grep "Unable to determine path" unlock.log
assert_server_lock "$reponame" "$id"
rm unlock.log
GITLFSLOCKSENABLED=1 git lfs unlock --force "a.dat" 2>&1 | tee unlock.log
refute_server_lock "$reponame" "$id"
)
end_test
begin_test "unlocking a lock (--json)"
(
set -e
@ -72,3 +98,83 @@ begin_test "unlocking a lock without sufficient info"
assert_server_lock "$reponame" "$id"
)
end_test
begin_test "unlocking a lock while uncommitted"
(
set -e
reponame="unlock_modified"
setup_remote_repo_with_file "$reponame" "mod.dat"
GITLFSLOCKSENABLED=1 git lfs lock "mod.dat" | tee lock.log
id=$(grep -oh "\((.*)\)" lock.log | tr -d "()")
assert_server_lock "$reponame" "$id"
echo "\nSomething" >> mod.dat
GITLFSLOCKSENABLED=1 git lfs unlock "mod.dat" 2>&1 | tee unlock.log
[ ${PIPESTATUS[0]} -ne "0" ]
grep "Cannot unlock file with uncommitted changes" unlock.log
assert_server_lock "$reponame" "$id"
# should allow after discard
git checkout mod.dat
GITLFSLOCKSENABLED=1 git lfs unlock "mod.dat" 2>&1 | tee unlock.log
refute_server_lock "$reponame" "$id"
)
end_test
begin_test "unlocking a lock while uncommitted with --force"
(
set -e
reponame="unlock_modified_force"
setup_remote_repo_with_file "$reponame" "modforce.dat"
GITLFSLOCKSENABLED=1 git lfs lock "modforce.dat" | tee lock.log
id=$(grep -oh "\((.*)\)" lock.log | tr -d "()")
assert_server_lock "$reponame" "$id"
echo "\nSomething" >> modforce.dat
# should allow with --force
GITLFSLOCKSENABLED=1 git lfs unlock --force "modforce.dat" 2>&1 | tee unlock.log
grep "Warning: unlocking with uncommitted changes" unlock.log
refute_server_lock "$reponame" "$id"
)
end_test
begin_test "unlocking a lock while untracked"
(
set -e
reponame="unlock_untracked"
setup_remote_repo_with_file "$reponame" "notrelevant.dat"
git lfs track "*.dat"
# Create file but don't add it to git
# Shouldn't be able to unlock it
echo "something" > untracked.dat
GITLFSLOCKSENABLED=1 git lfs lock "untracked.dat" | tee lock.log
id=$(grep -oh "\((.*)\)" lock.log | tr -d "()")
assert_server_lock "$reponame" "$id"
GITLFSLOCKSENABLED=1 git lfs unlock "untracked.dat" 2>&1 | tee unlock.log
[ ${PIPESTATUS[0]} -ne "0" ]
grep "Cannot unlock file with uncommitted changes" unlock.log
assert_server_lock "$reponame" "$id"
# should allow after add/commit
git add untracked.dat
git commit -m "Added untracked"
GITLFSLOCKSENABLED=1 git lfs unlock "untracked.dat" 2>&1 | tee unlock.log
refute_server_lock "$reponame" "$id"
)
end_test

@ -2,16 +2,24 @@ package tq
import (
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"strings"
"testing"
"github.com/git-lfs/git-lfs/lfsapi"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/xeipuuv/gojsonschema"
)
func TestAPIBatch(t *testing.T) {
require.NotNil(t, batchReqSchema, batchReqSchema.Source)
require.NotNil(t, batchResSchema, batchResSchema.Source)
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/api/objects/batch" {
w.WriteHeader(404)
@ -21,20 +29,28 @@ func TestAPIBatch(t *testing.T) {
assert.Equal(t, "POST", r.Method)
assert.Equal(t, "80", r.Header.Get("Content-Length"))
bodyLoader, body := gojsonschema.NewReaderLoader(r.Body)
bReq := &batchRequest{}
err := json.NewDecoder(r.Body).Decode(bReq)
err := json.NewDecoder(body).Decode(bReq)
r.Body.Close()
assert.Nil(t, err)
assertSchema(t, batchReqSchema, bodyLoader)
assert.EqualValues(t, []string{"basic", "whatev"}, bReq.TransferAdapterNames)
if assert.Equal(t, 1, len(bReq.Objects)) {
assert.Equal(t, "a", bReq.Objects[0].Oid)
}
w.Header().Set("Content-Type", "application/json")
err = json.NewEncoder(w).Encode(&BatchResponse{
writeLoader, resWriter := gojsonschema.NewWriterLoader(w)
err = json.NewEncoder(resWriter).Encode(&BatchResponse{
TransferAdapterName: "basic",
Objects: bReq.Objects,
})
assert.Nil(t, err)
assertSchema(t, batchResSchema, writeLoader)
}))
defer srv.Close()
@ -59,6 +75,9 @@ func TestAPIBatch(t *testing.T) {
}
func TestAPIBatchOnlyBasic(t *testing.T) {
require.NotNil(t, batchReqSchema, batchReqSchema.Source)
require.NotNil(t, batchResSchema, batchResSchema.Source)
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/api/objects/batch" {
w.WriteHeader(404)
@ -67,19 +86,27 @@ func TestAPIBatchOnlyBasic(t *testing.T) {
assert.Equal(t, "POST", r.Method)
bodyLoader, body := gojsonschema.NewReaderLoader(r.Body)
bReq := &batchRequest{}
err := json.NewDecoder(r.Body).Decode(bReq)
err := json.NewDecoder(body).Decode(bReq)
r.Body.Close()
assert.Nil(t, err)
assertSchema(t, batchReqSchema, bodyLoader)
assert.Equal(t, 0, len(bReq.TransferAdapterNames))
if assert.Equal(t, 1, len(bReq.Objects)) {
assert.Equal(t, "a", bReq.Objects[0].Oid)
}
w.Header().Set("Content-Type", "application/json")
err = json.NewEncoder(w).Encode(&BatchResponse{
writeLoader, resWriter := gojsonschema.NewWriterLoader(w)
err = json.NewEncoder(resWriter).Encode(&BatchResponse{
TransferAdapterName: "basic",
Objects: make([]*Transfer, 0),
})
assert.Nil(t, err)
assertSchema(t, batchResSchema, writeLoader)
}))
defer srv.Close()
@ -113,3 +140,49 @@ func TestAPIBatchEmptyObjects(t *testing.T) {
assert.Equal(t, "", bRes.TransferAdapterName)
assert.Equal(t, 0, len(bRes.Objects))
}
var (
batchReqSchema *sourcedSchema
batchResSchema *sourcedSchema
)
func init() {
wd, err := os.Getwd()
if err != nil {
fmt.Println("getwd error:", err)
return
}
batchReqSchema = getSchema(wd, "schemas/http-batch-request-schema.json")
batchResSchema = getSchema(wd, "schemas/http-batch-response-schema.json")
}
type sourcedSchema struct {
Source string
*gojsonschema.Schema
}
func getSchema(wd, relpath string) *sourcedSchema {
abspath := filepath.ToSlash(filepath.Join(wd, relpath))
s, err := gojsonschema.NewSchema(gojsonschema.NewReferenceLoader(fmt.Sprintf("file:///%s", abspath)))
if err != nil {
fmt.Printf("schema load error for %q: %+v\n", relpath, err)
}
return &sourcedSchema{Source: relpath, Schema: s}
}
func assertSchema(t *testing.T, schema *sourcedSchema, dataLoader gojsonschema.JSONLoader) {
res, err := schema.Validate(dataLoader)
if assert.Nil(t, err) {
if res.Valid() {
return
}
resErrors := res.Errors()
valErrors := make([]string, 0, len(resErrors))
for _, resErr := range resErrors {
valErrors = append(valErrors, resErr.String())
}
t.Errorf("Schema: %s\n%s", schema.Source, strings.Join(valErrors, "\n"))
}
}

@ -0,0 +1,37 @@
{
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Git LFS HTTPS Batch API Request",
"type": "object",
"properties": {
"transfers": {
"type": "array",
"items": {
"type": "string"
}
},
"operation": {
"type": "string"
},
"objects": {
"type": "array",
"items": {
"type": "object",
"properties": {
"oid": {
"type": "string"
},
"size": {
"type": "number",
"minimum": 0
},
"authenticated": {
"type": "boolean"
}
},
"required": ["oid", "size"],
"additionalProperties": false
}
}
},
"required": ["objects", "operation"]
}
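
For illustration, a minimal batch request this schema accepts, checked with gojsonschema. This is a sketch only; the schema path is a placeholder for wherever the file above lives on disk:

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	// Hypothetical absolute path to the schema file shown above.
	schema := gojsonschema.NewReferenceLoader(
		"file:///path/to/schemas/http-batch-request-schema.json")
	doc := gojsonschema.NewStringLoader(`{
		"operation": "download",
		"transfers": ["basic"],
		"objects": [{"oid": "abc123", "size": 123}]
	}`)
	res, err := gojsonschema.Validate(schema, doc)
	if err != nil {
		panic(err)
	}
	fmt.Println(res.Valid()) // true for this document
}
```
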

@ -0,0 +1,83 @@
{
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Git LFS HTTPS Batch API Response",
"type": "object",
"definitions": {
"action": {
"type": "object",
"properties": {
"href": {
"type": "string"
},
"header": {
"type": "object",
"additionalProperties": true
},
"expires_at": {
"type": "string"
}
},
"required": ["href"],
"additionalProperties": false
}
},
"properties": {
"transfer": {
"type": "string"
},
"objects": {
"type": "array",
"items": {
"type": "object",
"properties": {
"oid": {
"type": "string"
},
"size": {
"type": "number",
"minimum": 0
},
"authenticated": {
"type": "boolean"
},
"actions": {
"type": "object",
"properties": {
"download": { "$ref": "#/definitions/action" },
"upload": { "$ref": "#/definitions/action" },
"verify": { "$ref": "#/definitions/action" }
},
"additionalProperties": false
},
"error": {
"type": "object",
"properties": {
"code": {
"type": "number"
},
"message": {
"type": "string"
}
},
"required": ["code", "message"],
"additionalProperties": false
}
},
"required": ["oid", "size"],
"additionalProperties": false
}
},
"message": {
"type": "string"
},
"request_id": {
"type": "string"
},
"documentation_url": {
"type": "string"
}
},
"required": ["objects"]
}
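
And a response document that conforms, again as a hedged sketch; the oid and href values are placeholders:

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	// Hypothetical absolute path to the schema file shown above.
	schema := gojsonschema.NewReferenceLoader(
		"file:///path/to/schemas/http-batch-response-schema.json")
	doc := gojsonschema.NewStringLoader(`{
		"transfer": "basic",
		"objects": [{
			"oid": "abc123",
			"size": 123,
			"actions": {
				"download": {"href": "https://example.com/abc123"}
			}
		}]
	}`)
	if res, err := gojsonschema.Validate(schema, doc); err == nil {
		fmt.Println(res.Valid()) // true
	}
}
```
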

@ -197,9 +197,9 @@ Note: An error of RequiredType has an err.Type() return value of "required"
**err.Details()**: *gojsonschema.ErrorDetails* Returns a map[string]interface{} of additional error details specific to the error. For example, GTE errors will have a "min" value, LTE will have a "max" value. See errors.go for a full description of all the error details. Every error always contains a "field" key that holds the value of *err.Field()*
Note in most cases, the err.Details() will be used to generate replacement strings in your locales. and not used directly i.e.
Note in most cases, the err.Details() will be used to generate replacement strings in your locales, and not used directly. These strings follow the text/template format i.e.
```
%field% must be greater than or equal to %min%
{{.field}} must be greater than or equal to {{.min}}
```
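
A short sketch of reading those details from a validation result; the schema and document here are illustrative:

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	schema := gojsonschema.NewStringLoader(`{"type": "string", "minLength": 5}`)
	doc := gojsonschema.NewStringLoader(`"hi"`)
	result, err := gojsonschema.Validate(schema, doc)
	if err != nil {
		panic(err)
	}
	for _, e := range result.Errors() {
		// Details() includes "field" plus type-specific keys such as "min".
		fmt.Println(e.Field(), e.Details())
	}
}
```
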
## Formats

@ -1,10 +1,20 @@
package gojsonschema
import (
"fmt"
"strings"
"bytes"
"sync"
"text/template"
)
var errorTemplates errorTemplate = errorTemplate{template.New("errors-new"), sync.RWMutex{}}
// template.Template is not thread-safe for writing, so some locking is done
// sync.RWMutex is used for efficiently locking when new templates are created
type errorTemplate struct {
*template.Template
sync.RWMutex
}
type (
// RequiredError. ErrorDetails: property string
RequiredError struct {
@ -230,13 +240,35 @@ func newError(err ResultError, context *jsonContext, value interface{}, locale l
err.SetDescription(formatErrorDescription(d, details))
}
// formatErrorDescription takes a string in this format: %field% is required
// and converts it to a string with replacements. The fields come from
// the ErrorDetails struct and vary for each type of error.
// formatErrorDescription takes a string in the default text/template
// format and converts it to a string with replacements. The fields come
// from the ErrorDetails struct and vary for each type of error.
func formatErrorDescription(s string, details ErrorDetails) string {
for name, val := range details {
s = strings.Replace(s, "%"+strings.ToLower(name)+"%", fmt.Sprintf("%v", val), -1)
var tpl *template.Template
var descrAsBuffer bytes.Buffer
var err error
errorTemplates.RLock()
tpl = errorTemplates.Lookup(s)
errorTemplates.RUnlock()
if tpl == nil {
errorTemplates.Lock()
tpl = errorTemplates.New(s)
tpl, err = tpl.Parse(s)
errorTemplates.Unlock()
if err != nil {
return err.Error()
}
}
return s
err = tpl.Execute(&descrAsBuffer, details)
if err != nil {
return err.Error()
}
return descrAsBuffer.String()
}
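
Standalone, the rendering step amounts to parsing the locale string as a text/template and executing it against the details map; a minimal sketch with no gojsonschema types involved (field names invented for the example):

```go
package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	// Locale strings are now plain text/template sources; ErrorDetails is
	// a map[string]interface{} underneath, so it can feed Execute directly.
	tpl := template.Must(template.New("err").Parse(
		`{{.field}} must be greater than or equal to {{.min}}`))
	var buf bytes.Buffer
	if err := tpl.Execute(&buf, map[string]interface{}{"field": "age", "min": 0}); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // age must be greater than or equal to 0
}
```
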

@ -60,6 +60,9 @@ type (
// UUIDFormatChecker validates a UUID is in the correct format
UUIDFormatChecker struct{}
// RegexFormatChecker validates a regex is in the correct format
RegexFormatChecker struct{}
)
var (
@ -74,6 +77,7 @@ var (
"ipv6": IPV6FormatChecker{},
"uri": URIFormatChecker{},
"uuid": UUIDFormatChecker{},
"regex": RegexFormatChecker{},
},
}
@ -176,3 +180,15 @@ func (f HostnameFormatChecker) IsFormat(input string) bool {
func (f UUIDFormatChecker) IsFormat(input string) bool {
return rxUUID.MatchString(input)
}
// IsFormat implements FormatChecker interface.
func (f RegexFormatChecker) IsFormat(input string) bool {
if input == "" {
return true
}
_, err := regexp.Compile(input)
if err != nil {
return false
}
return true
}
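
As a usage sketch, a schema can now declare `"format": "regex"` and reject strings that do not compile (assuming format checking is active, as it is by default):

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	schema := gojsonschema.NewStringLoader(`{"type": "string", "format": "regex"}`)
	doc := gojsonschema.NewStringLoader(`"[a-z](unclosed"`)
	res, err := gojsonschema.Validate(schema, doc)
	if err != nil {
		panic(err)
	}
	fmt.Println(res.Valid()) // false: the pattern does not compile
}
```
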

@ -46,9 +46,35 @@ var osFS = osFileSystem(os.Open)
// JSON loader interface
type JSONLoader interface {
jsonSource() interface{}
loadJSON() (interface{}, error)
loadSchema() (*Schema, error)
JsonSource() interface{}
LoadJSON() (interface{}, error)
JsonReference() (gojsonreference.JsonReference, error)
LoaderFactory() JSONLoaderFactory
}
type JSONLoaderFactory interface {
New(source string) JSONLoader
}
type DefaultJSONLoaderFactory struct {
}
type FileSystemJSONLoaderFactory struct {
fs http.FileSystem
}
func (d DefaultJSONLoaderFactory) New(source string) JSONLoader {
return &jsonReferenceLoader{
fs: osFS,
source: source,
}
}
func (f FileSystemJSONLoaderFactory) New(source string) JSONLoader {
return &jsonReferenceLoader{
fs: f.fs,
source: source,
}
}
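
The factory indirection means a schema loaded from a custom http.FileSystem resolves its $refs through that same filesystem. A sketch, assuming the schemas live under a local directory (paths hypothetical):

```go
package main

import (
	"net/http"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	// Hypothetical directory containing root.json and any schemas it $refs.
	fs := http.Dir("/path/to/schemas")
	loader := gojsonschema.NewReferenceLoaderFileSystem("file:///root.json", fs)
	schema, err := gojsonschema.NewSchema(loader)
	if err != nil {
		panic(err)
	}
	_ = schema
}
```
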
// osFileSystem is a functional wrapper for os.Open that implements http.FileSystem.
@ -66,10 +92,20 @@ type jsonReferenceLoader struct {
source string
}
func (l *jsonReferenceLoader) jsonSource() interface{} {
func (l *jsonReferenceLoader) JsonSource() interface{} {
return l.source
}
func (l *jsonReferenceLoader) JsonReference() (gojsonreference.JsonReference, error) {
return gojsonreference.NewJsonReference(l.JsonSource().(string))
}
func (l *jsonReferenceLoader) LoaderFactory() JSONLoaderFactory {
return &FileSystemJSONLoaderFactory{
fs: l.fs,
}
}
// NewReferenceLoader returns a JSON reference loader using the given source and the local OS file system.
func NewReferenceLoader(source string) *jsonReferenceLoader {
return &jsonReferenceLoader{
@ -86,11 +122,11 @@ func NewReferenceLoaderFileSystem(source string, fs http.FileSystem) *jsonRefere
}
}
func (l *jsonReferenceLoader) loadJSON() (interface{}, error) {
func (l *jsonReferenceLoader) LoadJSON() (interface{}, error) {
var err error
reference, err := gojsonreference.NewJsonReference(l.jsonSource().(string))
reference, err := gojsonreference.NewJsonReference(l.JsonSource().(string))
if err != nil {
return nil, err
}
@ -102,7 +138,7 @@ func (l *jsonReferenceLoader) loadJSON() (interface{}, error) {
if reference.HasFileScheme {
filename := strings.Replace(refToUrl.String(), "file://", "", -1)
filename := strings.Replace(refToUrl.GetUrl().Path, "file://", "", -1)
if runtime.GOOS == "windows" {
// on Windows, a file URL may have an extra leading slash, use slashes
// instead of backslashes, and have spaces escaped
@ -110,7 +146,6 @@ func (l *jsonReferenceLoader) loadJSON() (interface{}, error) {
filename = filename[1:]
}
filename = filepath.FromSlash(filename)
filename = strings.Replace(filename, "%20", " ", -1)
}
document, err = l.loadFromFile(filename)
@ -131,33 +166,6 @@ func (l *jsonReferenceLoader) loadJSON() (interface{}, error) {
}
func (l *jsonReferenceLoader) loadSchema() (*Schema, error) {
var err error
d := Schema{}
d.pool = newSchemaPool(l.fs)
d.referencePool = newSchemaReferencePool()
d.documentReference, err = gojsonreference.NewJsonReference(l.jsonSource().(string))
if err != nil {
return nil, err
}
spd, err := d.pool.GetDocument(d.documentReference)
if err != nil {
return nil, err
}
err = d.parse(spd.Document)
if err != nil {
return nil, err
}
return &d, nil
}
func (l *jsonReferenceLoader) loadFromHTTP(address string) (interface{}, error) {
resp, err := http.Get(address)
@ -201,45 +209,52 @@ type jsonStringLoader struct {
source string
}
func (l *jsonStringLoader) jsonSource() interface{} {
func (l *jsonStringLoader) JsonSource() interface{} {
return l.source
}
func (l *jsonStringLoader) JsonReference() (gojsonreference.JsonReference, error) {
return gojsonreference.NewJsonReference("#")
}
func (l *jsonStringLoader) LoaderFactory() JSONLoaderFactory {
return &DefaultJSONLoaderFactory{}
}
func NewStringLoader(source string) *jsonStringLoader {
return &jsonStringLoader{source: source}
}
func (l *jsonStringLoader) loadJSON() (interface{}, error) {
func (l *jsonStringLoader) LoadJSON() (interface{}, error) {
return decodeJsonUsingNumber(strings.NewReader(l.jsonSource().(string)))
return decodeJsonUsingNumber(strings.NewReader(l.JsonSource().(string)))
}
func (l *jsonStringLoader) loadSchema() (*Schema, error) {
// JSON bytes loader
var err error
type jsonBytesLoader struct {
source []byte
}
document, err := l.loadJSON()
if err != nil {
return nil, err
}
func (l *jsonBytesLoader) JsonSource() interface{} {
return l.source
}
d := Schema{}
d.pool = newSchemaPool(osFS)
d.referencePool = newSchemaReferencePool()
d.documentReference, err = gojsonreference.NewJsonReference("#")
d.pool.SetStandaloneDocument(document)
if err != nil {
return nil, err
}
func (l *jsonBytesLoader) JsonReference() (gojsonreference.JsonReference, error) {
return gojsonreference.NewJsonReference("#")
}
err = d.parse(document)
if err != nil {
return nil, err
}
func (l *jsonBytesLoader) LoaderFactory() JSONLoaderFactory {
return &DefaultJSONLoaderFactory{}
}
return &d, nil
func NewBytesLoader(source []byte) *jsonBytesLoader {
return &jsonBytesLoader{source: source}
}
func (l *jsonBytesLoader) LoadJSON() (interface{}, error) {
return decodeJsonUsingNumber(bytes.NewReader(l.JsonSource().([]byte)))
}
// JSON Go (types) loader
@ -249,19 +264,27 @@ type jsonGoLoader struct {
source interface{}
}
func (l *jsonGoLoader) jsonSource() interface{} {
func (l *jsonGoLoader) JsonSource() interface{} {
return l.source
}
func (l *jsonGoLoader) JsonReference() (gojsonreference.JsonReference, error) {
return gojsonreference.NewJsonReference("#")
}
func (l *jsonGoLoader) LoaderFactory() JSONLoaderFactory {
return &DefaultJSONLoaderFactory{}
}
func NewGoLoader(source interface{}) *jsonGoLoader {
return &jsonGoLoader{source: source}
}
func (l *jsonGoLoader) loadJSON() (interface{}, error) {
func (l *jsonGoLoader) LoadJSON() (interface{}, error) {
// convert it to a compliant JSON first to avoid types "mismatches"
jsonBytes, err := json.Marshal(l.jsonSource())
jsonBytes, err := json.Marshal(l.JsonSource())
if err != nil {
return nil, err
}
@ -270,31 +293,34 @@ func (l *jsonGoLoader) loadJSON() (interface{}, error) {
}
func (l *jsonGoLoader) loadSchema() (*Schema, error) {
type jsonIOLoader struct {
buf *bytes.Buffer
}
var err error
func NewReaderLoader(source io.Reader) (*jsonIOLoader, io.Reader) {
buf := &bytes.Buffer{}
return &jsonIOLoader{buf: buf}, io.TeeReader(source, buf)
}
document, err := l.loadJSON()
if err != nil {
return nil, err
}
func NewWriterLoader(source io.Writer) (*jsonIOLoader, io.Writer) {
buf := &bytes.Buffer{}
return &jsonIOLoader{buf: buf}, io.MultiWriter(source, buf)
}
d := Schema{}
d.pool = newSchemaPool(osFS)
d.referencePool = newSchemaReferencePool()
d.documentReference, err = gojsonreference.NewJsonReference("#")
d.pool.SetStandaloneDocument(document)
if err != nil {
return nil, err
}
func (l *jsonIOLoader) JsonSource() interface{} {
return l.buf.String()
}
err = d.parse(document)
if err != nil {
return nil, err
}
func (l *jsonIOLoader) LoadJSON() (interface{}, error) {
return decodeJsonUsingNumber(l.buf)
}
return &d, nil
func (l *jsonIOLoader) JsonReference() (gojsonreference.JsonReference, error) {
return gojsonreference.NewJsonReference("#")
}
func (l *jsonIOLoader) LoaderFactory() JSONLoaderFactory {
return &DefaultJSONLoaderFactory{}
}
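
This is what lets the tq tests above decode a request body and schema-check the same bytes: the loader tees whatever passes through the returned reader into its buffer. A condensed sketch, with the schema inlined rather than read from a file:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	body := strings.NewReader(`{"operation": "download", "objects": []}`)
	loader, tee := gojsonschema.NewReaderLoader(body)

	var req map[string]interface{}
	// Consuming tee fills the loader's buffer as a side effect.
	if err := json.NewDecoder(tee).Decode(&req); err != nil {
		panic(err)
	}

	schema := gojsonschema.NewStringLoader(`{"type": "object", "required": ["operation"]}`)
	res, err := gojsonschema.Validate(schema, loader)
	if err != nil {
		panic(err)
	}
	fmt.Println(res.Valid()) // true
}
```
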
func decodeJsonUsingNumber(r io.Reader) (interface{}, error) {

@ -37,6 +37,7 @@ type (
MissingDependency() string
Internal() string
Enum() string
ArrayNotEnoughItems() string
ArrayNoAdditionalItems() string
ArrayMinItems() string
ArrayMaxItems() string
@ -83,11 +84,11 @@ type (
)
func (l DefaultLocale) Required() string {
return `%property% is required`
return `{{.property}} is required`
}
func (l DefaultLocale) InvalidType() string {
return `Invalid type. Expected: %expected%, given: %given%`
return `Invalid type. Expected: {{.expected}}, given: {{.given}}`
}
func (l DefaultLocale) NumberAnyOf() string {
@ -107,157 +108,161 @@ func (l DefaultLocale) NumberNot() string {
}
func (l DefaultLocale) MissingDependency() string {
return `Has a dependency on %dependency%`
return `Has a dependency on {{.dependency}}`
}
func (l DefaultLocale) Internal() string {
return `Internal Error %error%`
return `Internal Error {{.error}}`
}
func (l DefaultLocale) Enum() string {
return `%field% must be one of the following: %allowed%`
return `{{.field}} must be one of the following: {{.allowed}}`
}
func (l DefaultLocale) ArrayNoAdditionalItems() string {
return `No additional items allowed on array`
}
func (l DefaultLocale) ArrayNotEnoughItems() string {
return `Not enough items on array to match positional list of schema`
}
func (l DefaultLocale) ArrayMinItems() string {
return `Array must have at least %min% items`
return `Array must have at least {{.min}} items`
}
func (l DefaultLocale) ArrayMaxItems() string {
return `Array must have at most %max% items`
return `Array must have at most {{.max}} items`
}
func (l DefaultLocale) Unique() string {
return `%type% items must be unique`
return `{{.type}} items must be unique`
}
func (l DefaultLocale) ArrayMinProperties() string {
return `Must have at least %min% properties`
return `Must have at least {{.min}} properties`
}
func (l DefaultLocale) ArrayMaxProperties() string {
return `Must have at most %max% properties`
return `Must have at most {{.max}} properties`
}
func (l DefaultLocale) AdditionalPropertyNotAllowed() string {
return `Additional property %property% is not allowed`
return `Additional property {{.property}} is not allowed`
}
func (l DefaultLocale) InvalidPropertyPattern() string {
return `Property "%property%" does not match pattern %pattern%`
return `Property "{{.property}}" does not match pattern {{.pattern}}`
}
func (l DefaultLocale) StringGTE() string {
return `String length must be greater than or equal to %min%`
return `String length must be greater than or equal to {{.min}}`
}
func (l DefaultLocale) StringLTE() string {
return `String length must be less than or equal to %max%`
return `String length must be less than or equal to {{.max}}`
}
func (l DefaultLocale) DoesNotMatchPattern() string {
return `Does not match pattern '%pattern%'`
return `Does not match pattern '{{.pattern}}'`
}
func (l DefaultLocale) DoesNotMatchFormat() string {
return `Does not match format '%format%'`
return `Does not match format '{{.format}}'`
}
func (l DefaultLocale) MultipleOf() string {
return `Must be a multiple of %multiple%`
return `Must be a multiple of {{.multiple}}`
}
func (l DefaultLocale) NumberGTE() string {
return `Must be greater than or equal to %min%`
return `Must be greater than or equal to {{.min}}`
}
func (l DefaultLocale) NumberGT() string {
return `Must be greater than %min%`
return `Must be greater than {{.min}}`
}
func (l DefaultLocale) NumberLTE() string {
return `Must be less than or equal to %max%`
return `Must be less than or equal to {{.max}}`
}
func (l DefaultLocale) NumberLT() string {
return `Must be less than %max%`
return `Must be less than {{.max}}`
}
// Schema validators
func (l DefaultLocale) RegexPattern() string {
return `Invalid regex pattern '%pattern%'`
return `Invalid regex pattern '{{.pattern}}'`
}
func (l DefaultLocale) GreaterThanZero() string {
return `%number% must be strictly greater than 0`
return `{{.number}} must be strictly greater than 0`
}
func (l DefaultLocale) MustBeOfA() string {
return `%x% must be of a %y%`
return `{{.x}} must be of a {{.y}}`
}
func (l DefaultLocale) MustBeOfAn() string {
return `%x% must be of an %y%`
return `{{.x}} must be of an {{.y}}`
}
func (l DefaultLocale) CannotBeUsedWithout() string {
return `%x% cannot be used without %y%`
return `{{.x}} cannot be used without {{.y}}`
}
func (l DefaultLocale) CannotBeGT() string {
return `%x% cannot be greater than %y%`
return `{{.x}} cannot be greater than {{.y}}`
}
func (l DefaultLocale) MustBeOfType() string {
return `%key% must be of type %type%`
return `{{.key}} must be of type {{.type}}`
}
func (l DefaultLocale) MustBeValidRegex() string {
return `%key% must be a valid regex`
return `{{.key}} must be a valid regex`
}
func (l DefaultLocale) MustBeValidFormat() string {
return `%key% must be a valid format %given%`
return `{{.key}} must be a valid format {{.given}}`
}
func (l DefaultLocale) MustBeGTEZero() string {
return `%key% must be greater than or equal to 0`
return `{{.key}} must be greater than or equal to 0`
}
func (l DefaultLocale) KeyCannotBeGreaterThan() string {
return `%key% cannot be greater than %y%`
return `{{.key}} cannot be greater than {{.y}}`
}
func (l DefaultLocale) KeyItemsMustBeOfType() string {
return `%key% items must be %type%`
return `{{.key}} items must be {{.type}}`
}
func (l DefaultLocale) KeyItemsMustBeUnique() string {
return `%key% items must be unique`
return `{{.key}} items must be unique`
}
func (l DefaultLocale) ReferenceMustBeCanonical() string {
return `Reference %reference% must be canonical`
return `Reference {{.reference}} must be canonical`
}
func (l DefaultLocale) NotAValidType() string {
return `%type% is not a valid type -- `
return `{{.type}} is not a valid type -- `
}
func (l DefaultLocale) Duplicated() string {
return `%type% type is duplicated`
return `{{.type}} type is duplicated`
}
func (l DefaultLocale) httpBadStatus() string {
return `Could not read schema from HTTP, response status is %status%`
return `Could not read schema from HTTP, response status is {{.status}}`
}
// Replacement options: field, description, context, value
func (l DefaultLocale) ErrorFormat() string {
return `%field%: %description%`
return `{{.field}}: {{.description}}`
}
const (

@ -48,6 +48,7 @@ type (
Value() interface{}
SetDetails(ErrorDetails)
Details() ErrorDetails
String() string
}
// ResultErrorFields holds the fields for each ResultError implementation.

@ -42,7 +42,39 @@ var (
)
func NewSchema(l JSONLoader) (*Schema, error) {
return l.loadSchema()
ref, err := l.JsonReference()
if err != nil {
return nil, err
}
d := Schema{}
d.pool = newSchemaPool(l.LoaderFactory())
d.documentReference = ref
d.referencePool = newSchemaReferencePool()
var doc interface{}
if ref.String() != "" {
// Get document from schema pool
spd, err := d.pool.GetDocument(d.documentReference)
if err != nil {
return nil, err
}
doc = spd.Document
} else {
// Load JSON directly
doc, err = l.LoadJSON()
if err != nil {
return nil, err
}
d.pool.SetStandaloneDocument(doc)
}
err = d.parse(doc)
if err != nil {
return nil, err
}
return &d, nil
}
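
Both loader flavors now flow through this one constructor: a reference loader takes the pool path, while string/bytes/IO loaders (whose JsonReference is the empty "#" reference) take the standalone path. A sketch of the standalone case, under those assumptions:

```go
package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	// Standalone path: the document is parsed directly and pinned in the pool.
	inline, err := gojsonschema.NewSchema(
		gojsonschema.NewStringLoader(`{"type": "integer"}`))
	if err != nil {
		panic(err)
	}

	res, _ := inline.Validate(gojsonschema.NewGoLoader(42))
	fmt.Println(res.Valid()) // true
}
```
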
type Schema struct {
@ -116,14 +148,27 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *subSchema)
}
if k, ok := m[KEY_REF].(string); ok {
if sch, ok := d.referencePool.Get(currentSchema.ref.String() + k); ok {
jsonReference, err := gojsonreference.NewJsonReference(k)
if err != nil {
return err
}
if jsonReference.HasFullUrl {
currentSchema.ref = &jsonReference
} else {
inheritedReference, err := currentSchema.ref.Inherits(jsonReference)
if err != nil {
return err
}
currentSchema.ref = inheritedReference
}
if sch, ok := d.referencePool.Get(currentSchema.ref.String() + k); ok {
currentSchema.refSchema = sch
} else {
var err error
err = d.parseReference(documentNode, currentSchema, k)
err := d.parseReference(documentNode, currentSchema, k)
if err != nil {
return err
}
@ -755,30 +800,10 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *subSchema)
return nil
}
func (d *Schema) parseReference(documentNode interface{}, currentSchema *subSchema, reference string) (e error) {
var err error
jsonReference, err := gojsonreference.NewJsonReference(reference)
if err != nil {
return err
}
standaloneDocument := d.pool.GetStandaloneDocument()
if jsonReference.HasFullUrl {
currentSchema.ref = &jsonReference
} else {
inheritedReference, err := currentSchema.ref.Inherits(jsonReference)
if err != nil {
return err
}
currentSchema.ref = inheritedReference
}
jsonPointer := currentSchema.ref.GetPointer()
func (d *Schema) parseReference(documentNode interface{}, currentSchema *subSchema, reference string) error {
var refdDocumentNode interface{}
jsonPointer := currentSchema.ref.GetPointer()
standaloneDocument := d.pool.GetStandaloneDocument()
if standaloneDocument != nil {
@ -789,8 +814,6 @@ func (d *Schema) parseReference(documentNode interface{}, currentSchema *subSche
}
} else {
var err error
dsp, err := d.pool.GetDocument(*currentSchema.ref)
if err != nil {
return err
@ -812,11 +835,10 @@ func (d *Schema) parseReference(documentNode interface{}, currentSchema *subSche
// returns the loaded referenced subSchema for the caller to update its current subSchema
newSchemaDocument := refdDocumentNode.(map[string]interface{})
newSchema := &subSchema{property: KEY_REF, parent: currentSchema, ref: currentSchema.ref}
d.referencePool.Add(currentSchema.ref.String()+reference, newSchema)
err = d.parseSchema(newSchemaDocument, newSchema)
err := d.parseSchema(newSchemaDocument, newSchema)
if err != nil {
return err
}

@ -28,7 +28,6 @@ package gojsonschema
import (
"errors"
"net/http"
"github.com/xeipuuv/gojsonreference"
)
@ -40,15 +39,15 @@ type schemaPoolDocument struct {
type schemaPool struct {
schemaPoolDocuments map[string]*schemaPoolDocument
standaloneDocument interface{}
fs http.FileSystem
jsonLoaderFactory JSONLoaderFactory
}
func newSchemaPool(fs http.FileSystem) *schemaPool {
func newSchemaPool(f JSONLoaderFactory) *schemaPool {
p := &schemaPool{}
p.schemaPoolDocuments = make(map[string]*schemaPoolDocument)
p.standaloneDocument = nil
p.fs = fs
p.jsonLoaderFactory = f
return p
}
@ -96,8 +95,8 @@ func (p *schemaPool) GetDocument(reference gojsonreference.JsonReference) (*sche
return spd, nil
}
jsonReferenceLoader := NewReferenceLoaderFileSystem(reference.String(), p.fs)
document, err := jsonReferenceLoader.loadJSON()
jsonReferenceLoader := p.jsonLoaderFactory.New(reference.String())
document, err := jsonReferenceLoader.LoadJSON()
if err != nil {
return nil, err
}

@ -26,7 +26,10 @@
package gojsonschema
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"reflect"
@ -35,6 +38,8 @@ import (
"strconv"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
const displayErrorMessages = false
@ -58,284 +63,284 @@ func TestJsonSchemaTestSuite(t *testing.T) {
JsonSchemaTestSuiteMap := []map[string]string{
map[string]string{"phase": "integer type matches integers", "test": "an integer is an integer", "schema": "type/schema_0.json", "data": "type/data_00.json", "valid": "true"},
map[string]string{"phase": "integer type matches integers", "test": "a float is not an integer", "schema": "type/schema_0.json", "data": "type/data_01.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "integer type matches integers", "test": "a string is not an integer", "schema": "type/schema_0.json", "data": "type/data_02.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "integer type matches integers", "test": "an object is not an integer", "schema": "type/schema_0.json", "data": "type/data_03.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "integer type matches integers", "test": "an array is not an integer", "schema": "type/schema_0.json", "data": "type/data_04.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "integer type matches integers", "test": "a boolean is not an integer", "schema": "type/schema_0.json", "data": "type/data_05.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "integer type matches integers", "test": "null is not an integer", "schema": "type/schema_0.json", "data": "type/data_06.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "number type matches numbers", "test": "an integer is a number", "schema": "type/schema_1.json", "data": "type/data_10.json", "valid": "true"},
map[string]string{"phase": "number type matches numbers", "test": "a float is a number", "schema": "type/schema_1.json", "data": "type/data_11.json", "valid": "true"},
map[string]string{"phase": "number type matches numbers", "test": "a string is not a number", "schema": "type/schema_1.json", "data": "type/data_12.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "number type matches numbers", "test": "an object is not a number", "schema": "type/schema_1.json", "data": "type/data_13.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "number type matches numbers", "test": "an array is not a number", "schema": "type/schema_1.json", "data": "type/data_14.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "number type matches numbers", "test": "a boolean is not a number", "schema": "type/schema_1.json", "data": "type/data_15.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "number type matches numbers", "test": "null is not a number", "schema": "type/schema_1.json", "data": "type/data_16.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "string type matches strings", "test": "1 is not a string", "schema": "type/schema_2.json", "data": "type/data_20.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "string type matches strings", "test": "a float is not a string", "schema": "type/schema_2.json", "data": "type/data_21.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "string type matches strings", "test": "a string is a string", "schema": "type/schema_2.json", "data": "type/data_22.json", "valid": "true"},
map[string]string{"phase": "string type matches strings", "test": "an object is not a string", "schema": "type/schema_2.json", "data": "type/data_23.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "string type matches strings", "test": "an array is not a string", "schema": "type/schema_2.json", "data": "type/data_24.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "string type matches strings", "test": "a boolean is not a string", "schema": "type/schema_2.json", "data": "type/data_25.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "string type matches strings", "test": "null is not a string", "schema": "type/schema_2.json", "data": "type/data_26.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "object type matches objects", "test": "an integer is not an object", "schema": "type/schema_3.json", "data": "type/data_30.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "object type matches objects", "test": "a float is not an object", "schema": "type/schema_3.json", "data": "type/data_31.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "object type matches objects", "test": "a string is not an object", "schema": "type/schema_3.json", "data": "type/data_32.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "object type matches objects", "test": "an object is an object", "schema": "type/schema_3.json", "data": "type/data_33.json", "valid": "true"},
map[string]string{"phase": "object type matches objects", "test": "an array is not an object", "schema": "type/schema_3.json", "data": "type/data_34.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "object type matches objects", "test": "a boolean is not an object", "schema": "type/schema_3.json", "data": "type/data_35.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "object type matches objects", "test": "null is not an object", "schema": "type/schema_3.json", "data": "type/data_36.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "array type matches arrays", "test": "an integer is not an array", "schema": "type/schema_4.json", "data": "type/data_40.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "array type matches arrays", "test": "a float is not an array", "schema": "type/schema_4.json", "data": "type/data_41.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "array type matches arrays", "test": "a string is not an array", "schema": "type/schema_4.json", "data": "type/data_42.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "array type matches arrays", "test": "an object is not an array", "schema": "type/schema_4.json", "data": "type/data_43.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "array type matches arrays", "test": "an array is not an array", "schema": "type/schema_4.json", "data": "type/data_44.json", "valid": "true"},
map[string]string{"phase": "array type matches arrays", "test": "a boolean is not an array", "schema": "type/schema_4.json", "data": "type/data_45.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "array type matches arrays", "test": "null is not an array", "schema": "type/schema_4.json", "data": "type/data_46.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "boolean type matches booleans", "test": "an integer is not a boolean", "schema": "type/schema_5.json", "data": "type/data_50.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "boolean type matches booleans", "test": "a float is not a boolean", "schema": "type/schema_5.json", "data": "type/data_51.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "boolean type matches booleans", "test": "a string is not a boolean", "schema": "type/schema_5.json", "data": "type/data_52.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "boolean type matches booleans", "test": "an object is not a boolean", "schema": "type/schema_5.json", "data": "type/data_53.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "boolean type matches booleans", "test": "an array is not a boolean", "schema": "type/schema_5.json", "data": "type/data_54.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "boolean type matches booleans", "test": "a boolean is not a boolean", "schema": "type/schema_5.json", "data": "type/data_55.json", "valid": "true", "errors": "invalid_type"},
map[string]string{"phase": "boolean type matches booleans", "test": "null is not a boolean", "schema": "type/schema_5.json", "data": "type/data_56.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "null type matches only the null object", "test": "an integer is not null", "schema": "type/schema_6.json", "data": "type/data_60.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "null type matches only the null object", "test": "a float is not null", "schema": "type/schema_6.json", "data": "type/data_61.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "null type matches only the null object", "test": "a string is not null", "schema": "type/schema_6.json", "data": "type/data_62.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "null type matches only the null object", "test": "an object is not null", "schema": "type/schema_6.json", "data": "type/data_63.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "null type matches only the null object", "test": "an array is not null", "schema": "type/schema_6.json", "data": "type/data_64.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "null type matches only the null object", "test": "a boolean is not null", "schema": "type/schema_6.json", "data": "type/data_65.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "null type matches only the null object", "test": "null is null", "schema": "type/schema_6.json", "data": "type/data_66.json", "valid": "true"},
map[string]string{"phase": "multiple types can be specified in an array", "test": "an integer is valid", "schema": "type/schema_7.json", "data": "type/data_70.json", "valid": "true"},
map[string]string{"phase": "multiple types can be specified in an array", "test": "a string is valid", "schema": "type/schema_7.json", "data": "type/data_71.json", "valid": "true"},
map[string]string{"phase": "multiple types can be specified in an array", "test": "a float is invalid", "schema": "type/schema_7.json", "data": "type/data_72.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "multiple types can be specified in an array", "test": "an object is invalid", "schema": "type/schema_7.json", "data": "type/data_73.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "multiple types can be specified in an array", "test": "an array is invalid", "schema": "type/schema_7.json", "data": "type/data_74.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "multiple types can be specified in an array", "test": "a boolean is invalid", "schema": "type/schema_7.json", "data": "type/data_75.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "multiple types can be specified in an array", "test": "null is invalid", "schema": "type/schema_7.json", "data": "type/data_76.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "required validation", "test": "present required property is valid", "schema": "required/schema_0.json", "data": "required/data_00.json", "valid": "true"},
map[string]string{"phase": "required validation", "test": "non-present required property is invalid", "schema": "required/schema_0.json", "data": "required/data_01.json", "valid": "false", "errors": "required"},
map[string]string{"phase": "required default validation", "test": "not required by default", "schema": "required/schema_1.json", "data": "required/data_10.json", "valid": "true"},
map[string]string{"phase": "uniqueItems validation", "test": "unique array of integers is valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_00.json", "valid": "true"},
map[string]string{"phase": "uniqueItems validation", "test": "non-unique array of integers is invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_01.json", "valid": "false", "errors": "unique"},
map[string]string{"phase": "uniqueItems validation", "test": "numbers are unique if mathematically unequal", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_02.json", "valid": "false", "errors": "unique, unique"},
map[string]string{"phase": "uniqueItems validation", "test": "unique array of objects is valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_03.json", "valid": "true"},
map[string]string{"phase": "uniqueItems validation", "test": "non-unique array of objects is invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_04.json", "valid": "false", "errors": "unique"},
map[string]string{"phase": "uniqueItems validation", "test": "unique array of nested objects is valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_05.json", "valid": "true"},
map[string]string{"phase": "uniqueItems validation", "test": "non-unique array of nested objects is invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_06.json", "valid": "false", "errors": "unique"},
map[string]string{"phase": "uniqueItems validation", "test": "unique array of arrays is valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_07.json", "valid": "true"},
map[string]string{"phase": "uniqueItems validation", "test": "non-unique array of arrays is invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_08.json", "valid": "false", "errors": "unique"},
map[string]string{"phase": "uniqueItems validation", "test": "1 and true are unique", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_09.json", "valid": "true"},
map[string]string{"phase": "uniqueItems validation", "test": "0 and false are unique", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_010.json", "valid": "true"},
map[string]string{"phase": "uniqueItems validation", "test": "unique heterogeneous types are valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_011.json", "valid": "true"},
map[string]string{"phase": "uniqueItems validation", "test": "non-unique heterogeneous types are invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_012.json", "valid": "false", "errors": "unique"},
map[string]string{"phase": "pattern validation", "test": "a matching pattern is valid", "schema": "pattern/schema_0.json", "data": "pattern/data_00.json", "valid": "true"},
map[string]string{"phase": "pattern validation", "test": "a non-matching pattern is invalid", "schema": "pattern/schema_0.json", "data": "pattern/data_01.json", "valid": "false", "errors": "pattern"},
map[string]string{"phase": "pattern validation", "test": "ignores non-strings", "schema": "pattern/schema_0.json", "data": "pattern/data_02.json", "valid": "true"},
map[string]string{"phase": "simple enum validation", "test": "one of the enum is valid", "schema": "enum/schema_0.json", "data": "enum/data_00.json", "valid": "true"},
map[string]string{"phase": "simple enum validation", "test": "something else is invalid", "schema": "enum/schema_0.json", "data": "enum/data_01.json", "valid": "false", "errors": "enum"},
map[string]string{"phase": "heterogeneous enum validation", "test": "one of the enum is valid", "schema": "enum/schema_1.json", "data": "enum/data_10.json", "valid": "true"},
map[string]string{"phase": "heterogeneous enum validation", "test": "something else is invalid", "schema": "enum/schema_1.json", "data": "enum/data_11.json", "valid": "false", "errors": "enum"},
map[string]string{"phase": "heterogeneous enum validation", "test": "objects are deep compared", "schema": "enum/schema_1.json", "data": "enum/data_12.json", "valid": "false", "errors": "enum"},
map[string]string{"phase": "minLength validation", "test": "longer is valid", "schema": "minLength/schema_0.json", "data": "minLength/data_00.json", "valid": "true"},
map[string]string{"phase": "minLength validation", "test": "exact length is valid", "schema": "minLength/schema_0.json", "data": "minLength/data_01.json", "valid": "true"},
map[string]string{"phase": "minLength validation", "test": "too short is invalid", "schema": "minLength/schema_0.json", "data": "minLength/data_02.json", "valid": "false", "errors": "string_gte"},
map[string]string{"phase": "minLength validation", "test": "ignores non-strings", "schema": "minLength/schema_0.json", "data": "minLength/data_03.json", "valid": "true"},
map[string]string{"phase": "minLength validation", "test": "counts utf8 length correctly", "schema": "minLength/schema_0.json", "data": "minLength/data_04.json", "valid": "false", "errors": "string_gte"},
map[string]string{"phase": "maxLength validation", "test": "shorter is valid", "schema": "maxLength/schema_0.json", "data": "maxLength/data_00.json", "valid": "true"},
map[string]string{"phase": "maxLength validation", "test": "exact length is valid", "schema": "maxLength/schema_0.json", "data": "maxLength/data_01.json", "valid": "true"},
map[string]string{"phase": "maxLength validation", "test": "too long is invalid", "schema": "maxLength/schema_0.json", "data": "maxLength/data_02.json", "valid": "false", "errors": "string_lte"},
map[string]string{"phase": "maxLength validation", "test": "ignores non-strings", "schema": "maxLength/schema_0.json", "data": "maxLength/data_03.json", "valid": "true"},
map[string]string{"phase": "maxLength validation", "test": "counts utf8 length correctly", "schema": "maxLength/schema_0.json", "data": "maxLength/data_04.json", "valid": "true"},
map[string]string{"phase": "minimum validation", "test": "above the minimum is valid", "schema": "minimum/schema_0.json", "data": "minimum/data_00.json", "valid": "true"},
map[string]string{"phase": "minimum validation", "test": "below the minimum is invalid", "schema": "minimum/schema_0.json", "data": "minimum/data_01.json", "valid": "false", "errors": "number_gte"},
map[string]string{"phase": "minimum validation", "test": "ignores non-numbers", "schema": "minimum/schema_0.json", "data": "minimum/data_02.json", "valid": "true"},
map[string]string{"phase": "exclusiveMinimum validation", "test": "above the minimum is still valid", "schema": "minimum/schema_1.json", "data": "minimum/data_10.json", "valid": "true"},
map[string]string{"phase": "exclusiveMinimum validation", "test": "boundary point is invalid", "schema": "minimum/schema_1.json", "data": "minimum/data_11.json", "valid": "false", "errors": "number_gt"},
map[string]string{"phase": "maximum validation", "test": "below the maximum is valid", "schema": "maximum/schema_0.json", "data": "maximum/data_00.json", "valid": "true"},
map[string]string{"phase": "maximum validation", "test": "above the maximum is invalid", "schema": "maximum/schema_0.json", "data": "maximum/data_01.json", "valid": "false", "errors": "number_lte"},
map[string]string{"phase": "maximum validation", "test": "ignores non-numbers", "schema": "maximum/schema_0.json", "data": "maximum/data_02.json", "valid": "true"},
map[string]string{"phase": "exclusiveMaximum validation", "test": "below the maximum is still valid", "schema": "maximum/schema_1.json", "data": "maximum/data_10.json", "valid": "true"},
map[string]string{"phase": "exclusiveMaximum validation", "test": "boundary point is invalid", "schema": "maximum/schema_1.json", "data": "maximum/data_11.json", "valid": "false", "errors": "number_lt"},
map[string]string{"phase": "allOf", "test": "allOf", "schema": "allOf/schema_0.json", "data": "allOf/data_00.json", "valid": "true"},
map[string]string{"phase": "allOf", "test": "mismatch second", "schema": "allOf/schema_0.json", "data": "allOf/data_01.json", "valid": "false", "errors": "number_all_of, required"},
map[string]string{"phase": "allOf", "test": "mismatch first", "schema": "allOf/schema_0.json", "data": "allOf/data_02.json", "valid": "false", "errors": "number_all_of, required"},
map[string]string{"phase": "allOf", "test": "wrong type", "schema": "allOf/schema_0.json", "data": "allOf/data_03.json", "valid": "false", "errors": "number_all_of, invalid_type"},
map[string]string{"phase": "allOf with base schema", "test": "valid", "schema": "allOf/schema_1.json", "data": "allOf/data_10.json", "valid": "true"},
map[string]string{"phase": "allOf with base schema", "test": "mismatch base schema", "schema": "allOf/schema_1.json", "data": "allOf/data_11.json", "valid": "false", "errors": "required"},
map[string]string{"phase": "allOf with base schema", "test": "mismatch first allOf", "schema": "allOf/schema_1.json", "data": "allOf/data_12.json", "valid": "false", "errors": "number_all_of, required"},
map[string]string{"phase": "allOf with base schema", "test": "mismatch second allOf", "schema": "allOf/schema_1.json", "data": "allOf/data_13.json", "valid": "false", "errors": "number_all_of, required"},
map[string]string{"phase": "allOf with base schema", "test": "mismatch both", "schema": "allOf/schema_1.json", "data": "allOf/data_14.json", "valid": "false", "errors": "number_all_of, required, required"},
map[string]string{"phase": "allOf simple types", "test": "valid", "schema": "allOf/schema_2.json", "data": "allOf/data_20.json", "valid": "true"},
map[string]string{"phase": "allOf simple types", "test": "mismatch one", "schema": "allOf/schema_2.json", "data": "allOf/data_21.json", "valid": "false", "errors": "number_all_of, number_lte"},
map[string]string{"phase": "oneOf", "test": "first oneOf valid", "schema": "oneOf/schema_0.json", "data": "oneOf/data_00.json", "valid": "true"},
map[string]string{"phase": "oneOf", "test": "second oneOf valid", "schema": "oneOf/schema_0.json", "data": "oneOf/data_01.json", "valid": "true"},
map[string]string{"phase": "oneOf", "test": "both oneOf valid", "schema": "oneOf/schema_0.json", "data": "oneOf/data_02.json", "valid": "false", "errors": "number_one_of"},
map[string]string{"phase": "oneOf", "test": "neither oneOf valid", "schema": "oneOf/schema_0.json", "data": "oneOf/data_03.json", "valid": "false"},
map[string]string{"phase": "oneOf with base schema", "test": "mismatch base schema", "schema": "oneOf/schema_1.json", "data": "oneOf/data_10.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "oneOf with base schema", "test": "one oneOf valid", "schema": "oneOf/schema_1.json", "data": "oneOf/data_11.json", "valid": "true"},
map[string]string{"phase": "oneOf with base schema", "test": "both oneOf valid", "schema": "oneOf/schema_1.json", "data": "oneOf/data_12.json", "valid": "false", "errors": "number_one_of"},
map[string]string{"phase": "anyOf", "test": "first anyOf valid", "schema": "anyOf/schema_0.json", "data": "anyOf/data_00.json", "valid": "true"},
map[string]string{"phase": "anyOf", "test": "second anyOf valid", "schema": "anyOf/schema_0.json", "data": "anyOf/data_01.json", "valid": "true"},
map[string]string{"phase": "anyOf", "test": "both anyOf valid", "schema": "anyOf/schema_0.json", "data": "anyOf/data_02.json", "valid": "true"},
map[string]string{"phase": "anyOf", "test": "neither anyOf valid", "schema": "anyOf/schema_0.json", "data": "anyOf/data_03.json", "valid": "false", "errors": "number_any_of, number_gte"},
map[string]string{"phase": "anyOf with base schema", "test": "mismatch base schema", "schema": "anyOf/schema_1.json", "data": "anyOf/data_10.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "anyOf with base schema", "test": "one anyOf valid", "schema": "anyOf/schema_1.json", "data": "anyOf/data_11.json", "valid": "true"},
map[string]string{"phase": "anyOf with base schema", "test": "both anyOf invalid", "schema": "anyOf/schema_1.json", "data": "anyOf/data_12.json", "valid": "false", "errors": "number_any_of, string_lte"},
map[string]string{"phase": "not", "test": "allowed", "schema": "not/schema_0.json", "data": "not/data_00.json", "valid": "true"},
map[string]string{"phase": "not", "test": "disallowed", "schema": "not/schema_0.json", "data": "not/data_01.json", "valid": "false", "errors": "number_not"},
map[string]string{"phase": "not multiple types", "test": "valid", "schema": "not/schema_1.json", "data": "not/data_10.json", "valid": "true"},
map[string]string{"phase": "not multiple types", "test": "mismatch", "schema": "not/schema_1.json", "data": "not/data_11.json", "valid": "false", "errors": "number_not"},
map[string]string{"phase": "not multiple types", "test": "other mismatch", "schema": "not/schema_1.json", "data": "not/data_12.json", "valid": "false", "errors": "number_not"},
map[string]string{"phase": "not more complex schema", "test": "match", "schema": "not/schema_2.json", "data": "not/data_20.json", "valid": "true"},
map[string]string{"phase": "not more complex schema", "test": "other match", "schema": "not/schema_2.json", "data": "not/data_21.json", "valid": "true"},
map[string]string{"phase": "not more complex schema", "test": "mismatch", "schema": "not/schema_2.json", "data": "not/data_22.json", "valid": "false", "errors": "number_not"},
map[string]string{"phase": "minProperties validation", "test": "longer is valid", "schema": "minProperties/schema_0.json", "data": "minProperties/data_00.json", "valid": "true"},
map[string]string{"phase": "minProperties validation", "test": "exact length is valid", "schema": "minProperties/schema_0.json", "data": "minProperties/data_01.json", "valid": "true"},
map[string]string{"phase": "minProperties validation", "test": "too short is invalid", "schema": "minProperties/schema_0.json", "data": "minProperties/data_02.json", "valid": "false", "errors": "array_min_properties"},
map[string]string{"phase": "minProperties validation", "test": "ignores non-objects", "schema": "minProperties/schema_0.json", "data": "minProperties/data_03.json", "valid": "true"},
map[string]string{"phase": "maxProperties validation", "test": "shorter is valid", "schema": "maxProperties/schema_0.json", "data": "maxProperties/data_00.json", "valid": "true"},
map[string]string{"phase": "maxProperties validation", "test": "exact length is valid", "schema": "maxProperties/schema_0.json", "data": "maxProperties/data_01.json", "valid": "true"},
map[string]string{"phase": "maxProperties validation", "test": "too long is invalid", "schema": "maxProperties/schema_0.json", "data": "maxProperties/data_02.json", "valid": "false", "errors": "array_max_properties"},
map[string]string{"phase": "maxProperties validation", "test": "ignores non-objects", "schema": "maxProperties/schema_0.json", "data": "maxProperties/data_03.json", "valid": "true"},
map[string]string{"phase": "by int", "test": "int by int", "schema": "multipleOf/schema_0.json", "data": "multipleOf/data_00.json", "valid": "true"},
map[string]string{"phase": "by int", "test": "int by int fail", "schema": "multipleOf/schema_0.json", "data": "multipleOf/data_01.json", "valid": "false", "errors": "multiple_of"},
map[string]string{"phase": "by int", "test": "ignores non-numbers", "schema": "multipleOf/schema_0.json", "data": "multipleOf/data_02.json", "valid": "true"},
map[string]string{"phase": "by number", "test": "zero is multiple of anything", "schema": "multipleOf/schema_1.json", "data": "multipleOf/data_10.json", "valid": "true"},
map[string]string{"phase": "by number", "test": "4.5 is multiple of 1.5", "schema": "multipleOf/schema_1.json", "data": "multipleOf/data_11.json", "valid": "true"},
map[string]string{"phase": "by number", "test": "35 is not multiple of 1.5", "schema": "multipleOf/schema_1.json", "data": "multipleOf/data_12.json", "valid": "false", "errors": "multiple_of"},
map[string]string{"phase": "by small number", "test": "0.0075 is multiple of 0.0001", "schema": "multipleOf/schema_2.json", "data": "multipleOf/data_20.json", "valid": "true"},
map[string]string{"phase": "by small number", "test": "0.00751 is not multiple of 0.0001", "schema": "multipleOf/schema_2.json", "data": "multipleOf/data_21.json", "valid": "false", "errors": "multiple_of"},
map[string]string{"phase": "minItems validation", "test": "longer is valid", "schema": "minItems/schema_0.json", "data": "minItems/data_00.json", "valid": "true"},
map[string]string{"phase": "minItems validation", "test": "exact length is valid", "schema": "minItems/schema_0.json", "data": "minItems/data_01.json", "valid": "true"},
map[string]string{"phase": "minItems validation", "test": "too short is invalid", "schema": "minItems/schema_0.json", "data": "minItems/data_02.json", "valid": "false", "errors": "array_min_items"},
map[string]string{"phase": "minItems validation", "test": "ignores non-arrays", "schema": "minItems/schema_0.json", "data": "minItems/data_03.json", "valid": "true"},
map[string]string{"phase": "maxItems validation", "test": "shorter is valid", "schema": "maxItems/schema_0.json", "data": "maxItems/data_00.json", "valid": "true"},
map[string]string{"phase": "maxItems validation", "test": "exact length is valid", "schema": "maxItems/schema_0.json", "data": "maxItems/data_01.json", "valid": "true"},
map[string]string{"phase": "maxItems validation", "test": "too long is invalid", "schema": "maxItems/schema_0.json", "data": "maxItems/data_02.json", "valid": "false", "errors": "array_max_items"},
map[string]string{"phase": "maxItems validation", "test": "ignores non-arrays", "schema": "maxItems/schema_0.json", "data": "maxItems/data_03.json", "valid": "true"},
map[string]string{"phase": "object properties validation", "test": "both properties present and valid is valid", "schema": "properties/schema_0.json", "data": "properties/data_00.json", "valid": "true"},
map[string]string{"phase": "object properties validation", "test": "one property invalid is invalid", "schema": "properties/schema_0.json", "data": "properties/data_01.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "object properties validation", "test": "both properties invalid is invalid", "schema": "properties/schema_0.json", "data": "properties/data_02.json", "valid": "false", "errors": "invalid_type, invalid_type"},
map[string]string{"phase": "object properties validation", "test": "doesn't invalidate other properties", "schema": "properties/schema_0.json", "data": "properties/data_03.json", "valid": "true"},
map[string]string{"phase": "object properties validation", "test": "ignores non-objects", "schema": "properties/schema_0.json", "data": "properties/data_04.json", "valid": "true"},
map[string]string{"phase": "properties, patternProperties, additionalProperties interaction", "test": "property validates property", "schema": "properties/schema_1.json", "data": "properties/data_10.json", "valid": "true"},
map[string]string{"phase": "properties, patternProperties, additionalProperties interaction", "test": "property invalidates property", "schema": "properties/schema_1.json", "data": "properties/data_11.json", "valid": "false", "errors": "array_max_items"},
map[string]string{"phase": "properties, patternProperties, additionalProperties interaction", "test": "patternProperty invalidates property", "schema": "properties/schema_1.json", "data": "properties/data_12.json", "valid": "false", "errors": "array_min_items, invalid_type"},
map[string]string{"phase": "properties, patternProperties, additionalProperties interaction", "test": "patternProperty validates nonproperty", "schema": "properties/schema_1.json", "data": "properties/data_13.json", "valid": "true"},
map[string]string{"phase": "properties, patternProperties, additionalProperties interaction", "test": "patternProperty invalidates nonproperty", "schema": "properties/schema_1.json", "data": "properties/data_14.json", "valid": "false", "errors": "array_min_items, invalid_type"},
map[string]string{"phase": "properties, patternProperties, additionalProperties interaction", "test": "additionalProperty ignores property", "schema": "properties/schema_1.json", "data": "properties/data_15.json", "valid": "true"},
map[string]string{"phase": "properties, patternProperties, additionalProperties interaction", "test": "additionalProperty validates others", "schema": "properties/schema_1.json", "data": "properties/data_16.json", "valid": "true"},
map[string]string{"phase": "properties, patternProperties, additionalProperties interaction", "test": "additionalProperty invalidates others", "schema": "properties/schema_1.json", "data": "properties/data_17.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "root pointer ref", "test": "match", "schema": "ref/schema_0.json", "data": "ref/data_00.json", "valid": "true"},
map[string]string{"phase": "root pointer ref", "test": "recursive match", "schema": "ref/schema_0.json", "data": "ref/data_01.json", "valid": "true"},
map[string]string{"phase": "root pointer ref", "test": "mismatch", "schema": "ref/schema_0.json", "data": "ref/data_02.json", "valid": "false", "errors": "additional_property_not_allowed"},
map[string]string{"phase": "root pointer ref", "test": "recursive mismatch", "schema": "ref/schema_0.json", "data": "ref/data_03.json", "valid": "false", "errors": "additional_property_not_allowed"},
map[string]string{"phase": "relative pointer ref to object", "test": "match", "schema": "ref/schema_1.json", "data": "ref/data_10.json", "valid": "true"},
map[string]string{"phase": "relative pointer ref to object", "test": "mismatch", "schema": "ref/schema_1.json", "data": "ref/data_11.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "relative pointer ref to array", "test": "match array", "schema": "ref/schema_2.json", "data": "ref/data_20.json", "valid": "true"},
map[string]string{"phase": "relative pointer ref to array", "test": "mismatch array", "schema": "ref/schema_2.json", "data": "ref/data_21.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "escaped pointer ref", "test": "slash", "schema": "ref/schema_3.json", "data": "ref/data_30.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "escaped pointer ref", "test": "tilda", "schema": "ref/schema_3.json", "data": "ref/data_31.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "escaped pointer ref", "test": "percent", "schema": "ref/schema_3.json", "data": "ref/data_32.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "nested refs", "test": "nested ref valid", "schema": "ref/schema_4.json", "data": "ref/data_40.json", "valid": "true"},
map[string]string{"phase": "nested refs", "test": "nested ref invalid", "schema": "ref/schema_4.json", "data": "ref/data_41.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "remote ref, containing refs itself", "test": "remote ref valid", "schema": "ref/schema_5.json", "data": "ref/data_50.json", "valid": "true"},
map[string]string{"phase": "remote ref, containing refs itself", "test": "remote ref invalid", "schema": "ref/schema_5.json", "data": "ref/data_51.json", "valid": "false", "errors": "number_all_of, number_gte"},
map[string]string{"phase": "a schema given for items", "test": "valid items", "schema": "items/schema_0.json", "data": "items/data_00.json", "valid": "true"},
map[string]string{"phase": "a schema given for items", "test": "wrong type of items", "schema": "items/schema_0.json", "data": "items/data_01.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "a schema given for items", "test": "ignores non-arrays", "schema": "items/schema_0.json", "data": "items/data_02.json", "valid": "true"},
map[string]string{"phase": "an array of schemas for items", "test": "correct types", "schema": "items/schema_1.json", "data": "items/data_10.json", "valid": "true"},
map[string]string{"phase": "an array of schemas for items", "test": "wrong types", "schema": "items/schema_1.json", "data": "items/data_11.json", "valid": "false", "errors": "invalid_type, invalid_type"},
map[string]string{"phase": "valid definition", "test": "valid definition schema", "schema": "definitions/schema_0.json", "data": "definitions/data_00.json", "valid": "true"},
map[string]string{"phase": "invalid definition", "test": "invalid definition schema", "schema": "definitions/schema_1.json", "data": "definitions/data_10.json", "valid": "false", "errors": "number_any_of, enum"},
map[string]string{"phase": "additionalItems as schema", "test": "additional items match schema", "schema": "additionalItems/schema_0.json", "data": "additionalItems/data_00.json", "valid": "true"},
map[string]string{"phase": "additionalItems as schema", "test": "additional items do not match schema", "schema": "additionalItems/schema_0.json", "data": "additionalItems/data_01.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "items is schema, no additionalItems", "test": "all items match schema", "schema": "additionalItems/schema_1.json", "data": "additionalItems/data_10.json", "valid": "true"},
map[string]string{"phase": "array of items with no additionalItems", "test": "no additional items present", "schema": "additionalItems/schema_2.json", "data": "additionalItems/data_20.json", "valid": "true"},
map[string]string{"phase": "array of items with no additionalItems", "test": "additional items are not permitted", "schema": "additionalItems/schema_2.json", "data": "additionalItems/data_21.json", "valid": "false", "errors": "array_no_additional_items"},
map[string]string{"phase": "additionalItems as false without items", "test": "items defaults to empty schema so everything is valid", "schema": "additionalItems/schema_3.json", "data": "additionalItems/data_30.json", "valid": "true"},
map[string]string{"phase": "additionalItems as false without items", "test": "ignores non-arrays", "schema": "additionalItems/schema_3.json", "data": "additionalItems/data_31.json", "valid": "true"},
map[string]string{"phase": "additionalItems are allowed by default", "test": "only the first item is validated", "schema": "additionalItems/schema_4.json", "data": "additionalItems/data_40.json", "valid": "true"},
map[string]string{"phase": "additionalProperties being false does not allow other properties", "test": "no additional properties is valid", "schema": "additionalProperties/schema_0.json", "data": "additionalProperties/data_00.json", "valid": "true"},
map[string]string{"phase": "additionalProperties being false does not allow other properties", "test": "an additional property is invalid", "schema": "additionalProperties/schema_0.json", "data": "additionalProperties/data_01.json", "valid": "false", "errors": "additional_property_not_allowed"},
map[string]string{"phase": "additionalProperties being false does not allow other properties", "test": "ignores non-objects", "schema": "additionalProperties/schema_0.json", "data": "additionalProperties/data_02.json", "valid": "true"},
map[string]string{"phase": "additionalProperties being false does not allow other properties", "test": "patternProperties are not additional properties", "schema": "additionalProperties/schema_0.json", "data": "additionalProperties/data_03.json", "valid": "true"},
map[string]string{"phase": "additionalProperties allows a schema which should validate", "test": "no additional properties is valid", "schema": "additionalProperties/schema_1.json", "data": "additionalProperties/data_10.json", "valid": "true"},
map[string]string{"phase": "additionalProperties allows a schema which should validate", "test": "an additional valid property is valid", "schema": "additionalProperties/schema_1.json", "data": "additionalProperties/data_11.json", "valid": "true"},
map[string]string{"phase": "additionalProperties allows a schema which should validate", "test": "an additional invalid property is invalid", "schema": "additionalProperties/schema_1.json", "data": "additionalProperties/data_12.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "additionalProperties are allowed by default", "test": "additional properties are allowed", "schema": "additionalProperties/schema_2.json", "data": "additionalProperties/data_20.json", "valid": "true"},
map[string]string{"phase": "dependencies", "test": "neither", "schema": "dependencies/schema_0.json", "data": "dependencies/data_00.json", "valid": "true"},
map[string]string{"phase": "dependencies", "test": "nondependant", "schema": "dependencies/schema_0.json", "data": "dependencies/data_01.json", "valid": "true"},
map[string]string{"phase": "dependencies", "test": "with dependency", "schema": "dependencies/schema_0.json", "data": "dependencies/data_02.json", "valid": "true"},
map[string]string{"phase": "dependencies", "test": "missing dependency", "schema": "dependencies/schema_0.json", "data": "dependencies/data_03.json", "valid": "false", "errors": "missing_dependency"},
map[string]string{"phase": "dependencies", "test": "ignores non-objects", "schema": "dependencies/schema_0.json", "data": "dependencies/data_04.json", "valid": "true"},
map[string]string{"phase": "multiple dependencies", "test": "neither", "schema": "dependencies/schema_1.json", "data": "dependencies/data_10.json", "valid": "true"},
map[string]string{"phase": "multiple dependencies", "test": "nondependants", "schema": "dependencies/schema_1.json", "data": "dependencies/data_11.json", "valid": "true"},
map[string]string{"phase": "multiple dependencies", "test": "with dependencies", "schema": "dependencies/schema_1.json", "data": "dependencies/data_12.json", "valid": "true"},
map[string]string{"phase": "multiple dependencies", "test": "missing dependency", "schema": "dependencies/schema_1.json", "data": "dependencies/data_13.json", "valid": "false", "errors": "missing_dependency"},
map[string]string{"phase": "multiple dependencies", "test": "missing other dependency", "schema": "dependencies/schema_1.json", "data": "dependencies/data_14.json", "valid": "false", "errors": "missing_dependency"},
map[string]string{"phase": "multiple dependencies", "test": "missing both dependencies", "schema": "dependencies/schema_1.json", "data": "dependencies/data_15.json", "valid": "false", "errors": "missing_dependency, missing_dependency"},
map[string]string{"phase": "multiple dependencies subschema", "test": "valid", "schema": "dependencies/schema_2.json", "data": "dependencies/data_20.json", "valid": "true"},
map[string]string{"phase": "multiple dependencies subschema", "test": "no dependency", "schema": "dependencies/schema_2.json", "data": "dependencies/data_21.json", "valid": "true"},
map[string]string{"phase": "multiple dependencies subschema", "test": "wrong type", "schema": "dependencies/schema_2.json", "data": "dependencies/data_22.json", "valid": "false"},
map[string]string{"phase": "multiple dependencies subschema", "test": "wrong type other", "schema": "dependencies/schema_2.json", "data": "dependencies/data_23.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "multiple dependencies subschema", "test": "wrong type both", "schema": "dependencies/schema_2.json", "data": "dependencies/data_24.json", "valid": "false", "errors": "invalid_type, invalid_type"},
map[string]string{"phase": "patternProperties validates properties matching a regex", "test": "a single valid match is valid", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_00.json", "valid": "true"},
map[string]string{"phase": "patternProperties validates properties matching a regex", "test": "multiple valid matches is valid", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_01.json", "valid": "true"},
map[string]string{"phase": "patternProperties validates properties matching a regex", "test": "a single invalid match is invalid", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_02.json", "valid": "false", "errors": "invalid_property_pattern, invalid_type"},
map[string]string{"phase": "patternProperties validates properties matching a regex", "test": "multiple invalid matches is invalid", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_03.json", "valid": "false", "errors": "invalid_property_pattern, invalid_property_pattern, invalid_type, invalid_type"},
map[string]string{"phase": "patternProperties validates properties matching a regex", "test": "ignores non-objects", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_04.json", "valid": "true"},
map[string]string{"phase": "patternProperties validates properties matching a regex", "test": "with additionalProperties combination", "schema": "patternProperties/schema_3.json", "data": "patternProperties/data_24.json", "valid": "false", "errors": "additional_property_not_allowed"},
map[string]string{"phase": "patternProperties validates properties matching a regex", "test": "with additionalProperties combination", "schema": "patternProperties/schema_3.json", "data": "patternProperties/data_25.json", "valid": "false", "errors": "additional_property_not_allowed"},
map[string]string{"phase": "patternProperties validates properties matching a regex", "test": "with additionalProperties combination", "schema": "patternProperties/schema_4.json", "data": "patternProperties/data_26.json", "valid": "false", "errors": "additional_property_not_allowed"},
map[string]string{"phase": "multiple simultaneous patternProperties are validated", "test": "a single valid match is valid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_10.json", "valid": "true"},
map[string]string{"phase": "multiple simultaneous patternProperties are validated", "test": "a simultaneous match is valid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_11.json", "valid": "true"},
map[string]string{"phase": "multiple simultaneous patternProperties are validated", "test": "multiple matches is valid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_12.json", "valid": "true"},
map[string]string{"phase": "multiple simultaneous patternProperties are validated", "test": "an invalid due to one is invalid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_13.json", "valid": "false", "errors": "invalid_property_pattern, invalid_type"},
map[string]string{"phase": "multiple simultaneous patternProperties are validated", "test": "an invalid due to the other is invalid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_14.json", "valid": "false", "errors": "number_lte"},
map[string]string{"phase": "multiple simultaneous patternProperties are validated", "test": "an invalid due to both is invalid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_15.json", "valid": "false", "errors": "invalid_type, number_lte"},
map[string]string{"phase": "regexes are not anchored by default and are case sensitive", "test": "non recognized members are ignored", "schema": "patternProperties/schema_2.json", "data": "patternProperties/data_20.json", "valid": "true"},
map[string]string{"phase": "regexes are not anchored by default and are case sensitive", "test": "recognized members are accounted for", "schema": "patternProperties/schema_2.json", "data": "patternProperties/data_21.json", "valid": "false", "errors": "invalid_property_pattern, invalid_type"},
map[string]string{"phase": "regexes are not anchored by default and are case sensitive", "test": "regexes are case sensitive", "schema": "patternProperties/schema_2.json", "data": "patternProperties/data_22.json", "valid": "true"},
map[string]string{"phase": "regexes are not anchored by default and are case sensitive", "test": "regexes are case sensitive, 2", "schema": "patternProperties/schema_2.json", "data": "patternProperties/data_23.json", "valid": "false", "errors": "invalid_property_pattern, invalid_type"},
map[string]string{"phase": "remote ref", "test": "remote ref valid", "schema": "refRemote/schema_0.json", "data": "refRemote/data_00.json", "valid": "true"},
map[string]string{"phase": "remote ref", "test": "remote ref invalid", "schema": "refRemote/schema_0.json", "data": "refRemote/data_01.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "fragment within remote ref", "test": "remote fragment valid", "schema": "refRemote/schema_1.json", "data": "refRemote/data_10.json", "valid": "true"},
map[string]string{"phase": "fragment within remote ref", "test": "remote fragment invalid", "schema": "refRemote/schema_1.json", "data": "refRemote/data_11.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "ref within remote ref", "test": "ref within ref valid", "schema": "refRemote/schema_2.json", "data": "refRemote/data_20.json", "valid": "true"},
map[string]string{"phase": "ref within remote ref", "test": "ref within ref invalid", "schema": "refRemote/schema_2.json", "data": "refRemote/data_21.json", "valid": "false", "errors": "invalid_type"},
map[string]string{"phase": "format validation", "test": "email format is invalid", "schema": "format/schema_0.json", "data": "format/data_00.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "email format is invalid", "schema": "format/schema_0.json", "data": "format/data_01.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "email format valid", "schema": "format/schema_0.json", "data": "format/data_02.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "invoice format valid", "schema": "format/schema_1.json", "data": "format/data_03.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "invoice format is invalid", "schema": "format/schema_1.json", "data": "format/data_04.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_05.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_06.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "date-time format is invalid", "schema": "format/schema_2.json", "data": "format/data_07.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_08.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_09.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_10.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_11.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_12.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "hostname format is valid", "schema": "format/schema_3.json", "data": "format/data_13.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "hostname format is valid", "schema": "format/schema_3.json", "data": "format/data_14.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "hostname format is valid", "schema": "format/schema_3.json", "data": "format/data_15.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "hostname format is invalid", "schema": "format/schema_3.json", "data": "format/data_16.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "hostname format is invalid", "schema": "format/schema_3.json", "data": "format/data_17.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "ipv4 format is valid", "schema": "format/schema_4.json", "data": "format/data_18.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "ipv4 format is invalid", "schema": "format/schema_4.json", "data": "format/data_19.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "ipv6 format is valid", "schema": "format/schema_5.json", "data": "format/data_20.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "ipv6 format is valid", "schema": "format/schema_5.json", "data": "format/data_21.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "ipv6 format is invalid", "schema": "format/schema_5.json", "data": "format/data_22.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "ipv6 format is invalid", "schema": "format/schema_5.json", "data": "format/data_23.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "uri format is valid", "schema": "format/schema_6.json", "data": "format/data_24.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "uri format is valid", "schema": "format/schema_6.json", "data": "format/data_25.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "uri format is valid", "schema": "format/schema_6.json", "data": "format/data_26.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "uri format is valid", "schema": "format/schema_6.json", "data": "format/data_27.json", "valid": "true"},
map[string]string{"phase": "format validation", "test": "uri format is invalid", "schema": "format/schema_6.json", "data": "format/data_28.json", "valid": "false", "errors": "format"},
map[string]string{"phase": "format validation", "test": "uri format is invalid", "schema": "format/schema_6.json", "data": "format/data_13.json", "valid": "false", "errors": "format"},
{"phase": "integer type matches integers", "test": "an integer is an integer", "schema": "type/schema_0.json", "data": "type/data_00.json", "valid": "true"},
{"phase": "integer type matches integers", "test": "a float is not an integer", "schema": "type/schema_0.json", "data": "type/data_01.json", "valid": "false", "errors": "invalid_type"},
{"phase": "integer type matches integers", "test": "a string is not an integer", "schema": "type/schema_0.json", "data": "type/data_02.json", "valid": "false", "errors": "invalid_type"},
{"phase": "integer type matches integers", "test": "an object is not an integer", "schema": "type/schema_0.json", "data": "type/data_03.json", "valid": "false", "errors": "invalid_type"},
{"phase": "integer type matches integers", "test": "an array is not an integer", "schema": "type/schema_0.json", "data": "type/data_04.json", "valid": "false", "errors": "invalid_type"},
{"phase": "integer type matches integers", "test": "a boolean is not an integer", "schema": "type/schema_0.json", "data": "type/data_05.json", "valid": "false", "errors": "invalid_type"},
{"phase": "integer type matches integers", "test": "null is not an integer", "schema": "type/schema_0.json", "data": "type/data_06.json", "valid": "false", "errors": "invalid_type"},
{"phase": "number type matches numbers", "test": "an integer is a number", "schema": "type/schema_1.json", "data": "type/data_10.json", "valid": "true"},
{"phase": "number type matches numbers", "test": "a float is a number", "schema": "type/schema_1.json", "data": "type/data_11.json", "valid": "true"},
{"phase": "number type matches numbers", "test": "a string is not a number", "schema": "type/schema_1.json", "data": "type/data_12.json", "valid": "false", "errors": "invalid_type"},
{"phase": "number type matches numbers", "test": "an object is not a number", "schema": "type/schema_1.json", "data": "type/data_13.json", "valid": "false", "errors": "invalid_type"},
{"phase": "number type matches numbers", "test": "an array is not a number", "schema": "type/schema_1.json", "data": "type/data_14.json", "valid": "false", "errors": "invalid_type"},
{"phase": "number type matches numbers", "test": "a boolean is not a number", "schema": "type/schema_1.json", "data": "type/data_15.json", "valid": "false", "errors": "invalid_type"},
{"phase": "number type matches numbers", "test": "null is not a number", "schema": "type/schema_1.json", "data": "type/data_16.json", "valid": "false", "errors": "invalid_type"},
{"phase": "string type matches strings", "test": "1 is not a string", "schema": "type/schema_2.json", "data": "type/data_20.json", "valid": "false", "errors": "invalid_type"},
{"phase": "string type matches strings", "test": "a float is not a string", "schema": "type/schema_2.json", "data": "type/data_21.json", "valid": "false", "errors": "invalid_type"},
{"phase": "string type matches strings", "test": "a string is a string", "schema": "type/schema_2.json", "data": "type/data_22.json", "valid": "true"},
{"phase": "string type matches strings", "test": "an object is not a string", "schema": "type/schema_2.json", "data": "type/data_23.json", "valid": "false", "errors": "invalid_type"},
{"phase": "string type matches strings", "test": "an array is not a string", "schema": "type/schema_2.json", "data": "type/data_24.json", "valid": "false", "errors": "invalid_type"},
{"phase": "string type matches strings", "test": "a boolean is not a string", "schema": "type/schema_2.json", "data": "type/data_25.json", "valid": "false", "errors": "invalid_type"},
{"phase": "string type matches strings", "test": "null is not a string", "schema": "type/schema_2.json", "data": "type/data_26.json", "valid": "false", "errors": "invalid_type"},
{"phase": "object type matches objects", "test": "an integer is not an object", "schema": "type/schema_3.json", "data": "type/data_30.json", "valid": "false", "errors": "invalid_type"},
{"phase": "object type matches objects", "test": "a float is not an object", "schema": "type/schema_3.json", "data": "type/data_31.json", "valid": "false", "errors": "invalid_type"},
{"phase": "object type matches objects", "test": "a string is not an object", "schema": "type/schema_3.json", "data": "type/data_32.json", "valid": "false", "errors": "invalid_type"},
{"phase": "object type matches objects", "test": "an object is an object", "schema": "type/schema_3.json", "data": "type/data_33.json", "valid": "true"},
{"phase": "object type matches objects", "test": "an array is not an object", "schema": "type/schema_3.json", "data": "type/data_34.json", "valid": "false", "errors": "invalid_type"},
{"phase": "object type matches objects", "test": "a boolean is not an object", "schema": "type/schema_3.json", "data": "type/data_35.json", "valid": "false", "errors": "invalid_type"},
{"phase": "object type matches objects", "test": "null is not an object", "schema": "type/schema_3.json", "data": "type/data_36.json", "valid": "false", "errors": "invalid_type"},
{"phase": "array type matches arrays", "test": "an integer is not an array", "schema": "type/schema_4.json", "data": "type/data_40.json", "valid": "false", "errors": "invalid_type"},
{"phase": "array type matches arrays", "test": "a float is not an array", "schema": "type/schema_4.json", "data": "type/data_41.json", "valid": "false", "errors": "invalid_type"},
{"phase": "array type matches arrays", "test": "a string is not an array", "schema": "type/schema_4.json", "data": "type/data_42.json", "valid": "false", "errors": "invalid_type"},
{"phase": "array type matches arrays", "test": "an object is not an array", "schema": "type/schema_4.json", "data": "type/data_43.json", "valid": "false", "errors": "invalid_type"},
{"phase": "array type matches arrays", "test": "an array is not an array", "schema": "type/schema_4.json", "data": "type/data_44.json", "valid": "true"},
{"phase": "array type matches arrays", "test": "a boolean is not an array", "schema": "type/schema_4.json", "data": "type/data_45.json", "valid": "false", "errors": "invalid_type"},
{"phase": "array type matches arrays", "test": "null is not an array", "schema": "type/schema_4.json", "data": "type/data_46.json", "valid": "false", "errors": "invalid_type"},
{"phase": "boolean type matches booleans", "test": "an integer is not a boolean", "schema": "type/schema_5.json", "data": "type/data_50.json", "valid": "false", "errors": "invalid_type"},
{"phase": "boolean type matches booleans", "test": "a float is not a boolean", "schema": "type/schema_5.json", "data": "type/data_51.json", "valid": "false", "errors": "invalid_type"},
{"phase": "boolean type matches booleans", "test": "a string is not a boolean", "schema": "type/schema_5.json", "data": "type/data_52.json", "valid": "false", "errors": "invalid_type"},
{"phase": "boolean type matches booleans", "test": "an object is not a boolean", "schema": "type/schema_5.json", "data": "type/data_53.json", "valid": "false", "errors": "invalid_type"},
{"phase": "boolean type matches booleans", "test": "an array is not a boolean", "schema": "type/schema_5.json", "data": "type/data_54.json", "valid": "false", "errors": "invalid_type"},
{"phase": "boolean type matches booleans", "test": "a boolean is not a boolean", "schema": "type/schema_5.json", "data": "type/data_55.json", "valid": "true", "errors": "invalid_type"},
{"phase": "boolean type matches booleans", "test": "null is not a boolean", "schema": "type/schema_5.json", "data": "type/data_56.json", "valid": "false", "errors": "invalid_type"},
{"phase": "null type matches only the null object", "test": "an integer is not null", "schema": "type/schema_6.json", "data": "type/data_60.json", "valid": "false", "errors": "invalid_type"},
{"phase": "null type matches only the null object", "test": "a float is not null", "schema": "type/schema_6.json", "data": "type/data_61.json", "valid": "false", "errors": "invalid_type"},
{"phase": "null type matches only the null object", "test": "a string is not null", "schema": "type/schema_6.json", "data": "type/data_62.json", "valid": "false", "errors": "invalid_type"},
{"phase": "null type matches only the null object", "test": "an object is not null", "schema": "type/schema_6.json", "data": "type/data_63.json", "valid": "false", "errors": "invalid_type"},
{"phase": "null type matches only the null object", "test": "an array is not null", "schema": "type/schema_6.json", "data": "type/data_64.json", "valid": "false", "errors": "invalid_type"},
{"phase": "null type matches only the null object", "test": "a boolean is not null", "schema": "type/schema_6.json", "data": "type/data_65.json", "valid": "false", "errors": "invalid_type"},
{"phase": "null type matches only the null object", "test": "null is null", "schema": "type/schema_6.json", "data": "type/data_66.json", "valid": "true"},
{"phase": "multiple types can be specified in an array", "test": "an integer is valid", "schema": "type/schema_7.json", "data": "type/data_70.json", "valid": "true"},
{"phase": "multiple types can be specified in an array", "test": "a string is valid", "schema": "type/schema_7.json", "data": "type/data_71.json", "valid": "true"},
{"phase": "multiple types can be specified in an array", "test": "a float is invalid", "schema": "type/schema_7.json", "data": "type/data_72.json", "valid": "false", "errors": "invalid_type"},
{"phase": "multiple types can be specified in an array", "test": "an object is invalid", "schema": "type/schema_7.json", "data": "type/data_73.json", "valid": "false", "errors": "invalid_type"},
{"phase": "multiple types can be specified in an array", "test": "an array is invalid", "schema": "type/schema_7.json", "data": "type/data_74.json", "valid": "false", "errors": "invalid_type"},
{"phase": "multiple types can be specified in an array", "test": "a boolean is invalid", "schema": "type/schema_7.json", "data": "type/data_75.json", "valid": "false", "errors": "invalid_type"},
{"phase": "multiple types can be specified in an array", "test": "null is invalid", "schema": "type/schema_7.json", "data": "type/data_76.json", "valid": "false", "errors": "invalid_type"},
{"phase": "required validation", "test": "present required property is valid", "schema": "required/schema_0.json", "data": "required/data_00.json", "valid": "true"},
{"phase": "required validation", "test": "non-present required property is invalid", "schema": "required/schema_0.json", "data": "required/data_01.json", "valid": "false", "errors": "required"},
{"phase": "required default validation", "test": "not required by default", "schema": "required/schema_1.json", "data": "required/data_10.json", "valid": "true"},
{"phase": "uniqueItems validation", "test": "unique array of integers is valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_00.json", "valid": "true"},
{"phase": "uniqueItems validation", "test": "non-unique array of integers is invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_01.json", "valid": "false", "errors": "unique"},
{"phase": "uniqueItems validation", "test": "numbers are unique if mathematically unequal", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_02.json", "valid": "false", "errors": "unique, unique"},
{"phase": "uniqueItems validation", "test": "unique array of objects is valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_03.json", "valid": "true"},
{"phase": "uniqueItems validation", "test": "non-unique array of objects is invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_04.json", "valid": "false", "errors": "unique"},
{"phase": "uniqueItems validation", "test": "unique array of nested objects is valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_05.json", "valid": "true"},
{"phase": "uniqueItems validation", "test": "non-unique array of nested objects is invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_06.json", "valid": "false", "errors": "unique"},
{"phase": "uniqueItems validation", "test": "unique array of arrays is valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_07.json", "valid": "true"},
{"phase": "uniqueItems validation", "test": "non-unique array of arrays is invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_08.json", "valid": "false", "errors": "unique"},
{"phase": "uniqueItems validation", "test": "1 and true are unique", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_09.json", "valid": "true"},
{"phase": "uniqueItems validation", "test": "0 and false are unique", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_010.json", "valid": "true"},
{"phase": "uniqueItems validation", "test": "unique heterogeneous types are valid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_011.json", "valid": "true"},
{"phase": "uniqueItems validation", "test": "non-unique heterogeneous types are invalid", "schema": "uniqueItems/schema_0.json", "data": "uniqueItems/data_012.json", "valid": "false", "errors": "unique"},
{"phase": "pattern validation", "test": "a matching pattern is valid", "schema": "pattern/schema_0.json", "data": "pattern/data_00.json", "valid": "true"},
{"phase": "pattern validation", "test": "a non-matching pattern is invalid", "schema": "pattern/schema_0.json", "data": "pattern/data_01.json", "valid": "false", "errors": "pattern"},
{"phase": "pattern validation", "test": "ignores non-strings", "schema": "pattern/schema_0.json", "data": "pattern/data_02.json", "valid": "true"},
{"phase": "simple enum validation", "test": "one of the enum is valid", "schema": "enum/schema_0.json", "data": "enum/data_00.json", "valid": "true"},
{"phase": "simple enum validation", "test": "something else is invalid", "schema": "enum/schema_0.json", "data": "enum/data_01.json", "valid": "false", "errors": "enum"},
{"phase": "heterogeneous enum validation", "test": "one of the enum is valid", "schema": "enum/schema_1.json", "data": "enum/data_10.json", "valid": "true"},
{"phase": "heterogeneous enum validation", "test": "something else is invalid", "schema": "enum/schema_1.json", "data": "enum/data_11.json", "valid": "false", "errors": "enum"},
{"phase": "heterogeneous enum validation", "test": "objects are deep compared", "schema": "enum/schema_1.json", "data": "enum/data_12.json", "valid": "false", "errors": "enum"},
{"phase": "minLength validation", "test": "longer is valid", "schema": "minLength/schema_0.json", "data": "minLength/data_00.json", "valid": "true"},
{"phase": "minLength validation", "test": "exact length is valid", "schema": "minLength/schema_0.json", "data": "minLength/data_01.json", "valid": "true"},
{"phase": "minLength validation", "test": "too short is invalid", "schema": "minLength/schema_0.json", "data": "minLength/data_02.json", "valid": "false", "errors": "string_gte"},
{"phase": "minLength validation", "test": "ignores non-strings", "schema": "minLength/schema_0.json", "data": "minLength/data_03.json", "valid": "true"},
{"phase": "minLength validation", "test": "counts utf8 length correctly", "schema": "minLength/schema_0.json", "data": "minLength/data_04.json", "valid": "false", "errors": "string_gte"},
{"phase": "maxLength validation", "test": "shorter is valid", "schema": "maxLength/schema_0.json", "data": "maxLength/data_00.json", "valid": "true"},
{"phase": "maxLength validation", "test": "exact length is valid", "schema": "maxLength/schema_0.json", "data": "maxLength/data_01.json", "valid": "true"},
{"phase": "maxLength validation", "test": "too long is invalid", "schema": "maxLength/schema_0.json", "data": "maxLength/data_02.json", "valid": "false", "errors": "string_lte"},
{"phase": "maxLength validation", "test": "ignores non-strings", "schema": "maxLength/schema_0.json", "data": "maxLength/data_03.json", "valid": "true"},
{"phase": "maxLength validation", "test": "counts utf8 length correctly", "schema": "maxLength/schema_0.json", "data": "maxLength/data_04.json", "valid": "true"},
{"phase": "minimum validation", "test": "above the minimum is valid", "schema": "minimum/schema_0.json", "data": "minimum/data_00.json", "valid": "true"},
{"phase": "minimum validation", "test": "below the minimum is invalid", "schema": "minimum/schema_0.json", "data": "minimum/data_01.json", "valid": "false", "errors": "number_gte"},
{"phase": "minimum validation", "test": "ignores non-numbers", "schema": "minimum/schema_0.json", "data": "minimum/data_02.json", "valid": "true"},
{"phase": "exclusiveMinimum validation", "test": "above the minimum is still valid", "schema": "minimum/schema_1.json", "data": "minimum/data_10.json", "valid": "true"},
{"phase": "exclusiveMinimum validation", "test": "boundary point is invalid", "schema": "minimum/schema_1.json", "data": "minimum/data_11.json", "valid": "false", "errors": "number_gt"},
{"phase": "maximum validation", "test": "below the maximum is valid", "schema": "maximum/schema_0.json", "data": "maximum/data_00.json", "valid": "true"},
{"phase": "maximum validation", "test": "above the maximum is invalid", "schema": "maximum/schema_0.json", "data": "maximum/data_01.json", "valid": "false", "errors": "number_lte"},
{"phase": "maximum validation", "test": "ignores non-numbers", "schema": "maximum/schema_0.json", "data": "maximum/data_02.json", "valid": "true"},
{"phase": "exclusiveMaximum validation", "test": "below the maximum is still valid", "schema": "maximum/schema_1.json", "data": "maximum/data_10.json", "valid": "true"},
{"phase": "exclusiveMaximum validation", "test": "boundary point is invalid", "schema": "maximum/schema_1.json", "data": "maximum/data_11.json", "valid": "false", "errors": "number_lt"},
{"phase": "allOf", "test": "allOf", "schema": "allOf/schema_0.json", "data": "allOf/data_00.json", "valid": "true"},
{"phase": "allOf", "test": "mismatch second", "schema": "allOf/schema_0.json", "data": "allOf/data_01.json", "valid": "false", "errors": "number_all_of, required"},
{"phase": "allOf", "test": "mismatch first", "schema": "allOf/schema_0.json", "data": "allOf/data_02.json", "valid": "false", "errors": "number_all_of, required"},
{"phase": "allOf", "test": "wrong type", "schema": "allOf/schema_0.json", "data": "allOf/data_03.json", "valid": "false", "errors": "number_all_of, invalid_type"},
{"phase": "allOf with base schema", "test": "valid", "schema": "allOf/schema_1.json", "data": "allOf/data_10.json", "valid": "true"},
{"phase": "allOf with base schema", "test": "mismatch base schema", "schema": "allOf/schema_1.json", "data": "allOf/data_11.json", "valid": "false", "errors": "required"},
{"phase": "allOf with base schema", "test": "mismatch first allOf", "schema": "allOf/schema_1.json", "data": "allOf/data_12.json", "valid": "false", "errors": "number_all_of, required"},
{"phase": "allOf with base schema", "test": "mismatch second allOf", "schema": "allOf/schema_1.json", "data": "allOf/data_13.json", "valid": "false", "errors": "number_all_of, required"},
{"phase": "allOf with base schema", "test": "mismatch both", "schema": "allOf/schema_1.json", "data": "allOf/data_14.json", "valid": "false", "errors": "number_all_of, required, required"},
{"phase": "allOf simple types", "test": "valid", "schema": "allOf/schema_2.json", "data": "allOf/data_20.json", "valid": "true"},
{"phase": "allOf simple types", "test": "mismatch one", "schema": "allOf/schema_2.json", "data": "allOf/data_21.json", "valid": "false", "errors": "number_all_of, number_lte"},
{"phase": "oneOf", "test": "first oneOf valid", "schema": "oneOf/schema_0.json", "data": "oneOf/data_00.json", "valid": "true"},
{"phase": "oneOf", "test": "second oneOf valid", "schema": "oneOf/schema_0.json", "data": "oneOf/data_01.json", "valid": "true"},
{"phase": "oneOf", "test": "both oneOf valid", "schema": "oneOf/schema_0.json", "data": "oneOf/data_02.json", "valid": "false", "errors": "number_one_of"},
{"phase": "oneOf", "test": "neither oneOf valid", "schema": "oneOf/schema_0.json", "data": "oneOf/data_03.json", "valid": "false"},
{"phase": "oneOf with base schema", "test": "mismatch base schema", "schema": "oneOf/schema_1.json", "data": "oneOf/data_10.json", "valid": "false", "errors": "invalid_type"},
{"phase": "oneOf with base schema", "test": "one oneOf valid", "schema": "oneOf/schema_1.json", "data": "oneOf/data_11.json", "valid": "true"},
{"phase": "oneOf with base schema", "test": "both oneOf valid", "schema": "oneOf/schema_1.json", "data": "oneOf/data_12.json", "valid": "false", "errors": "number_one_of"},
{"phase": "anyOf", "test": "first anyOf valid", "schema": "anyOf/schema_0.json", "data": "anyOf/data_00.json", "valid": "true"},
{"phase": "anyOf", "test": "second anyOf valid", "schema": "anyOf/schema_0.json", "data": "anyOf/data_01.json", "valid": "true"},
{"phase": "anyOf", "test": "both anyOf valid", "schema": "anyOf/schema_0.json", "data": "anyOf/data_02.json", "valid": "true"},
{"phase": "anyOf", "test": "neither anyOf valid", "schema": "anyOf/schema_0.json", "data": "anyOf/data_03.json", "valid": "false", "errors": "number_any_of, number_gte"},
{"phase": "anyOf with base schema", "test": "mismatch base schema", "schema": "anyOf/schema_1.json", "data": "anyOf/data_10.json", "valid": "false", "errors": "invalid_type"},
{"phase": "anyOf with base schema", "test": "one anyOf valid", "schema": "anyOf/schema_1.json", "data": "anyOf/data_11.json", "valid": "true"},
{"phase": "anyOf with base schema", "test": "both anyOf invalid", "schema": "anyOf/schema_1.json", "data": "anyOf/data_12.json", "valid": "false", "errors": "number_any_of, string_lte"},
{"phase": "not", "test": "allowed", "schema": "not/schema_0.json", "data": "not/data_00.json", "valid": "true"},
{"phase": "not", "test": "disallowed", "schema": "not/schema_0.json", "data": "not/data_01.json", "valid": "false", "errors": "number_not"},
{"phase": "not multiple types", "test": "valid", "schema": "not/schema_1.json", "data": "not/data_10.json", "valid": "true"},
{"phase": "not multiple types", "test": "mismatch", "schema": "not/schema_1.json", "data": "not/data_11.json", "valid": "false", "errors": "number_not"},
{"phase": "not multiple types", "test": "other mismatch", "schema": "not/schema_1.json", "data": "not/data_12.json", "valid": "false", "errors": "number_not"},
{"phase": "not more complex schema", "test": "match", "schema": "not/schema_2.json", "data": "not/data_20.json", "valid": "true"},
{"phase": "not more complex schema", "test": "other match", "schema": "not/schema_2.json", "data": "not/data_21.json", "valid": "true"},
{"phase": "not more complex schema", "test": "mismatch", "schema": "not/schema_2.json", "data": "not/data_22.json", "valid": "false", "errors": "number_not"},
{"phase": "minProperties validation", "test": "longer is valid", "schema": "minProperties/schema_0.json", "data": "minProperties/data_00.json", "valid": "true"},
{"phase": "minProperties validation", "test": "exact length is valid", "schema": "minProperties/schema_0.json", "data": "minProperties/data_01.json", "valid": "true"},
{"phase": "minProperties validation", "test": "too short is invalid", "schema": "minProperties/schema_0.json", "data": "minProperties/data_02.json", "valid": "false", "errors": "array_min_properties"},
{"phase": "minProperties validation", "test": "ignores non-objects", "schema": "minProperties/schema_0.json", "data": "minProperties/data_03.json", "valid": "true"},
{"phase": "maxProperties validation", "test": "shorter is valid", "schema": "maxProperties/schema_0.json", "data": "maxProperties/data_00.json", "valid": "true"},
{"phase": "maxProperties validation", "test": "exact length is valid", "schema": "maxProperties/schema_0.json", "data": "maxProperties/data_01.json", "valid": "true"},
{"phase": "maxProperties validation", "test": "too long is invalid", "schema": "maxProperties/schema_0.json", "data": "maxProperties/data_02.json", "valid": "false", "errors": "array_max_properties"},
{"phase": "maxProperties validation", "test": "ignores non-objects", "schema": "maxProperties/schema_0.json", "data": "maxProperties/data_03.json", "valid": "true"},
{"phase": "by int", "test": "int by int", "schema": "multipleOf/schema_0.json", "data": "multipleOf/data_00.json", "valid": "true"},
{"phase": "by int", "test": "int by int fail", "schema": "multipleOf/schema_0.json", "data": "multipleOf/data_01.json", "valid": "false", "errors": "multiple_of"},
{"phase": "by int", "test": "ignores non-numbers", "schema": "multipleOf/schema_0.json", "data": "multipleOf/data_02.json", "valid": "true"},
{"phase": "by number", "test": "zero is multiple of anything", "schema": "multipleOf/schema_1.json", "data": "multipleOf/data_10.json", "valid": "true"},
{"phase": "by number", "test": "4.5 is multiple of 1.5", "schema": "multipleOf/schema_1.json", "data": "multipleOf/data_11.json", "valid": "true"},
{"phase": "by number", "test": "35 is not multiple of 1.5", "schema": "multipleOf/schema_1.json", "data": "multipleOf/data_12.json", "valid": "false", "errors": "multiple_of"},
{"phase": "by small number", "test": "0.0075 is multiple of 0.0001", "schema": "multipleOf/schema_2.json", "data": "multipleOf/data_20.json", "valid": "true"},
{"phase": "by small number", "test": "0.00751 is not multiple of 0.0001", "schema": "multipleOf/schema_2.json", "data": "multipleOf/data_21.json", "valid": "false", "errors": "multiple_of"},
{"phase": "minItems validation", "test": "longer is valid", "schema": "minItems/schema_0.json", "data": "minItems/data_00.json", "valid": "true"},
{"phase": "minItems validation", "test": "exact length is valid", "schema": "minItems/schema_0.json", "data": "minItems/data_01.json", "valid": "true"},
{"phase": "minItems validation", "test": "too short is invalid", "schema": "minItems/schema_0.json", "data": "minItems/data_02.json", "valid": "false", "errors": "array_min_items"},
{"phase": "minItems validation", "test": "ignores non-arrays", "schema": "minItems/schema_0.json", "data": "minItems/data_03.json", "valid": "true"},
{"phase": "maxItems validation", "test": "shorter is valid", "schema": "maxItems/schema_0.json", "data": "maxItems/data_00.json", "valid": "true"},
{"phase": "maxItems validation", "test": "exact length is valid", "schema": "maxItems/schema_0.json", "data": "maxItems/data_01.json", "valid": "true"},
{"phase": "maxItems validation", "test": "too long is invalid", "schema": "maxItems/schema_0.json", "data": "maxItems/data_02.json", "valid": "false", "errors": "array_max_items"},
{"phase": "maxItems validation", "test": "ignores non-arrays", "schema": "maxItems/schema_0.json", "data": "maxItems/data_03.json", "valid": "true"},
{"phase": "object properties validation", "test": "both properties present and valid is valid", "schema": "properties/schema_0.json", "data": "properties/data_00.json", "valid": "true"},
{"phase": "object properties validation", "test": "one property invalid is invalid", "schema": "properties/schema_0.json", "data": "properties/data_01.json", "valid": "false", "errors": "invalid_type"},
{"phase": "object properties validation", "test": "both properties invalid is invalid", "schema": "properties/schema_0.json", "data": "properties/data_02.json", "valid": "false", "errors": "invalid_type, invalid_type"},
{"phase": "object properties validation", "test": "doesn't invalidate other properties", "schema": "properties/schema_0.json", "data": "properties/data_03.json", "valid": "true"},
{"phase": "object properties validation", "test": "ignores non-objects", "schema": "properties/schema_0.json", "data": "properties/data_04.json", "valid": "true"},
{"phase": "properties, patternProperties, additionalProperties interaction", "test": "property validates property", "schema": "properties/schema_1.json", "data": "properties/data_10.json", "valid": "true"},
{"phase": "properties, patternProperties, additionalProperties interaction", "test": "property invalidates property", "schema": "properties/schema_1.json", "data": "properties/data_11.json", "valid": "false", "errors": "array_max_items"},
{"phase": "properties, patternProperties, additionalProperties interaction", "test": "patternProperty invalidates property", "schema": "properties/schema_1.json", "data": "properties/data_12.json", "valid": "false", "errors": "array_min_items, invalid_type"},
{"phase": "properties, patternProperties, additionalProperties interaction", "test": "patternProperty validates nonproperty", "schema": "properties/schema_1.json", "data": "properties/data_13.json", "valid": "true"},
{"phase": "properties, patternProperties, additionalProperties interaction", "test": "patternProperty invalidates nonproperty", "schema": "properties/schema_1.json", "data": "properties/data_14.json", "valid": "false", "errors": "array_min_items, invalid_type"},
{"phase": "properties, patternProperties, additionalProperties interaction", "test": "additionalProperty ignores property", "schema": "properties/schema_1.json", "data": "properties/data_15.json", "valid": "true"},
{"phase": "properties, patternProperties, additionalProperties interaction", "test": "additionalProperty validates others", "schema": "properties/schema_1.json", "data": "properties/data_16.json", "valid": "true"},
{"phase": "properties, patternProperties, additionalProperties interaction", "test": "additionalProperty invalidates others", "schema": "properties/schema_1.json", "data": "properties/data_17.json", "valid": "false", "errors": "invalid_type"},
{"phase": "root pointer ref", "test": "match", "schema": "ref/schema_0.json", "data": "ref/data_00.json", "valid": "true"},
{"phase": "root pointer ref", "test": "recursive match", "schema": "ref/schema_0.json", "data": "ref/data_01.json", "valid": "true"},
{"phase": "root pointer ref", "test": "mismatch", "schema": "ref/schema_0.json", "data": "ref/data_02.json", "valid": "false", "errors": "additional_property_not_allowed"},
{"phase": "root pointer ref", "test": "recursive mismatch", "schema": "ref/schema_0.json", "data": "ref/data_03.json", "valid": "false", "errors": "additional_property_not_allowed"},
{"phase": "relative pointer ref to object", "test": "match", "schema": "ref/schema_1.json", "data": "ref/data_10.json", "valid": "true"},
{"phase": "relative pointer ref to object", "test": "mismatch", "schema": "ref/schema_1.json", "data": "ref/data_11.json", "valid": "false", "errors": "invalid_type"},
{"phase": "relative pointer ref to array", "test": "match array", "schema": "ref/schema_2.json", "data": "ref/data_20.json", "valid": "true"},
{"phase": "relative pointer ref to array", "test": "mismatch array", "schema": "ref/schema_2.json", "data": "ref/data_21.json", "valid": "false", "errors": "invalid_type"},
{"phase": "escaped pointer ref", "test": "slash", "schema": "ref/schema_3.json", "data": "ref/data_30.json", "valid": "false", "errors": "invalid_type"},
{"phase": "escaped pointer ref", "test": "tilda", "schema": "ref/schema_3.json", "data": "ref/data_31.json", "valid": "false", "errors": "invalid_type"},
{"phase": "escaped pointer ref", "test": "percent", "schema": "ref/schema_3.json", "data": "ref/data_32.json", "valid": "false", "errors": "invalid_type"},
{"phase": "nested refs", "test": "nested ref valid", "schema": "ref/schema_4.json", "data": "ref/data_40.json", "valid": "true"},
{"phase": "nested refs", "test": "nested ref invalid", "schema": "ref/schema_4.json", "data": "ref/data_41.json", "valid": "false", "errors": "invalid_type"},
{"phase": "remote ref, containing refs itself", "test": "remote ref valid", "schema": "ref/schema_5.json", "data": "ref/data_50.json", "valid": "true"},
{"phase": "remote ref, containing refs itself", "test": "remote ref invalid", "schema": "ref/schema_5.json", "data": "ref/data_51.json", "valid": "false", "errors": "number_all_of, number_gte"},
{"phase": "a schema given for items", "test": "valid items", "schema": "items/schema_0.json", "data": "items/data_00.json", "valid": "true"},
{"phase": "a schema given for items", "test": "wrong type of items", "schema": "items/schema_0.json", "data": "items/data_01.json", "valid": "false", "errors": "invalid_type"},
{"phase": "a schema given for items", "test": "ignores non-arrays", "schema": "items/schema_0.json", "data": "items/data_02.json", "valid": "true"},
{"phase": "an array of schemas for items", "test": "correct types", "schema": "items/schema_1.json", "data": "items/data_10.json", "valid": "true"},
{"phase": "an array of schemas for items", "test": "wrong types", "schema": "items/schema_1.json", "data": "items/data_11.json", "valid": "false", "errors": "invalid_type, invalid_type"},
{"phase": "valid definition", "test": "valid definition schema", "schema": "definitions/schema_0.json", "data": "definitions/data_00.json", "valid": "true"},
{"phase": "invalid definition", "test": "invalid definition schema", "schema": "definitions/schema_1.json", "data": "definitions/data_10.json", "valid": "false", "errors": "number_any_of, enum"},
{"phase": "additionalItems as schema", "test": "additional items match schema", "schema": "additionalItems/schema_0.json", "data": "additionalItems/data_00.json", "valid": "true"},
{"phase": "additionalItems as schema", "test": "additional items do not match schema", "schema": "additionalItems/schema_0.json", "data": "additionalItems/data_01.json", "valid": "false", "errors": "invalid_type"},
{"phase": "items is schema, no additionalItems", "test": "all items match schema", "schema": "additionalItems/schema_1.json", "data": "additionalItems/data_10.json", "valid": "true"},
{"phase": "array of items with no additionalItems", "test": "no additional items present", "schema": "additionalItems/schema_2.json", "data": "additionalItems/data_20.json", "valid": "true"},
{"phase": "array of items with no additionalItems", "test": "additional items are not permitted", "schema": "additionalItems/schema_2.json", "data": "additionalItems/data_21.json", "valid": "false", "errors": "array_no_additional_items"},
{"phase": "additionalItems as false without items", "test": "items defaults to empty schema so everything is valid", "schema": "additionalItems/schema_3.json", "data": "additionalItems/data_30.json", "valid": "true"},
{"phase": "additionalItems as false without items", "test": "ignores non-arrays", "schema": "additionalItems/schema_3.json", "data": "additionalItems/data_31.json", "valid": "true"},
{"phase": "additionalItems are allowed by default", "test": "only the first item is validated", "schema": "additionalItems/schema_4.json", "data": "additionalItems/data_40.json", "valid": "true"},
{"phase": "additionalProperties being false does not allow other properties", "test": "no additional properties is valid", "schema": "additionalProperties/schema_0.json", "data": "additionalProperties/data_00.json", "valid": "true"},
{"phase": "additionalProperties being false does not allow other properties", "test": "an additional property is invalid", "schema": "additionalProperties/schema_0.json", "data": "additionalProperties/data_01.json", "valid": "false", "errors": "additional_property_not_allowed"},
{"phase": "additionalProperties being false does not allow other properties", "test": "ignores non-objects", "schema": "additionalProperties/schema_0.json", "data": "additionalProperties/data_02.json", "valid": "true"},
{"phase": "additionalProperties being false does not allow other properties", "test": "patternProperties are not additional properties", "schema": "additionalProperties/schema_0.json", "data": "additionalProperties/data_03.json", "valid": "true"},
{"phase": "additionalProperties allows a schema which should validate", "test": "no additional properties is valid", "schema": "additionalProperties/schema_1.json", "data": "additionalProperties/data_10.json", "valid": "true"},
{"phase": "additionalProperties allows a schema which should validate", "test": "an additional valid property is valid", "schema": "additionalProperties/schema_1.json", "data": "additionalProperties/data_11.json", "valid": "true"},
{"phase": "additionalProperties allows a schema which should validate", "test": "an additional invalid property is invalid", "schema": "additionalProperties/schema_1.json", "data": "additionalProperties/data_12.json", "valid": "false", "errors": "invalid_type"},
{"phase": "additionalProperties are allowed by default", "test": "additional properties are allowed", "schema": "additionalProperties/schema_2.json", "data": "additionalProperties/data_20.json", "valid": "true"},
{"phase": "dependencies", "test": "neither", "schema": "dependencies/schema_0.json", "data": "dependencies/data_00.json", "valid": "true"},
{"phase": "dependencies", "test": "nondependant", "schema": "dependencies/schema_0.json", "data": "dependencies/data_01.json", "valid": "true"},
{"phase": "dependencies", "test": "with dependency", "schema": "dependencies/schema_0.json", "data": "dependencies/data_02.json", "valid": "true"},
{"phase": "dependencies", "test": "missing dependency", "schema": "dependencies/schema_0.json", "data": "dependencies/data_03.json", "valid": "false", "errors": "missing_dependency"},
{"phase": "dependencies", "test": "ignores non-objects", "schema": "dependencies/schema_0.json", "data": "dependencies/data_04.json", "valid": "true"},
{"phase": "multiple dependencies", "test": "neither", "schema": "dependencies/schema_1.json", "data": "dependencies/data_10.json", "valid": "true"},
{"phase": "multiple dependencies", "test": "nondependants", "schema": "dependencies/schema_1.json", "data": "dependencies/data_11.json", "valid": "true"},
{"phase": "multiple dependencies", "test": "with dependencies", "schema": "dependencies/schema_1.json", "data": "dependencies/data_12.json", "valid": "true"},
{"phase": "multiple dependencies", "test": "missing dependency", "schema": "dependencies/schema_1.json", "data": "dependencies/data_13.json", "valid": "false", "errors": "missing_dependency"},
{"phase": "multiple dependencies", "test": "missing other dependency", "schema": "dependencies/schema_1.json", "data": "dependencies/data_14.json", "valid": "false", "errors": "missing_dependency"},
{"phase": "multiple dependencies", "test": "missing both dependencies", "schema": "dependencies/schema_1.json", "data": "dependencies/data_15.json", "valid": "false", "errors": "missing_dependency, missing_dependency"},
{"phase": "multiple dependencies subschema", "test": "valid", "schema": "dependencies/schema_2.json", "data": "dependencies/data_20.json", "valid": "true"},
{"phase": "multiple dependencies subschema", "test": "no dependency", "schema": "dependencies/schema_2.json", "data": "dependencies/data_21.json", "valid": "true"},
{"phase": "multiple dependencies subschema", "test": "wrong type", "schema": "dependencies/schema_2.json", "data": "dependencies/data_22.json", "valid": "false"},
{"phase": "multiple dependencies subschema", "test": "wrong type other", "schema": "dependencies/schema_2.json", "data": "dependencies/data_23.json", "valid": "false", "errors": "invalid_type"},
{"phase": "multiple dependencies subschema", "test": "wrong type both", "schema": "dependencies/schema_2.json", "data": "dependencies/data_24.json", "valid": "false", "errors": "invalid_type, invalid_type"},
{"phase": "patternProperties validates properties matching a regex", "test": "a single valid match is valid", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_00.json", "valid": "true"},
{"phase": "patternProperties validates properties matching a regex", "test": "multiple valid matches is valid", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_01.json", "valid": "true"},
{"phase": "patternProperties validates properties matching a regex", "test": "a single invalid match is invalid", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_02.json", "valid": "false", "errors": "invalid_property_pattern, invalid_type"},
{"phase": "patternProperties validates properties matching a regex", "test": "multiple invalid matches is invalid", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_03.json", "valid": "false", "errors": "invalid_property_pattern, invalid_property_pattern, invalid_type, invalid_type"},
{"phase": "patternProperties validates properties matching a regex", "test": "ignores non-objects", "schema": "patternProperties/schema_0.json", "data": "patternProperties/data_04.json", "valid": "true"},
{"phase": "patternProperties validates properties matching a regex", "test": "with additionalProperties combination", "schema": "patternProperties/schema_3.json", "data": "patternProperties/data_24.json", "valid": "false", "errors": "additional_property_not_allowed"},
{"phase": "patternProperties validates properties matching a regex", "test": "with additionalProperties combination", "schema": "patternProperties/schema_3.json", "data": "patternProperties/data_25.json", "valid": "false", "errors": "additional_property_not_allowed"},
{"phase": "patternProperties validates properties matching a regex", "test": "with additionalProperties combination", "schema": "patternProperties/schema_4.json", "data": "patternProperties/data_26.json", "valid": "false", "errors": "additional_property_not_allowed"},
{"phase": "multiple simultaneous patternProperties are validated", "test": "a single valid match is valid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_10.json", "valid": "true"},
{"phase": "multiple simultaneous patternProperties are validated", "test": "a simultaneous match is valid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_11.json", "valid": "true"},
{"phase": "multiple simultaneous patternProperties are validated", "test": "multiple matches is valid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_12.json", "valid": "true"},
{"phase": "multiple simultaneous patternProperties are validated", "test": "an invalid due to one is invalid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_13.json", "valid": "false", "errors": "invalid_property_pattern, invalid_type"},
{"phase": "multiple simultaneous patternProperties are validated", "test": "an invalid due to the other is invalid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_14.json", "valid": "false", "errors": "number_lte"},
{"phase": "multiple simultaneous patternProperties are validated", "test": "an invalid due to both is invalid", "schema": "patternProperties/schema_1.json", "data": "patternProperties/data_15.json", "valid": "false", "errors": "invalid_type, number_lte"},
{"phase": "regexes are not anchored by default and are case sensitive", "test": "non recognized members are ignored", "schema": "patternProperties/schema_2.json", "data": "patternProperties/data_20.json", "valid": "true"},
{"phase": "regexes are not anchored by default and are case sensitive", "test": "recognized members are accounted for", "schema": "patternProperties/schema_2.json", "data": "patternProperties/data_21.json", "valid": "false", "errors": "invalid_property_pattern, invalid_type"},
{"phase": "regexes are not anchored by default and are case sensitive", "test": "regexes are case sensitive", "schema": "patternProperties/schema_2.json", "data": "patternProperties/data_22.json", "valid": "true"},
{"phase": "regexes are not anchored by default and are case sensitive", "test": "regexes are case sensitive, 2", "schema": "patternProperties/schema_2.json", "data": "patternProperties/data_23.json", "valid": "false", "errors": "invalid_property_pattern, invalid_type"},
{"phase": "remote ref", "test": "remote ref valid", "schema": "refRemote/schema_0.json", "data": "refRemote/data_00.json", "valid": "true"},
{"phase": "remote ref", "test": "remote ref invalid", "schema": "refRemote/schema_0.json", "data": "refRemote/data_01.json", "valid": "false", "errors": "invalid_type"},
{"phase": "fragment within remote ref", "test": "remote fragment valid", "schema": "refRemote/schema_1.json", "data": "refRemote/data_10.json", "valid": "true"},
{"phase": "fragment within remote ref", "test": "remote fragment invalid", "schema": "refRemote/schema_1.json", "data": "refRemote/data_11.json", "valid": "false", "errors": "invalid_type"},
{"phase": "ref within remote ref", "test": "ref within ref valid", "schema": "refRemote/schema_2.json", "data": "refRemote/data_20.json", "valid": "true"},
{"phase": "ref within remote ref", "test": "ref within ref invalid", "schema": "refRemote/schema_2.json", "data": "refRemote/data_21.json", "valid": "false", "errors": "invalid_type"},
{"phase": "format validation", "test": "email format is invalid", "schema": "format/schema_0.json", "data": "format/data_00.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "email format is invalid", "schema": "format/schema_0.json", "data": "format/data_01.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "email format valid", "schema": "format/schema_0.json", "data": "format/data_02.json", "valid": "true"},
{"phase": "format validation", "test": "invoice format valid", "schema": "format/schema_1.json", "data": "format/data_03.json", "valid": "true"},
{"phase": "format validation", "test": "invoice format is invalid", "schema": "format/schema_1.json", "data": "format/data_04.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_05.json", "valid": "true"},
{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_06.json", "valid": "true"},
{"phase": "format validation", "test": "date-time format is invalid", "schema": "format/schema_2.json", "data": "format/data_07.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_08.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_09.json", "valid": "true"},
{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_10.json", "valid": "true"},
{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_11.json", "valid": "true"},
{"phase": "format validation", "test": "date-time format is valid", "schema": "format/schema_2.json", "data": "format/data_12.json", "valid": "true"},
{"phase": "format validation", "test": "hostname format is valid", "schema": "format/schema_3.json", "data": "format/data_13.json", "valid": "true"},
{"phase": "format validation", "test": "hostname format is valid", "schema": "format/schema_3.json", "data": "format/data_14.json", "valid": "true"},
{"phase": "format validation", "test": "hostname format is valid", "schema": "format/schema_3.json", "data": "format/data_15.json", "valid": "true"},
{"phase": "format validation", "test": "hostname format is invalid", "schema": "format/schema_3.json", "data": "format/data_16.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "hostname format is invalid", "schema": "format/schema_3.json", "data": "format/data_17.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "ipv4 format is valid", "schema": "format/schema_4.json", "data": "format/data_18.json", "valid": "true"},
{"phase": "format validation", "test": "ipv4 format is invalid", "schema": "format/schema_4.json", "data": "format/data_19.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "ipv6 format is valid", "schema": "format/schema_5.json", "data": "format/data_20.json", "valid": "true"},
{"phase": "format validation", "test": "ipv6 format is valid", "schema": "format/schema_5.json", "data": "format/data_21.json", "valid": "true"},
{"phase": "format validation", "test": "ipv6 format is invalid", "schema": "format/schema_5.json", "data": "format/data_22.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "ipv6 format is invalid", "schema": "format/schema_5.json", "data": "format/data_23.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "uri format is valid", "schema": "format/schema_6.json", "data": "format/data_24.json", "valid": "true"},
{"phase": "format validation", "test": "uri format is valid", "schema": "format/schema_6.json", "data": "format/data_25.json", "valid": "true"},
{"phase": "format validation", "test": "uri format is valid", "schema": "format/schema_6.json", "data": "format/data_26.json", "valid": "true"},
{"phase": "format validation", "test": "uri format is valid", "schema": "format/schema_6.json", "data": "format/data_27.json", "valid": "true"},
{"phase": "format validation", "test": "uri format is invalid", "schema": "format/schema_6.json", "data": "format/data_28.json", "valid": "false", "errors": "format"},
{"phase": "format validation", "test": "uri format is invalid", "schema": "format/schema_6.json", "data": "format/data_13.json", "valid": "false", "errors": "format"},
}
// TODO: pass the remaining failing tests; using id(s) as scope for references is not implemented yet
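Each row above pairs a schema file with a data file and the expected outcome. As a minimal sketch of what the harness does per row, assuming the package's file-reference loader and an illustrative path (the real loop in TestJsonSchemaTestSuite resolves the paths itself):

	// Sketch, not part of the suite: validate one table row by hand.
	// The /tmp path is illustrative only.
	schemaLoader := NewReferenceLoader("file:///tmp/json_schema_test_suite/multipleOf/schema_0.json")
	documentLoader := NewReferenceLoader("file:///tmp/json_schema_test_suite/multipleOf/data_00.json")
	result, err := Validate(schemaLoader, documentLoader)
	if err != nil {
		panic(err)
	}
	fmt.Println(result.Valid()) // the "int by int" row above expects true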
@ -414,3 +419,149 @@ func TestJsonSchemaTestSuite(t *testing.T) {
fmt.Printf("\n%d tests performed / %d total tests to perform ( %.2f %% )\n", len(JsonSchemaTestSuiteMap), 248, float32(len(JsonSchemaTestSuiteMap))/248.0*100.0)
}
const circularReference = `{
"type": "object",
"properties": {
"games": {
"type": "array",
"items": {
"$ref": "#/definitions/game"
}
}
},
"definitions": {
"game": {
"type": "object",
"properties": {
"winner": {
"$ref": "#/definitions/player"
},
"loser": {
"$ref": "#/definitions/player"
}
}
},
"player": {
"type": "object",
"properties": {
"user": {
"$ref": "#/definitions/user"
},
"game": {
"$ref": "#/definitions/game"
}
}
},
"user": {
"type": "object",
"properties": {
"fullName": {
"type": "string"
}
}
}
}
}`
func TestCircularReference(t *testing.T) {
loader := NewStringLoader(circularReference)
// call the target function
_, err := NewSchema(loader)
if err != nil {
t.Errorf("Got error: %s", err.Error())
}
}
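The test above only checks that compilation terminates despite the game/player reference cycle. As a follow-up sketch (not part of the test), validation then walks the cycle only as deep as the document actually nests:

	// Sketch: validate a document against the cyclic schema compiled above.
	schema, err := NewSchema(NewStringLoader(circularReference))
	if err != nil {
		panic(err)
	}
	doc := NewStringLoader(`{"games": [{"winner": {"user": {"fullName": "Alice"}}}]}`)
	result, err := schema.Validate(doc)
	if err != nil {
		panic(err)
	}
	fmt.Println(result.Valid()) // true; recursion stops where the document does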
// From http://json-schema.org/examples.html
const simpleSchema = `{
"title": "Example Schema",
"type": "object",
"properties": {
"firstName": {
"type": "string"
},
"lastName": {
"type": "string"
},
"age": {
"description": "Age in years",
"type": "integer",
"minimum": 0
}
},
"required": ["firstName", "lastName"]
}`
func TestLoaders(t *testing.T) {
// setup reader loader
reader := bytes.NewBufferString(simpleSchema)
readerLoader, wrappedReader := NewReaderLoader(reader)
// drain reader
by, err := ioutil.ReadAll(wrappedReader)
assert.Nil(t, err)
assert.Equal(t, simpleSchema, string(by))
// setup writer loaders
writer := &bytes.Buffer{}
writerLoader, wrappedWriter := NewWriterLoader(writer)
// fill writer
n, err := io.WriteString(wrappedWriter, simpleSchema)
assert.Nil(t, err)
assert.Equal(t, len(simpleSchema), n)
loaders := []JSONLoader{
NewStringLoader(simpleSchema),
readerLoader,
writerLoader,
}
for _, l := range loaders {
_, err := NewSchema(l)
assert.Nil(t, err, "loader: %T", l)
}
}
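Note the draining step above: NewReaderLoader tees the stream, so the loader presumably sees only the bytes that have actually been read through the wrapped reader. A minimal sketch of that assumption:

	// Sketch, assuming the tee behaviour the test relies on: drain first,
	// otherwise NewSchema would be handed an incomplete document.
	reader := bytes.NewBufferString(simpleSchema)
	loader, tee := NewReaderLoader(reader)
	if _, err := io.Copy(ioutil.Discard, tee); err != nil {
		panic(err)
	}
	if _, err := NewSchema(loader); err != nil {
		panic(err)
	}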
const invalidPattern = `{
"title": "Example Pattern",
"type": "object",
"properties": {
"invalid": {
"type": "string",
"pattern": 99999
}
}
}`
func TestLoadersWithInvalidPattern(t *testing.T) {
// setup reader loader
reader := bytes.NewBufferString(invalidPattern)
readerLoader, wrappedReader := NewReaderLoader(reader)
// drain reader
by, err := ioutil.ReadAll(wrappedReader)
assert.Nil(t, err)
assert.Equal(t, invalidPattern, string(by))
// setup writer loaders
writer := &bytes.Buffer{}
writerLoader, wrappedWriter := NewWriterLoader(writer)
// fill writer
n, err := io.WriteString(wrappedWriter, invalidPattern)
assert.Nil(t, err)
assert.Equal(t, len(invalidPattern), n)
loaders := []JSONLoader{
NewStringLoader(invalidPattern),
readerLoader,
writerLoader,
}
for _, l := range loaders {
_, err := NewSchema(l)
assert.NotNil(t, err, "expected error loading invalid pattern: %T", l)
}
}

@ -214,7 +214,7 @@ func (s *subSchema) PatternPropertiesString() string {
}
patternPropertiesKeySlice := []string{}
for pk, _ := range s.patternProperties {
for pk := range s.patternProperties {
patternPropertiesKeySlice = append(patternPropertiesKeySlice, `"`+pk+`"`)
}

@ -34,7 +34,12 @@ import (
)
func isKind(what interface{}, kind reflect.Kind) bool {
return reflect.ValueOf(what).Kind() == kind
target := what
if isJsonNumber(what) {
// json.Number is a string kind; convert it to float64 so the kind check sees a number
target = *mustBeNumber(what)
}
return reflect.ValueOf(target).Kind() == kind
}
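The unwrap matters because documents decoded with json.Decoder.UseNumber carry numerics as json.Number, which is declared as a string type. A standalone stdlib sketch of the kind mismatch the change fixes:

	// encoding/json declares `type Number string`, so its reflect.Kind is
	// String even though it holds a numeric literal.
	dec := json.NewDecoder(strings.NewReader(`{"n": 1.5}`))
	dec.UseNumber()
	var doc map[string]interface{}
	if err := dec.Decode(&doc); err != nil {
		panic(err)
	}
	fmt.Println(reflect.ValueOf(doc["n"]).Kind()) // string, not float64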
func existsMapKey(m map[string]interface{}, k string) bool {

@ -55,7 +55,7 @@ func (v *Schema) Validate(l JSONLoader) (*Result, error) {
// load document
root, err := l.loadJSON()
root, err := l.LoadJSON()
if err != nil {
return nil, err
}
@ -412,7 +412,7 @@ func (v *subSchema) validateArray(currentSubSchema *subSchema, value []interface
internalLog(" %v", value)
}
nbItems := len(value)
nbValues := len(value)
// "items" is a single schema: validate every element of the array against it
if currentSubSchema.itemsChildrenIsSingleSchema {
@ -425,15 +425,18 @@ func (v *subSchema) validateArray(currentSubSchema *subSchema, value []interface
if currentSubSchema.itemsChildren != nil && len(currentSubSchema.itemsChildren) > 0 {
nbItems := len(currentSubSchema.itemsChildren)
nbValues := len(value)
if nbItems == nbValues {
for i := 0; i != nbItems; i++ {
subContext := newJsonContext(strconv.Itoa(i), context)
validationResult := currentSubSchema.itemsChildren[i].subValidateWithContext(value[i], subContext)
result.mergeErrors(validationResult)
}
} else if nbItems < nbValues {
// while we have both schemas and values, check them against each other
for i := 0; i != nbItems && i != nbValues; i++ {
subContext := newJsonContext(strconv.Itoa(i), context)
validationResult := currentSubSchema.itemsChildren[i].subValidateWithContext(value[i], subContext)
result.mergeErrors(validationResult)
}
if nbItems < nbValues {
// we have less schemas than elements in the instance array,
// but that might be ok if "additionalItems" is specified.
switch currentSubSchema.additionalItems.(type) {
case bool:
if !currentSubSchema.additionalItems.(bool) {
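To illustrate the branch above: with tuple-style "items" and "additionalItems": false, a matching prefix passes while extra elements past the tuple are rejected. A minimal sketch using the package-level Validate (error handling elided for brevity):

	// Sketch: tuple validation with additionalItems disabled.
	schema := NewStringLoader(`{
		"items": [{"type": "integer"}, {"type": "string"}],
		"additionalItems": false
	}`)
	ok, _ := Validate(schema, NewStringLoader(`[1, "two"]`))
	bad, _ := Validate(schema, NewStringLoader(`[1, "two", 3]`))
	fmt.Println(ok.Valid(), bad.Valid()) // true false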
@ -453,7 +456,7 @@ func (v *subSchema) validateArray(currentSubSchema *subSchema, value []interface
// minItems & maxItems
if currentSubSchema.minItems != nil {
if nbItems < int(*currentSubSchema.minItems) {
if nbValues < int(*currentSubSchema.minItems) {
result.addError(
new(ArrayMinItemsError),
context,
@ -463,7 +466,7 @@ func (v *subSchema) validateArray(currentSubSchema *subSchema, value []interface
}
}
if currentSubSchema.maxItems != nil {
if nbItems > int(*currentSubSchema.maxItems) {
if nbValues > int(*currentSubSchema.maxItems) {
result.addError(
new(ArrayMaxItemsError),
context,