diff --git a/src/github.com/matrix-org/dendrite/cmd/mediaapi-integration-tests/main.go b/src/github.com/matrix-org/dendrite/cmd/mediaapi-integration-tests/main.go
index d8b457d6..cc845119 100644
--- a/src/github.com/matrix-org/dendrite/cmd/mediaapi-integration-tests/main.go
+++ b/src/github.com/matrix-org/dendrite/cmd/mediaapi-integration-tests/main.go
@@ -239,7 +239,7 @@ func testDownload(host, origin, mediaID string, wantedStatusCode int, serverCmdC
 	testReq := &test.Request{
 		Req:              req,
 		WantedStatusCode: wantedStatusCode,
-		WantedBody:       test.CanonicalJSONInput([]string{""})[0],
+		WantedBody:       "",
 	}
 	testReq.Run(fmt.Sprintf("download mxc://%v/%v from %v", origin, mediaID, host), timeout, serverCmdChan)
 }
@@ -263,7 +263,7 @@ func testThumbnail(width, height int, resizeMethod, host string, serverCmdChan c
 	testReq := &test.Request{
 		Req:              req,
 		WantedStatusCode: 200,
-		WantedBody:       test.CanonicalJSONInput([]string{""})[0],
+		WantedBody:       "",
 	}
 	testReq.Run(fmt.Sprintf("thumbnail mxc://%v/%v%v from %v", testOrigin, testMediaID, query, host), timeout, serverCmdChan)
 }
diff --git a/src/github.com/matrix-org/dendrite/common/keydb/keydb.go b/src/github.com/matrix-org/dendrite/common/keydb/keydb.go
index 9e59f0ca..8d2be24d 100644
--- a/src/github.com/matrix-org/dendrite/common/keydb/keydb.go
+++ b/src/github.com/matrix-org/dendrite/common/keydb/keydb.go
@@ -44,6 +44,11 @@ func NewDatabase(dataSourceName string) (*Database, error) {
 	return d, nil
 }
 
+// FetcherName implements KeyFetcher
+func (d Database) FetcherName() string {
+	return "KeyDatabase"
+}
+
 // FetchKeys implements gomatrixserverlib.KeyDatabase
 func (d *Database) FetchKeys(
 	ctx context.Context,
diff --git a/vendor/manifest b/vendor/manifest
index ba6dab4c..830d3e2d 100644
--- a/vendor/manifest
+++ b/vendor/manifest
@@ -135,7 +135,7 @@
 		{
 			"importpath": "github.com/matrix-org/gomatrixserverlib",
 			"repository": "https://github.com/matrix-org/gomatrixserverlib",
-			"revision": "076933f95312aae3a9476e78d6b4118e1b45d542",
+			"revision": "8540d3dfc13c797cd3200640bc06e0286ab355aa",
 			"branch": "master"
 		},
 		{
@@ -274,6 +274,24 @@
 			"branch": "master",
 			"path": "/require"
 		},
+		{
+			"importpath": "github.com/tidwall/gjson",
+			"repository": "https://github.com/tidwall/gjson",
+			"revision": "67e2a63ac70d273b6bc7589f12f07180bc9fc189",
+			"branch": "master"
+		},
+		{
+			"importpath": "github.com/tidwall/match",
+			"repository": "https://github.com/tidwall/match",
+			"revision": "1731857f09b1f38450e2c12409748407822dc6be",
+			"branch": "master"
+		},
+		{
+			"importpath": "github.com/tidwall/sjson",
+			"repository": "https://github.com/tidwall/sjson",
+			"revision": "6a22caf2fd45d5e2119bfc3717e984f15a7eb7ee",
+			"branch": "master"
+		},
 		{
 			"importpath": "github.com/tj/go-debug",
 			"repository": "https://github.com/tj/go-debug",
diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/client.go b/vendor/src/github.com/matrix-org/gomatrixserverlib/client.go
index cad361ae..e47be7fd 100644
--- a/vendor/src/github.com/matrix-org/gomatrixserverlib/client.go
+++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/client.go
@@ -175,7 +175,29 @@ func (fc *Client) LookupUserInfo(
 	return
 }
 
-// LookupServerKeys lookups up the keys for a matrix server from a matrix server.
+// GetServerKeys asks a matrix server for its signing keys and TLS cert
+func (fc *Client) GetServerKeys(
+	ctx context.Context, matrixServer ServerName,
+) (ServerKeys, error) {
+	url := url.URL{
+		Scheme: "matrix",
+		Host:   string(matrixServer),
+		Path:   "/_matrix/key/v2/server",
+	}
+
+	var body ServerKeys
+	req, err := http.NewRequest("GET", url.String(), nil)
+	if err != nil {
+		return body, err
+	}
+
+	err = fc.DoRequestAndParseResponse(
+		ctx, req, &body,
+	)
+	return body, err
+}
+
+// LookupServerKeys looks up the keys for a matrix server from a matrix server.
 // The first argument is the name of the matrix server to download the keys from.
 // The second argument is a map from (server name, key ID) pairs to timestamps.
 // The (server name, key ID) pair identifies the key to download.
diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/event.go b/vendor/src/github.com/matrix-org/gomatrixserverlib/event.go
index 2a26e7af..f873cb5b 100644
--- a/vendor/src/github.com/matrix-org/gomatrixserverlib/event.go
+++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/event.go
@@ -16,11 +16,13 @@ package gomatrixserverlib
 import (
+	"bytes"
 	"encoding/json"
 	"fmt"
 	"strings"
 	"time"
 
+	"github.com/tidwall/sjson"
 	"golang.org/x/crypto/ed25519"
 )
@@ -183,37 +185,52 @@ func (eb *EventBuilder) Build(eventID string, now time.Time, origin ServerName,
 // It also checks the content hashes to ensure the event has not been tampered with.
 // This should be used when receiving new events from remote servers.
 func NewEventFromUntrustedJSON(eventJSON []byte) (result Event, err error) {
-	var event map[string]rawJSON
-	if err = json.Unmarshal(eventJSON, &event); err != nil {
+	// We parse the JSON early on so that we don't have to check if the JSON
+	// is valid
+	if err = json.Unmarshal(eventJSON, &result.fields); err != nil {
 		return
 	}
+
 	// Synapse removes these keys from events in case a server accidentally added them.
 	// https://github.com/matrix-org/synapse/blob/v0.18.5/synapse/crypto/event_signing.py#L57-L62
-	delete(event, "outlier")
-	delete(event, "destinations")
-	delete(event, "age_ts")
-
-	if eventJSON, err = json.Marshal(event); err != nil {
-		return
-	}
-
-	if err = checkEventContentHash(eventJSON); err != nil {
-		result.redacted = true
-		// If the content hash doesn't match then we have to discard all non-essential fields
-		// because they've been tampered with.
-		if eventJSON, err = redactEvent(eventJSON); err != nil {
+	for _, key := range []string{"outlier", "destinations", "age_ts"} {
+		if eventJSON, err = sjson.DeleteBytes(eventJSON, key); err != nil {
 			return
 		}
 	}
-	if eventJSON, err = CanonicalJSON(eventJSON); err != nil {
-		return
+	// We know the JSON must be valid here.
+	eventJSON = CanonicalJSONAssumeValid(eventJSON)
+
+	if err = checkEventContentHash(eventJSON); err != nil {
+		result.redacted = true
+
+		// If the content hash doesn't match then we have to discard all non-essential fields
+		// because they've been tampered with.
+		var redactedJSON []byte
+		if redactedJSON, err = redactEvent(eventJSON); err != nil {
+			return
+		}
+
+		redactedJSON = CanonicalJSONAssumeValid(redactedJSON)
+
+		// We need to ensure that `result` is the redacted event.
+		// If redactedJSON is the same as eventJSON then `result` is already
+		// correct. If not then we need to reparse.
+		//
+		// Yes, this means that for some events we parse twice (which is slow),
+		// but means that parsing unredacted events is fast.
+ if !bytes.Equal(redactedJSON, eventJSON) { + result = Event{redacted: true} + if err = json.Unmarshal(redactedJSON, &result.fields); err != nil { + return + } + } + + eventJSON = redactedJSON } result.eventJSON = eventJSON - if err = json.Unmarshal(eventJSON, &result.fields); err != nil { - return - } if err = result.CheckFields(); err != nil { return diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/event_test.go b/vendor/src/github.com/matrix-org/gomatrixserverlib/event_test.go new file mode 100644 index 00000000..d5c715da --- /dev/null +++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/event_test.go @@ -0,0 +1,52 @@ +/* Copyright 2017 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package gomatrixserverlib + +import ( + "encoding/json" + "testing" +) + +func benchmarkParse(b *testing.B, eventJSON string) { + var event Event + + // run the Unparse function b.N times + for n := 0; n < b.N; n++ { + if err := json.Unmarshal([]byte(eventJSON), &event); err != nil { + b.Error("Failed to parse event") + } + } +} + +// Benchmark a more complicated event, in this case a power levels event. + +func BenchmarkParseLargerEvent(b *testing.B) { + benchmarkParse(b, `{"auth_events":[["$Stdin0028C5qBjz5:localhost",{"sha256":"PvTyW+Mfb0aCajkIlBk1XlQE+1uVco3to8C2+/1J7iQ"}],["$klXtjBwwDQIGglax:localhost",{"sha256":"hLoiSkcGLZJr5wkIDA8+bujNJPsYX1SOCCXIErHEcgM"}]],"content":{"ban":50,"events":{"m.room.avatar":50,"m.room.canonical_alias":50,"m.room.history_visibility":100,"m.room.name":50,"m.room.power_levels":100},"events_default":0,"invite":0,"kick":50,"redact":50,"state_default":50,"users":{"@test:localhost":100},"users_default":0},"depth":3,"event_id":"$7gPR7SLdkfDsMvJL:localhost","hashes":{"sha256":"/kQnrzO5vhbnwyGvKso4CVMRyyryiyanq6t27mt5kSw"},"origin":"localhost","origin_server_ts":1510854446548,"prev_events":[["$klXtjBwwDQIGglax:localhost",{"sha256":"hLoiSkcGLZJr5wkIDA8+bujNJPsYX1SOCCXIErHEcgM"}]],"prev_state":[],"room_id":"!pUjJbIC8V32G0FLt:localhost","sender":"@test:localhost","signatures":{"localhost":{"ed25519:u9kP":"NOxjrcci7AIRhcTVmJ6nrsslLsaOJzB0iusDZ6cOFrv2OXkDY7mrBM3cQQS3DhGWltEtu3OC0nsvkfeYtwr9DQ"}},"state_key":"","type":"m.room.power_levels"}`) +} + +// Lets now test parsing a smaller name event, first one that is valid, then wrong hash, and then the redacted one + +func BenchmarkParseSmallerEvent(b *testing.B) { + benchmarkParse(b, 
`{"auth_events":[["$oXL79cT7fFxR7dPH:localhost",{"sha256":"abjkiDSg1RkuZrbj2jZoGMlQaaj1Ue3Jhi7I7NlKfXY"}],["$IVUsaSkm1LBAZYYh:localhost",{"sha256":"X7RUj46hM/8sUHNBIFkStbOauPvbDzjSdH4NibYWnko"}],["$VS2QT0EeArZYi8wf:localhost",{"sha256":"k9eM6utkCH8vhLW9/oRsH74jOBS/6RVK42iGDFbylno"}]],"content":{"name":"test3"},"depth":7,"event_id":"$yvN1b43rlmcOs5fY:localhost","hashes":{"sha256":"Oh1mwI1jEqZ3tgJ+V1Dmu5nOEGpCE4RFUqyJv2gQXKs"},"origin":"localhost","origin_server_ts":1510854416361,"prev_events":[["$FqI6TVvWpcbcnJ97:localhost",{"sha256":"upCsBqUhNUgT2/+zkzg8TbqdQpWWKQnZpGJc6KcbUC4"}]],"prev_state":[],"room_id":"!19Mp0U9hjajeIiw1:localhost","sender":"@test:localhost","signatures":{"localhost":{"ed25519:u9kP":"5IzSuRXkxvbTp0vZhhXYZeOe+619iG3AybJXr7zfNn/4vHz4TH7qSJVQXSaHHvcTcDodAKHnTG1WDulgO5okAQ"}},"state_key":"","type":"m.room.name"}`) +} + +func BenchmarkParseSmallerEventFailedHash(b *testing.B) { + benchmarkParse(b, `{"auth_events":[["$oXL79cT7fFxR7dPH:localhost",{"sha256":"abjkiDSg1RkuZrbj2jZoGMlQaaj1Ue3Jhi7I7NlKfXY"}],["$IVUsaSkm1LBAZYYh:localhost",{"sha256":"X7RUj46hM/8sUHNBIFkStbOauPvbDzjSdH4NibYWnko"}],["$VS2QT0EeArZYi8wf:localhost",{"sha256":"k9eM6utkCH8vhLW9/oRsH74jOBS/6RVK42iGDFbylno"}]],"content":{"name":"test4"},"depth":7,"event_id":"$yvN1b43rlmcOs5fY:localhost","hashes":{"sha256":"Oh1mwI1jEqZ3tgJ+V1Dmu5nOEGpCE4RFUqyJv2gQXKs"},"origin":"localhost","origin_server_ts":1510854416361,"prev_events":[["$FqI6TVvWpcbcnJ97:localhost",{"sha256":"upCsBqUhNUgT2/+zkzg8TbqdQpWWKQnZpGJc6KcbUC4"}]],"prev_state":[],"room_id":"!19Mp0U9hjajeIiw1:localhost","sender":"@test:localhost","signatures":{"localhost":{"ed25519:u9kP":"5IzSuRXkxvbTp0vZhhXYZeOe+619iG3AybJXr7zfNn/4vHz4TH7qSJVQXSaHHvcTcDodAKHnTG1WDulgO5okAQ"}},"state_key":"","type":"m.room.name"}`) +} + +func BenchmarkParseSmallerEventRedacted(b *testing.B) { + benchmarkParse(b, `{"event_id":"$yvN1b43rlmcOs5fY:localhost","sender":"@test:localhost","room_id":"!19Mp0U9hjajeIiw1:localhost","hashes":{"sha256":"Oh1mwI1jEqZ3tgJ+V1Dmu5nOEGpCE4RFUqyJv2gQXKs"},"signatures":{"localhost":{"ed25519:u9kP":"5IzSuRXkxvbTp0vZhhXYZeOe+619iG3AybJXr7zfNn/4vHz4TH7qSJVQXSaHHvcTcDodAKHnTG1WDulgO5okAQ"}},"content":{},"type":"m.room.name","state_key":"","depth":7,"prev_events":[["$FqI6TVvWpcbcnJ97:localhost",{"sha256":"upCsBqUhNUgT2/+zkzg8TbqdQpWWKQnZpGJc6KcbUC4"}]],"prev_state":[],"auth_events":[["$oXL79cT7fFxR7dPH:localhost",{"sha256":"abjkiDSg1RkuZrbj2jZoGMlQaaj1Ue3Jhi7I7NlKfXY"}],["$IVUsaSkm1LBAZYYh:localhost",{"sha256":"X7RUj46hM/8sUHNBIFkStbOauPvbDzjSdH4NibYWnko"}],["$VS2QT0EeArZYi8wf:localhost",{"sha256":"k9eM6utkCH8vhLW9/oRsH74jOBS/6RVK42iGDFbylno"}]],"origin":"localhost","origin_server_ts":1510854416361}`) +} diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/eventcrypto.go b/vendor/src/github.com/matrix-org/gomatrixserverlib/eventcrypto.go index 255b269d..6b11ed44 100644 --- a/vendor/src/github.com/matrix-org/gomatrixserverlib/eventcrypto.go +++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/eventcrypto.go @@ -22,6 +22,8 @@ import ( "encoding/json" "fmt" + "github.com/tidwall/gjson" + "github.com/tidwall/sjson" "golang.org/x/crypto/ed25519" ) @@ -68,40 +70,28 @@ func addContentHashesToEvent(eventJSON []byte) ([]byte, error) { } // checkEventContentHash checks if the unredacted content of the event matches the SHA-256 hash under the "hashes" key. +// Assumes that eventJSON has been canonicalised already. 
func checkEventContentHash(eventJSON []byte) error { - var event map[string]rawJSON + var err error - if err := json.Unmarshal(eventJSON, &event); err != nil { + result := gjson.GetBytes(eventJSON, "hashes.sha256") + var hash Base64String + if err = hash.Decode(result.Str); err != nil { return err } - hashesJSON := event["hashes"] + hashableEventJSON := eventJSON - delete(event, "signatures") - delete(event, "unsigned") - delete(event, "hashes") - - var hashes struct { - Sha256 Base64String `json:"sha256"` - } - if err := json.Unmarshal(hashesJSON, &hashes); err != nil { - return err - } - - hashableEventJSON, err := json.Marshal(event) - if err != nil { - return err - } - - hashableEventJSON, err = CanonicalJSON(hashableEventJSON) - if err != nil { - return err + for _, key := range []string{"signatures", "unsigned", "hashes"} { + if hashableEventJSON, err = sjson.DeleteBytes(hashableEventJSON, key); err != nil { + return err + } } sha256Hash := sha256.Sum256(hashableEventJSON) - if !bytes.Equal(sha256Hash[:], []byte(hashes.Sha256)) { - return fmt.Errorf("Invalid Sha256 content hash: %v != %v", sha256Hash[:], []byte(hashes.Sha256)) + if !bytes.Equal(sha256Hash[:], []byte(hash)) { + return fmt.Errorf("Invalid Sha256 content hash: %v != %v", sha256Hash[:], []byte(hash)) } return nil diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/federationtypes.go b/vendor/src/github.com/matrix-org/gomatrixserverlib/federationtypes.go index d4cfab86..ac734ed0 100644 --- a/vendor/src/github.com/matrix-org/gomatrixserverlib/federationtypes.go +++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/federationtypes.go @@ -4,6 +4,8 @@ import ( "context" "encoding/json" "fmt" + + "github.com/matrix-org/util" ) // A RespSend is the content of a response to PUT /_matrix/federation/v1/send/{txnID}/ @@ -109,6 +111,7 @@ func (r RespState) Events() ([]Event, error) { // Check that a response to /state is valid. func (r RespState) Check(ctx context.Context, keyRing JSONVerifier) error { + logger := util.GetLogger(ctx) var allEvents []Event for _, event := range r.AuthEvents { if event.StateKey() == nil { @@ -134,8 +137,9 @@ func (r RespState) Check(ctx context.Context, keyRing JSONVerifier) error { } // Check if the events pass signature checks. + logger.Infof("Checking event signatures for %d events of room state", len(allEvents)) if err := VerifyEventSignatures(ctx, allEvents, keyRing); err != nil { - return nil + return err } eventsByID := map[string]*Event{} diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/hooks/pre-commit b/vendor/src/github.com/matrix-org/gomatrixserverlib/hooks/pre-commit index 517e3e4f..44cc40d5 100644 --- a/vendor/src/github.com/matrix-org/gomatrixserverlib/hooks/pre-commit +++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/hooks/pre-commit @@ -2,6 +2,25 @@ set -eu +# make the GIT_DIR and GIT_INDEX_FILE absolute, before we change dir +export GIT_DIR=$(readlink -f `git rev-parse --git-dir`) +if [ -n "${GIT_INDEX_FILE:+x}" ]; then + export GIT_INDEX_FILE=$(readlink -f "$GIT_INDEX_FILE") +fi + +wd=`pwd` + +# create a temp dir. The `trap` incantation will ensure that it is removed +# again when this script completes. +tmpdir=`mktemp -d` +trap 'rm -rf "$tmpdir"' EXIT +cd "$tmpdir" + +# get a clean copy of the index (ie, what has been `git add`ed), so that we can +# run the checks against what we are about to commit, rather than what is in +# the working copy. +git checkout-index -a + echo "Installing lint search engine..." 
go get github.com/alecthomas/gometalinter/ gometalinter --config=linter.json --install --update diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/json.go b/vendor/src/github.com/matrix-org/gomatrixserverlib/json.go index e29f283d..b9f76f48 100644 --- a/vendor/src/github.com/matrix-org/gomatrixserverlib/json.go +++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/json.go @@ -16,66 +16,73 @@ package gomatrixserverlib import ( - "bytes" "encoding/binary" - "encoding/json" "sort" "unicode/utf8" + + "github.com/pkg/errors" + "github.com/tidwall/gjson" ) // CanonicalJSON re-encodes the JSON in a canonical encoding. The encoding is // the shortest possible encoding using integer values with sorted object keys. // https://matrix.org/docs/spec/server_server/unstable.html#canonical-json func CanonicalJSON(input []byte) ([]byte, error) { - sorted, err := SortJSON(input, make([]byte, 0, len(input))) - if err != nil { - return nil, err + if !gjson.Valid(string(input)) { + return nil, errors.Errorf("invalid json") } - return CompactJSON(sorted, make([]byte, 0, len(sorted))), nil + + return CanonicalJSONAssumeValid(input), nil +} + +// CanonicalJSONAssumeValid is the same as CanonicalJSON, but assumes the +// input is valid JSON +func CanonicalJSONAssumeValid(input []byte) []byte { + input = CompactJSON(input, make([]byte, 0, len(input))) + return SortJSON(input, make([]byte, 0, len(input))) } // SortJSON reencodes the JSON with the object keys sorted by lexicographically // by codepoint. The input must be valid JSON. -func SortJSON(input, output []byte) ([]byte, error) { - // Skip to the first character that isn't whitespace. - var decoded interface{} +func SortJSON(input, output []byte) []byte { + result := gjson.ParseBytes(input) - decoder := json.NewDecoder(bytes.NewReader(input)) - decoder.UseNumber() - if err := decoder.Decode(&decoded); err != nil { - return nil, err - } - return sortJSONValue(decoded, output) + rawJSON := rawJSONFromResult(result, input) + return sortJSONValue(result, rawJSON, output) } -func sortJSONValue(input interface{}, output []byte) ([]byte, error) { - switch value := input.(type) { - case []interface{}: - // If the JSON is an array then we need to sort the keys of its children. - return sortJSONArray(value, output) - case map[string]interface{}: - // If the JSON is an object then we need to sort its keys and the keys of its children. - return sortJSONObject(value, output) - default: - // Otherwise the JSON is a value and can be encoded without any further sorting. - bytes, err := json.Marshal(value) - if err != nil { - return nil, err - } - return append(output, bytes...), nil +// sortJSONValue takes a gjson.Result and sorts it. inputJSON must be the +// raw JSON bytes that gjson.Result points to. +func sortJSONValue(input gjson.Result, inputJSON, output []byte) []byte { + if input.IsArray() { + return sortJSONArray(input, inputJSON, output) } + + if input.IsObject() { + return sortJSONObject(input, inputJSON, output) + } + + // If its neither an object nor an array then there is no sub structure + // to sort, so just append the raw bytes. + return append(output, inputJSON...) } -func sortJSONArray(input []interface{}, output []byte) ([]byte, error) { - var err error +// sortJSONArray takes a gjson.Result and sorts it, assuming its an array. +// inputJSON must be the raw JSON bytes that gjson.Result points to. 
+func sortJSONArray(input gjson.Result, inputJSON, output []byte) []byte { sep := byte('[') - for _, value := range input { + + // Iterate over each value in the array and sort it. + input.ForEach(func(_, value gjson.Result) bool { output = append(output, sep) sep = ',' - if output, err = sortJSONValue(value, output); err != nil { - return nil, err - } - } + + rawJSON := rawJSONFromResult(value, inputJSON) + output = sortJSONValue(value, rawJSON, output) + + return true // keep iterating + }) + if sep == '[' { // If sep is still '[' then the array was empty and we never wrote the // initial '[', so we write it now along with the closing ']'. @@ -84,31 +91,49 @@ func sortJSONArray(input []interface{}, output []byte) ([]byte, error) { // Otherwise we end the array by writing a single ']' output = append(output, ']') } - return output, nil + return output } -func sortJSONObject(input map[string]interface{}, output []byte) ([]byte, error) { - var err error - keys := make([]string, len(input)) - var j int - for key := range input { - keys[j] = key - j++ +// sortJSONObject takes a gjson.Result and sorts it, assuming its an object. +// inputJSON must be the raw JSON bytes that gjson.Result points to. +func sortJSONObject(input gjson.Result, inputJSON, output []byte) []byte { + type entry struct { + key string // The parsed key string + rawKey []byte // The raw, unparsed key JSON string + value gjson.Result } - sort.Strings(keys) + + var entries []entry + + // Iterate over each key/value pair and add it to a slice + // that we can sort + input.ForEach(func(key, value gjson.Result) bool { + entries = append(entries, entry{ + key: key.String(), + rawKey: rawJSONFromResult(key, inputJSON), + value: value, + }) + return true // keep iterating + }) + + // Sort the slice based on the *parsed* key + sort.Slice(entries, func(a, b int) bool { + return entries[a].key < entries[b].key + }) + sep := byte('{') - for _, key := range keys { + + for _, entry := range entries { output = append(output, sep) sep = ',' - var encoded []byte - if encoded, err = json.Marshal(key); err != nil { - return nil, err - } - output = append(output, encoded...) + + // Append the raw unparsed JSON key, *not* the parsed key + output = append(output, entry.rawKey...) output = append(output, ':') - if output, err = sortJSONValue(input[key], output); err != nil { - return nil, err - } + + rawJSON := rawJSONFromResult(entry.value, inputJSON) + + output = sortJSONValue(entry.value, rawJSON, output) } if sep == '{' { // If sep is still '{' then the object was empty and we never wrote the @@ -118,7 +143,7 @@ func sortJSONObject(input map[string]interface{}, output []byte) ([]byte, error) // Otherwise we end the object by writing a single '}' output = append(output, '}') } - return output, nil + return output } // CompactJSON makes the encoded JSON as small as possible by removing @@ -237,3 +262,19 @@ func readHexDigits(input []byte) uint32 { hex |= hex >> 8 return hex & 0xFFFF } + +// rawJSONFromResult extracts the raw JSON bytes pointed to by result. +// input must be the json bytes that were used to generate result +func rawJSONFromResult(result gjson.Result, input []byte) (rawJSON []byte) { + // This is lifted from gjson README. Basically, result.Raw is a copy of + // the bytes we want, but its more efficient to take a slice. + // If Index is 0 then for some reason we can't extract it from the original + // JSON bytes. 
+ if result.Index > 0 { + rawJSON = input[result.Index : result.Index+len(result.Raw)] + } else { + rawJSON = []byte(result.Raw) + } + + return +} diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/json_test.go b/vendor/src/github.com/matrix-org/gomatrixserverlib/json_test.go index 2e6a2bc3..b58d94a8 100644 --- a/vendor/src/github.com/matrix-org/gomatrixserverlib/json_test.go +++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/json_test.go @@ -20,10 +20,8 @@ import ( ) func testSortJSON(t *testing.T, input, want string) { - got, err := SortJSON([]byte(input), nil) - if err != nil { - t.Error(err) - } + got := SortJSON([]byte(input), nil) + // Squash out the whitespace before comparing the JSON in case SortJSON had inserted whitespace. if string(CompactJSON(got, nil)) != want { t.Errorf("SortJSON(%q): want %q got %q", input, want, got) @@ -36,6 +34,7 @@ func TestSortJSON(t *testing.T) { `{"A":{"1":1,"2":2},"B":{"3":3,"4":4}}`) testSortJSON(t, `[true,false,null]`, `[true,false,null]`) testSortJSON(t, `[9007199254740991]`, `[9007199254740991]`) + testSortJSON(t, "\t\n[9007199254740991]", `[9007199254740991]`) } func testCompactJSON(t *testing.T, input, want string) { diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/keyring.go b/vendor/src/github.com/matrix-org/gomatrixserverlib/keyring.go index cd74de67..1a1c9d83 100644 --- a/vendor/src/github.com/matrix-org/gomatrixserverlib/keyring.go +++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/keyring.go @@ -6,6 +6,7 @@ import ( "strings" "time" + "github.com/matrix-org/util" "golang.org/x/crypto/ed25519" ) @@ -60,6 +61,10 @@ type KeyFetcher interface { // The result may have more (server name, key ID) pairs than were in the request. // Returns an error if there was a problem fetching the keys. FetchKeys(ctx context.Context, requests map[PublicKeyRequest]Timestamp) (map[PublicKeyRequest]PublicKeyLookupResult, error) + + // FetcherName returns the name of this fetcher, which can then be used for + // logging errors etc. + FetcherName() string } // A KeyDatabase is a store for caching public keys. @@ -113,6 +118,7 @@ type JSONVerifier interface { // VerifyJSONs implements JSONVerifier. func (k KeyRing) VerifyJSONs(ctx context.Context, requests []VerifyJSONRequest) ([]VerifyJSONResult, error) { // nolint: gocyclo + logger := util.GetLogger(ctx) results := make([]VerifyJSONResult, len(requests)) keyIDs := make([][]KeyID, len(requests)) @@ -154,7 +160,7 @@ func (k KeyRing) VerifyJSONs(ctx context.Context, requests []VerifyJSONRequest) } k.checkUsingKeys(requests, results, keyIDs, keysFromDatabase) - for i := range k.KeyFetchers { + for _, fetcher := range k.KeyFetchers { // TODO: we should distinguish here between expired keys, and those we don't have. // If the key has expired, it's no use re-requesting it. keyRequests := k.publicKeyRequests(requests, results, keyIDs) @@ -163,12 +169,22 @@ func (k KeyRing) VerifyJSONs(ctx context.Context, requests []VerifyJSONRequest) // This means that we've checked every JSON object we can check. return results, nil } + fetcherLogger := logger.WithField("fetcher", fetcher.FetcherName()) + // TODO: Coalesce in-flight requests for the same keys. // Otherwise we risk spamming the servers we query the keys from. - keysFetched, err := k.KeyFetchers[i].FetchKeys(ctx, keyRequests) + + fetcherLogger.WithField("num_key_requests", len(keyRequests)). 
+ Info("Requesting keys from fetcher") + + keysFetched, err := fetcher.FetchKeys(ctx, keyRequests) if err != nil { return nil, err } + + fetcherLogger.WithField("num_keys_fetched", len(keysFetched)). + Info("Got keys from fetcher") + k.checkUsingKeys(requests, results, keyIDs, keysFetched) // Add the keys to the database so that we won't need to fetch them again. @@ -259,6 +275,11 @@ type PerspectiveKeyFetcher struct { Client Client } +// FetcherName implements KeyFetcher +func (p PerspectiveKeyFetcher) FetcherName() string { + return fmt.Sprintf("perspective server %s", p.PerspectiveServerName) +} + // FetchKeys implements KeyFetcher func (p *PerspectiveKeyFetcher) FetchKeys( ctx context.Context, requests map[PublicKeyRequest]Timestamp, @@ -303,7 +324,8 @@ func (p *PerspectiveKeyFetcher) FetchKeys( return nil, fmt.Errorf("gomatrixserverlib: key response from perspective server failed checks") } - // TODO: What happens if the same key ID appears in multiple responses? + // TODO (matrix-org/dendrite#345): What happens if the same key ID + // appears in multiple responses? // We should probably take the response with the highest valid_until_ts. mapServerKeysToPublicKeyLookupResult(keys, results) } @@ -318,6 +340,11 @@ type DirectKeyFetcher struct { Client Client } +// FetcherName implements KeyFetcher +func (d DirectKeyFetcher) FetcherName() string { + return "DirectKeyFetcher" +} + // FetchKeys implements KeyFetcher func (d *DirectKeyFetcher) FetchKeys( ctx context.Context, requests map[PublicKeyRequest]Timestamp, @@ -333,9 +360,9 @@ func (d *DirectKeyFetcher) FetchKeys( } results := map[PublicKeyRequest]PublicKeyLookupResult{} - for server, reqs := range byServer { + for server := range byServer { // TODO: make these requests in parallel - serverResults, err := d.fetchKeysForServer(ctx, server, reqs) + serverResults, err := d.fetchKeysForServer(ctx, server) if err != nil { // TODO: Should we actually be erroring here? or should we just drop those keys from the result map? return nil, err @@ -348,25 +375,23 @@ func (d *DirectKeyFetcher) FetchKeys( } func (d *DirectKeyFetcher) fetchKeysForServer( - ctx context.Context, serverName ServerName, requests map[PublicKeyRequest]Timestamp, + ctx context.Context, serverName ServerName, ) (map[PublicKeyRequest]PublicKeyLookupResult, error) { - serverKeys, err := d.Client.LookupServerKeys(ctx, serverName, requests) + keys, err := d.Client.GetServerKeys(ctx, serverName) if err != nil { return nil, err } + // Check that the keys are valid for the server. + checks, _, _ := CheckKeys(serverName, time.Unix(0, 0), keys, nil) + if !checks.AllChecksOK { + return nil, fmt.Errorf("gomatrixserverlib: key response direct from %q failed checks", serverName) + } results := map[PublicKeyRequest]PublicKeyLookupResult{} - for _, keys := range serverKeys { - // Check that the keys are valid for the server. - checks, _, _ := CheckKeys(serverName, time.Unix(0, 0), keys, nil) - if !checks.AllChecksOK { - return nil, fmt.Errorf("gomatrixserverlib: key response direct from %q failed checks", serverName) - } - // TODO: What happens if the same key ID appears in multiple responses? - // We should probably take the response with the highest valid_until_ts. - mapServerKeysToPublicKeyLookupResult(keys, results) - } + // TODO (matrix-org/dendrite#345): What happens if the same key ID + // appears in multiple responses? We should probably reject the response. 
+ mapServerKeysToPublicKeyLookupResult(keys, results) return results, nil } diff --git a/vendor/src/github.com/matrix-org/gomatrixserverlib/keyring_test.go b/vendor/src/github.com/matrix-org/gomatrixserverlib/keyring_test.go index 07d1995a..b6e1f01a 100644 --- a/vendor/src/github.com/matrix-org/gomatrixserverlib/keyring_test.go +++ b/vendor/src/github.com/matrix-org/gomatrixserverlib/keyring_test.go @@ -36,6 +36,10 @@ var testKeys = `{ type testKeyDatabase struct{} +func (db testKeyDatabase) FetcherName() string { + return "testKeyDatabase" +} + func (db *testKeyDatabase) FetchKeys( ctx context.Context, requests map[PublicKeyRequest]Timestamp, ) (map[PublicKeyRequest]PublicKeyLookupResult, error) { @@ -151,6 +155,11 @@ func (e *erroringKeyDatabaseError) Error() string { return "An error with the ke var testErrorFetch = erroringKeyDatabaseError(1) var testErrorStore = erroringKeyDatabaseError(2) +// FetcherName implements KeyFetcher +func (e erroringKeyDatabase) FetcherName() string { + return "ErroringKeyDatabase" +} + func (e *erroringKeyDatabase) FetchKeys( ctx context.Context, requests map[PublicKeyRequest]Timestamp, ) (map[PublicKeyRequest]PublicKeyLookupResult, error) { diff --git a/vendor/src/github.com/tidwall/gjson/LICENSE b/vendor/src/github.com/tidwall/gjson/LICENSE new file mode 100644 index 00000000..58f5819a --- /dev/null +++ b/vendor/src/github.com/tidwall/gjson/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2016 Josh Baker + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/src/github.com/tidwall/gjson/README.md b/vendor/src/github.com/tidwall/gjson/README.md new file mode 100644 index 00000000..9f3ddb1b --- /dev/null +++ b/vendor/src/github.com/tidwall/gjson/README.md @@ -0,0 +1,373 @@ +
+get a json value quickly
+ +GJSON is a Go package that provides a [fast](#performance) and [simple](#get-a-value) way to get values from a json document. +It has features such as [one line retrieval](#get-a-value), [dot notation paths](#path-syntax), [iteration](#iterate-through-an-object-or-array). + +Getting Started +=============== + +## Installing + +To start using GJSON, install Go and run `go get`: + +```sh +$ go get -u github.com/tidwall/gjson +``` + +This will retrieve the library. + +## Get a value +Get searches json for the specified path. A path is in dot syntax, such as "name.last" or "age". This function expects that the json is well-formed. Bad json will not panic, but it may return back unexpected results. When the value is found it's returned immediately. + +```go +package main + +import "github.com/tidwall/gjson" + +const json = `{"name":{"first":"Janet","last":"Prichard"},"age":47}` + +func main() { + value := gjson.Get(json, "name.last") + println(value.String()) +} +``` + +This will print: + +``` +Prichard +``` +*There's also the [GetMany](#get-multiple-values-at-once) function to get multiple values at once, and [GetBytes](#working-with-bytes) for working with JSON byte slices.* + +## Path Syntax + +A path is a series of keys separated by a dot. +A key may contain special wildcard characters '\*' and '?'. +To access an array value use the index as the key. +To get the number of elements in an array or to access a child path, use the '#' character. +The dot and wildcard characters can be escaped with '\\'. + +```json +{ + "name": {"first": "Tom", "last": "Anderson"}, + "age":37, + "children": ["Sara","Alex","Jack"], + "fav.movie": "Deer Hunter", + "friends": [ + {"first": "Dale", "last": "Murphy", "age": 44}, + {"first": "Roger", "last": "Craig", "age": 68}, + {"first": "Jane", "last": "Murphy", "age": 47} + ] +} +``` +``` +"name.last" >> "Anderson" +"age" >> 37 +"children" >> ["Sara","Alex","Jack"] +"children.#" >> 3 +"children.1" >> "Alex" +"child*.2" >> "Jack" +"c?ildren.0" >> "Sara" +"fav\.movie" >> "Deer Hunter" +"friends.#.first" >> ["Dale","Roger","Jane"] +"friends.1.last" >> "Craig" +``` + +You can also query an array for the first match by using `#[...]`, or find all matches with `#[...]#`. +Queries support the `==`, `!=`, `<`, `<=`, `>`, `>=` comparison operators and the simple pattern matching `%` operator. + +``` +friends.#[last=="Murphy"].first >> "Dale" +friends.#[last=="Murphy"]#.first >> ["Dale","Jane"] +friends.#[age>45]#.last >> ["Craig","Murphy"] +friends.#[first%"D*"].last >> "Murphy" +``` + +## Result Type + +GJSON supports the json types `string`, `number`, `bool`, and `null`. +Arrays and Objects are returned as their raw json types. 
+ +The `Result` type holds one of these: + +``` +bool, for JSON booleans +float64, for JSON numbers +string, for JSON string literals +nil, for JSON null +``` + +To directly access the value: + +```go +result.Type // can be String, Number, True, False, Null, or JSON +result.Str // holds the string +result.Num // holds the float64 number +result.Raw // holds the raw json +result.Index // index of raw value in original json, zero means index unknown +``` + +There are a variety of handy functions that work on a result: + +```go +result.Exists() bool +result.Value() interface{} +result.Int() int64 +result.Uint() uint64 +result.Float() float64 +result.String() string +result.Bool() bool +result.Time() time.Time +result.Array() []gjson.Result +result.Map() map[string]gjson.Result +result.Get(path string) Result +result.ForEach(iterator func(key, value Result) bool) +result.Less(token Result, caseSensitive bool) bool +``` + +The `result.Value()` function returns an `interface{}` which requires type assertion and is one of the following Go types: + +The `result.Array()` function returns back an array of values. +If the result represents a non-existent value, then an empty array will be returned. +If the result is not a JSON array, the return value will be an array containing one result. + +```go +boolean >> bool +number >> float64 +string >> string +null >> nil +array >> []interface{} +object >> map[string]interface{} +``` + +## Get nested array values + +Suppose you want all the last names from the following json: + +```json +{ + "programmers": [ + { + "firstName": "Janet", + "lastName": "McLaughlin", + }, { + "firstName": "Elliotte", + "lastName": "Hunter", + }, { + "firstName": "Jason", + "lastName": "Harold", + } + ] +} +``` + +You would use the path "programmers.#.lastName" like such: + +```go +result := gjson.Get(json, "programmers.#.lastName") +for _, name := range result.Array() { + println(name.String()) +} +``` + +You can also query an object inside an array: + +```go +name := gjson.Get(json, `programmers.#[lastName="Hunter"].firstName`) +println(name.String()) // prints "Elliotte" +``` + +## Iterate through an object or array + +The `ForEach` function allows for quickly iterating through an object or array. +The key and value are passed to the iterator function for objects. +Only the value is passed for arrays. +Returning `false` from an iterator will stop iteration. + +```go +result := gjson.Get(json, "programmers") +result.ForEach(func(key, value gjson.Result) bool { + println(value.String()) + return true // keep iterating +}) +``` + +## Simple Parse and Get + +There's a `Parse(json)` function that will do a simple parse, and `result.Get(path)` that will search a result. + +For example, all of these will return the same result: + +```go +gjson.Parse(json).Get("name").Get("last") +gjson.Get(json, "name").Get("last") +gjson.Get(json, "name.last") +``` + +## Check for the existence of a value + +Sometimes you just want to know if a value exists. 
+ +```go +value := gjson.Get(json, "name.last") +if !value.Exists() { + println("no last name") +} else { + println(value.String()) +} + +// Or as one step +if gjson.Get(json, "name.last").Exists() { + println("has a last name") +} +``` + +## Unmarshal to a map + +To unmarshal to a `map[string]interface{}`: + +```go +m, ok := gjson.Parse(json).Value().(map[string]interface{}) +if !ok { + // not a map +} +``` + +## Working with Bytes + +If your JSON is contained in a `[]byte` slice, there's the [GetBytes](https://godoc.org/github.com/tidwall/gjson#GetBytes) function. This is preferred over `Get(string(data), path)`. + +```go +var json []byte = ... +result := gjson.GetBytes(json, path) +``` + +If you are using the `gjson.GetBytes(json, path)` function and you want to avoid converting `result.Raw` to a `[]byte`, then you can use this pattern: + +```go +var json []byte = ... +result := gjson.GetBytes(json, path) +var raw []byte +if result.Index > 0 { + raw = json[result.Index:result.Index+len(result.Raw)] +} else { + raw = []byte(result.Raw) +} +``` + +This is a best-effort no allocation sub slice of the original json. This method utilizes the `result.Index` field, which is the position of the raw data in the original json. It's possible that the value of `result.Index` equals zero, in which case the `result.Raw` is converted to a `[]byte`. + +## Get multiple values at once + +The `GetMany` function can be used to get multiple values at the same time, and is optimized to scan over a JSON payload once. + +```go +results := gjson.GetMany(json, "name.first", "name.last", "age") +``` + +The return value is a `[]Result`, which will always contain exactly the same number of items as the input paths. + +## Performance + +Benchmarks of GJSON alongside [encoding/json](https://golang.org/pkg/encoding/json/), +[ffjson](https://github.com/pquerna/ffjson), +[EasyJSON](https://github.com/mailru/easyjson), +[jsonparser](https://github.com/buger/jsonparser), +and [json-iterator](https://github.com/json-iterator/go) + +``` +BenchmarkGJSONGet-8 3000000 372 ns/op 0 B/op 0 allocs/op +BenchmarkGJSONUnmarshalMap-8 900000 4154 ns/op 1920 B/op 26 allocs/op +BenchmarkJSONUnmarshalMap-8 600000 9019 ns/op 3048 B/op 69 allocs/op +BenchmarkJSONDecoder-8 300000 14120 ns/op 4224 B/op 184 allocs/op +BenchmarkFFJSONLexer-8 1500000 3111 ns/op 896 B/op 8 allocs/op +BenchmarkEasyJSONLexer-8 3000000 887 ns/op 613 B/op 6 allocs/op +BenchmarkJSONParserGet-8 3000000 499 ns/op 21 B/op 0 allocs/op +BenchmarkJSONIterator-8 3000000 812 ns/op 544 B/op 9 allocs/op +``` + +Benchmarks for the `GetMany` function: + +``` +BenchmarkGJSONGetMany4Paths-8 4000000 303 ns/op 112 B/op 0 allocs/op +BenchmarkGJSONGetMany8Paths-8 8000000 208 ns/op 56 B/op 0 allocs/op +BenchmarkGJSONGetMany16Paths-8 16000000 156 ns/op 56 B/op 0 allocs/op +BenchmarkGJSONGetMany32Paths-8 32000000 127 ns/op 64 B/op 0 allocs/op +BenchmarkGJSONGetMany64Paths-8 64000000 117 ns/op 64 B/op 0 allocs/op +BenchmarkGJSONGetMany128Paths-8 128000000 109 ns/op 64 B/op 0 allocs/op +``` + +JSON document used: + +```json +{ + "widget": { + "debug": "on", + "window": { + "title": "Sample Konfabulator Widget", + "name": "main_window", + "width": 500, + "height": 500 + }, + "image": { + "src": "Images/Sun.png", + "hOffset": 250, + "vOffset": 250, + "alignment": "center" + }, + "text": { + "data": "Click Here", + "size": 36, + "style": "bold", + "vOffset": 100, + "alignment": "center", + "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;" + } + } +} +``` + +Each operation was rotated 
though one of the following search paths: + +``` +widget.window.name +widget.image.hOffset +widget.text.onMouseUp +``` + +For the `GetMany` benchmarks these paths are used: + +``` +widget.window.name +widget.image.hOffset +widget.text.onMouseUp +widget.window.title +widget.image.alignment +widget.text.style +widget.window.height +widget.image.src +widget.text.data +widget.text.size +``` + +*These benchmarks were run on a MacBook Pro 15" 2.8 GHz Intel Core i7 using Go 1.8 and can be be found [here](https://github.com/tidwall/gjson-benchmarks).* + + +## Contact +Josh Baker [@tidwall](http://twitter.com/tidwall) + +## License + +GJSON source code is available under the MIT [License](/LICENSE). diff --git a/vendor/src/github.com/tidwall/gjson/gjson.go b/vendor/src/github.com/tidwall/gjson/gjson.go new file mode 100644 index 00000000..e16a4b72 --- /dev/null +++ b/vendor/src/github.com/tidwall/gjson/gjson.go @@ -0,0 +1,2451 @@ +// Package gjson provides searching for json strings. +package gjson + +import ( + "encoding/base64" + "encoding/json" + "errors" + "reflect" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" + "unicode/utf16" + "unicode/utf8" + "unsafe" + + "github.com/tidwall/match" +) + +// Type is Result type +type Type int + +const ( + // Null is a null json value + Null Type = iota + // False is a json false boolean + False + // Number is json number + Number + // String is a json string + String + // True is a json true boolean + True + // JSON is a raw block of JSON + JSON +) + +// String returns a string representation of the type. +func (t Type) String() string { + switch t { + default: + return "" + case Null: + return "Null" + case False: + return "False" + case Number: + return "Number" + case String: + return "String" + case True: + return "True" + case JSON: + return "JSON" + } +} + +// Result represents a json value that is returned from Get(). +type Result struct { + // Type is the json type + Type Type + // Raw is the raw json + Raw string + // Str is the json string + Str string + // Num is the json number + Num float64 + // Index of raw value in original json, zero means index unknown + Index int +} + +// String returns a string representation of the value. +func (t Result) String() string { + switch t.Type { + default: + return "" + case False: + return "false" + case Number: + return strconv.FormatFloat(t.Num, 'f', -1, 64) + case String: + return t.Str + case JSON: + return t.Raw + case True: + return "true" + } +} + +// Bool returns an boolean representation. +func (t Result) Bool() bool { + switch t.Type { + default: + return false + case True: + return true + case String: + return t.Str != "" && t.Str != "0" + case Number: + return t.Num != 0 + } +} + +// Int returns an integer representation. +func (t Result) Int() int64 { + switch t.Type { + default: + return 0 + case True: + return 1 + case String: + n, _ := parseInt(t.Str) + return n + case Number: + // try to directly convert the float64 to int64 + n, ok := floatToInt(t.Num) + if !ok { + // now try to parse the raw string + n, ok = parseInt(t.Raw) + if !ok { + // fallback to a standard conversion + return int64(t.Num) + } + } + return n + } +} + +// Uint returns an unsigned integer representation. 
+func (t Result) Uint() uint64 { + switch t.Type { + default: + return 0 + case True: + return 1 + case String: + n, _ := parseUint(t.Str) + return n + case Number: + // try to directly convert the float64 to uint64 + n, ok := floatToUint(t.Num) + if !ok { + // now try to parse the raw string + n, ok = parseUint(t.Raw) + if !ok { + // fallback to a standard conversion + return uint64(t.Num) + } + } + return n + } +} + +// Float returns an float64 representation. +func (t Result) Float() float64 { + switch t.Type { + default: + return 0 + case True: + return 1 + case String: + n, _ := strconv.ParseFloat(t.Str, 64) + return n + case Number: + return t.Num + } +} + +// Time returns a time.Time representation. +func (t Result) Time() time.Time { + res, _ := time.Parse(time.RFC3339, t.String()) + return res +} + +// Array returns back an array of values. +// If the result represents a non-existent value, then an empty array will be returned. +// If the result is not a JSON array, the return value will be an array containing one result. +func (t Result) Array() []Result { + if !t.Exists() { + return nil + } + if t.Type != JSON { + return []Result{t} + } + r := t.arrayOrMap('[', false) + return r.a +} + +// IsObject returns true if the result value is a JSON object. +func (t Result) IsObject() bool { + return t.Type == JSON && len(t.Raw) > 0 && t.Raw[0] == '{' +} + +// IsObject returns true if the result value is a JSON array. +func (t Result) IsArray() bool { + return t.Type == JSON && len(t.Raw) > 0 && t.Raw[0] == '[' +} + +// ForEach iterates through values. +// If the result represents a non-existent value, then no values will be iterated. +// If the result is an Object, the iterator will pass the key and value of each item. +// If the result is an Array, the iterator will only pass the value of each item. +// If the result is not a JSON array or object, the iterator will pass back one value equal to the result. +func (t Result) ForEach(iterator func(key, value Result) bool) { + if !t.Exists() { + return + } + if t.Type != JSON { + iterator(Result{}, t) + return + } + json := t.Raw + var keys bool + var i int + var key, value Result + for ; i < len(json); i++ { + if json[i] == '{' { + i++ + key.Type = String + keys = true + break + } else if json[i] == '[' { + i++ + break + } + if json[i] > ' ' { + return + } + } + var str string + var vesc bool + var ok bool + for ; i < len(json); i++ { + if keys { + if json[i] != '"' { + continue + } + s := i + i, str, vesc, ok = parseString(json, i+1) + if !ok { + return + } + if vesc { + key.Str = unescape(str[1 : len(str)-1]) + } else { + key.Str = str[1 : len(str)-1] + } + key.Raw = str + key.Index = s + } + for ; i < len(json); i++ { + if json[i] <= ' ' || json[i] == ',' || json[i] == ':' { + continue + } + break + } + s := i + i, value, ok = parseAny(json, i, true) + if !ok { + return + } + value.Index = s + if !iterator(key, value) { + return + } + } +} + +// Map returns back an map of values. The result should be a JSON array. +func (t Result) Map() map[string]Result { + if t.Type != JSON { + return map[string]Result{} + } + r := t.arrayOrMap('{', false) + return r.o +} + +// Get searches result for the specified path. +// The result should be a JSON array or object. 
+func (t Result) Get(path string) Result { + return Get(t.Raw, path) +} + +type arrayOrMapResult struct { + a []Result + ai []interface{} + o map[string]Result + oi map[string]interface{} + vc byte +} + +func (t Result) arrayOrMap(vc byte, valueize bool) (r arrayOrMapResult) { + var json = t.Raw + var i int + var value Result + var count int + var key Result + if vc == 0 { + for ; i < len(json); i++ { + if json[i] == '{' || json[i] == '[' { + r.vc = json[i] + i++ + break + } + if json[i] > ' ' { + goto end + } + } + } else { + for ; i < len(json); i++ { + if json[i] == vc { + i++ + break + } + if json[i] > ' ' { + goto end + } + } + r.vc = vc + } + if r.vc == '{' { + if valueize { + r.oi = make(map[string]interface{}) + } else { + r.o = make(map[string]Result) + } + } else { + if valueize { + r.ai = make([]interface{}, 0) + } else { + r.a = make([]Result, 0) + } + } + for ; i < len(json); i++ { + if json[i] <= ' ' { + continue + } + // get next value + if json[i] == ']' || json[i] == '}' { + break + } + switch json[i] { + default: + if (json[i] >= '0' && json[i] <= '9') || json[i] == '-' { + value.Type = Number + value.Raw, value.Num = tonum(json[i:]) + } else { + continue + } + case '{', '[': + value.Type = JSON + value.Raw = squash(json[i:]) + case 'n': + value.Type = Null + value.Raw = tolit(json[i:]) + case 't': + value.Type = True + value.Raw = tolit(json[i:]) + case 'f': + value.Type = False + value.Raw = tolit(json[i:]) + case '"': + value.Type = String + value.Raw, value.Str = tostr(json[i:]) + } + i += len(value.Raw) - 1 + + if r.vc == '{' { + if count%2 == 0 { + key = value + } else { + if valueize { + r.oi[key.Str] = value.Value() + } else { + r.o[key.Str] = value + } + } + count++ + } else { + if valueize { + r.ai = append(r.ai, value.Value()) + } else { + r.a = append(r.a, value) + } + } + } +end: + return +} + +// Parse parses the json and returns a result. +func Parse(json string) Result { + var value Result + for i := 0; i < len(json); i++ { + if json[i] == '{' || json[i] == '[' { + value.Type = JSON + value.Raw = json[i:] // just take the entire raw + break + } + if json[i] <= ' ' { + continue + } + switch json[i] { + default: + if (json[i] >= '0' && json[i] <= '9') || json[i] == '-' { + value.Type = Number + value.Raw, value.Num = tonum(json[i:]) + } else { + return Result{} + } + case 'n': + value.Type = Null + value.Raw = tolit(json[i:]) + case 't': + value.Type = True + value.Raw = tolit(json[i:]) + case 'f': + value.Type = False + value.Raw = tolit(json[i:]) + case '"': + value.Type = String + value.Raw, value.Str = tostr(json[i:]) + } + break + } + return value +} + +// ParseBytes parses the json and returns a result. +// If working with bytes, this method preferred over Parse(string(data)) +func ParseBytes(json []byte) Result { + return Parse(string(json)) +} + +func squash(json string) string { + // expects that the lead character is a '[' or '{' + // squash the value, ignoring all nested arrays and objects. 
+ // the first '[' or '{' has already been read + depth := 1 + for i := 1; i < len(json); i++ { + if json[i] >= '"' && json[i] <= '}' { + switch json[i] { + case '"': + i++ + s2 := i + for ; i < len(json); i++ { + if json[i] > '\\' { + continue + } + if json[i] == '"' { + // look for an escaped slash + if json[i-1] == '\\' { + n := 0 + for j := i - 2; j > s2-1; j-- { + if json[j] != '\\' { + break + } + n++ + } + if n%2 == 0 { + continue + } + } + break + } + } + case '{', '[': + depth++ + case '}', ']': + depth-- + if depth == 0 { + return json[:i+1] + } + } + } + } + return json +} + +func tonum(json string) (raw string, num float64) { + for i := 1; i < len(json); i++ { + // less than dash might have valid characters + if json[i] <= '-' { + if json[i] <= ' ' || json[i] == ',' { + // break on whitespace and comma + raw = json[:i] + num, _ = strconv.ParseFloat(raw, 64) + return + } + // could be a '+' or '-'. let's assume so. + continue + } + if json[i] < ']' { + // probably a valid number + continue + } + if json[i] == 'e' || json[i] == 'E' { + // allow for exponential numbers + continue + } + // likely a ']' or '}' + raw = json[:i] + num, _ = strconv.ParseFloat(raw, 64) + return + } + raw = json + num, _ = strconv.ParseFloat(raw, 64) + return +} + +func tolit(json string) (raw string) { + for i := 1; i < len(json); i++ { + if json[i] < 'a' || json[i] > 'z' { + return json[:i] + } + } + return json +} + +func tostr(json string) (raw string, str string) { + // expects that the lead character is a '"' + for i := 1; i < len(json); i++ { + if json[i] > '\\' { + continue + } + if json[i] == '"' { + return json[:i+1], json[1:i] + } + if json[i] == '\\' { + i++ + for ; i < len(json); i++ { + if json[i] > '\\' { + continue + } + if json[i] == '"' { + // look for an escaped slash + if json[i-1] == '\\' { + n := 0 + for j := i - 2; j > 0; j-- { + if json[j] != '\\' { + break + } + n++ + } + if n%2 == 0 { + continue + } + } + break + } + } + var ret string + if i+1 < len(json) { + ret = json[:i+1] + } else { + ret = json[:i] + } + return ret, unescape(json[1:i]) + } + } + return json, json[1:] +} + +// Exists returns true if value exists. 
+// +// if gjson.Get(json, "name.last").Exists(){ +// println("value exists") +// } +func (t Result) Exists() bool { + return t.Type != Null || len(t.Raw) != 0 +} + +// Value returns one of these types: +// +// bool, for JSON booleans +// float64, for JSON numbers +// Number, for JSON numbers +// string, for JSON string literals +// nil, for JSON null +// +func (t Result) Value() interface{} { + if t.Type == String { + return t.Str + } + switch t.Type { + default: + return nil + case False: + return false + case Number: + return t.Num + case JSON: + r := t.arrayOrMap(0, true) + if r.vc == '{' { + return r.oi + } else if r.vc == '[' { + return r.ai + } + return nil + case True: + return true + } +} + +func parseString(json string, i int) (int, string, bool, bool) { + var s = i + for ; i < len(json); i++ { + if json[i] > '\\' { + continue + } + if json[i] == '"' { + return i + 1, json[s-1 : i+1], false, true + } + if json[i] == '\\' { + i++ + for ; i < len(json); i++ { + if json[i] > '\\' { + continue + } + if json[i] == '"' { + // look for an escaped slash + if json[i-1] == '\\' { + n := 0 + for j := i - 2; j > 0; j-- { + if json[j] != '\\' { + break + } + n++ + } + if n%2 == 0 { + continue + } + } + return i + 1, json[s-1 : i+1], true, true + } + } + break + } + } + return i, json[s-1:], false, false +} + +func parseNumber(json string, i int) (int, string) { + var s = i + i++ + for ; i < len(json); i++ { + if json[i] <= ' ' || json[i] == ',' || json[i] == ']' || json[i] == '}' { + return i, json[s:i] + } + } + return i, json[s:] +} + +func parseLiteral(json string, i int) (int, string) { + var s = i + i++ + for ; i < len(json); i++ { + if json[i] < 'a' || json[i] > 'z' { + return i, json[s:i] + } + } + return i, json[s:] +} + +type arrayPathResult struct { + part string + path string + more bool + alogok bool + arrch bool + alogkey string + query struct { + on bool + path string + op string + value string + all bool + } +} + +func parseArrayPath(path string) (r arrayPathResult) { + for i := 0; i < len(path); i++ { + if path[i] == '.' { + r.part = path[:i] + r.path = path[i+1:] + r.more = true + return + } + if path[i] == '#' { + r.arrch = true + if i == 0 && len(path) > 1 { + if path[1] == '.' { + r.alogok = true + r.alogkey = path[2:] + r.path = path[:1] + } else if path[1] == '[' { + r.query.on = true + // query + i += 2 + // whitespace + for ; i < len(path); i++ { + if path[i] > ' ' { + break + } + } + s := i + for ; i < len(path); i++ { + if path[i] <= ' ' || + path[i] == '!' || + path[i] == '=' || + path[i] == '<' || + path[i] == '>' || + path[i] == '%' || + path[i] == ']' { + break + } + } + r.query.path = path[s:i] + // whitespace + for ; i < len(path); i++ { + if path[i] > ' ' { + break + } + } + if i < len(path) { + s = i + if path[i] == '!' 
{ + if i < len(path)-1 && path[i+1] == '=' { + i++ + } + } else if path[i] == '<' || path[i] == '>' { + if i < len(path)-1 && path[i+1] == '=' { + i++ + } + } else if path[i] == '=' { + if i < len(path)-1 && path[i+1] == '=' { + s++ + i++ + } + } + i++ + r.query.op = path[s:i] + // whitespace + for ; i < len(path); i++ { + if path[i] > ' ' { + break + } + } + s = i + for ; i < len(path); i++ { + if path[i] == '"' { + i++ + s2 := i + for ; i < len(path); i++ { + if path[i] > '\\' { + continue + } + if path[i] == '"' { + // look for an escaped slash + if path[i-1] == '\\' { + n := 0 + for j := i - 2; j > s2-1; j-- { + if path[j] != '\\' { + break + } + n++ + } + if n%2 == 0 { + continue + } + } + break + } + } + } else if path[i] == ']' { + if i+1 < len(path) && path[i+1] == '#' { + r.query.all = true + } + break + } + } + if i > len(path) { + i = len(path) + } + v := path[s:i] + for len(v) > 0 && v[len(v)-1] <= ' ' { + v = v[:len(v)-1] + } + r.query.value = v + } + } + } + continue + } + } + r.part = path + r.path = "" + return +} + +type objectPathResult struct { + part string + path string + wild bool + more bool +} + +func parseObjectPath(path string) (r objectPathResult) { + for i := 0; i < len(path); i++ { + if path[i] == '.' { + r.part = path[:i] + r.path = path[i+1:] + r.more = true + return + } + if path[i] == '*' || path[i] == '?' { + r.wild = true + continue + } + if path[i] == '\\' { + // go into escape mode. this is a slower path that + // strips off the escape character from the part. + epart := []byte(path[:i]) + i++ + if i < len(path) { + epart = append(epart, path[i]) + i++ + for ; i < len(path); i++ { + if path[i] == '\\' { + i++ + if i < len(path) { + epart = append(epart, path[i]) + } + continue + } else if path[i] == '.' { + r.part = string(epart) + r.path = path[i+1:] + r.more = true + return + } else if path[i] == '*' || path[i] == '?' { + r.wild = true + } + epart = append(epart, path[i]) + } + } + // append the last part + r.part = string(epart) + return + } + } + r.part = path + return +} + +func parseSquash(json string, i int) (int, string) { + // expects that the lead character is a '[' or '{' + // squash the value, ignoring all nested arrays and objects. + // the first '[' or '{' has already been read + s := i + i++ + depth := 1 + for ; i < len(json); i++ { + if json[i] >= '"' && json[i] <= '}' { + switch json[i] { + case '"': + i++ + s2 := i + for ; i < len(json); i++ { + if json[i] > '\\' { + continue + } + if json[i] == '"' { + // look for an escaped slash + if json[i-1] == '\\' { + n := 0 + for j := i - 2; j > s2-1; j-- { + if json[j] != '\\' { + break + } + n++ + } + if n%2 == 0 { + continue + } + } + break + } + } + case '{', '[': + depth++ + case '}', ']': + depth-- + if depth == 0 { + i++ + return i, json[s:i] + } + } + } + } + return i, json[s:] +} + +func parseObject(c *parseContext, i int, path string) (int, bool) { + var pmatch, kesc, vesc, ok, hit bool + var key, val string + rp := parseObjectPath(path) + for i < len(c.json) { + for ; i < len(c.json); i++ { + if c.json[i] == '"' { + // parse_key_string + // this is slightly different from getting s string value + // because we don't need the outer quotes. 
+ i++ + var s = i + for ; i < len(c.json); i++ { + if c.json[i] > '\\' { + continue + } + if c.json[i] == '"' { + i, key, kesc, ok = i+1, c.json[s:i], false, true + goto parse_key_string_done + } + if c.json[i] == '\\' { + i++ + for ; i < len(c.json); i++ { + if c.json[i] > '\\' { + continue + } + if c.json[i] == '"' { + // look for an escaped slash + if c.json[i-1] == '\\' { + n := 0 + for j := i - 2; j > 0; j-- { + if c.json[j] != '\\' { + break + } + n++ + } + if n%2 == 0 { + continue + } + } + i, key, kesc, ok = i+1, c.json[s:i], true, true + goto parse_key_string_done + } + } + break + } + } + key, kesc, ok = c.json[s:], false, false + parse_key_string_done: + break + } + if c.json[i] == '}' { + return i + 1, false + } + } + if !ok { + return i, false + } + if rp.wild { + if kesc { + pmatch = match.Match(unescape(key), rp.part) + } else { + pmatch = match.Match(key, rp.part) + } + } else { + if kesc { + pmatch = rp.part == unescape(key) + } else { + pmatch = rp.part == key + } + } + hit = pmatch && !rp.more + for ; i < len(c.json); i++ { + switch c.json[i] { + default: + continue + case '"': + i++ + i, val, vesc, ok = parseString(c.json, i) + if !ok { + return i, false + } + if hit { + if vesc { + c.value.Str = unescape(val[1 : len(val)-1]) + } else { + c.value.Str = val[1 : len(val)-1] + } + c.value.Raw = val + c.value.Type = String + return i, true + } + case '{': + if pmatch && !hit { + i, hit = parseObject(c, i+1, rp.path) + if hit { + return i, true + } + } else { + i, val = parseSquash(c.json, i) + if hit { + c.value.Raw = val + c.value.Type = JSON + return i, true + } + } + case '[': + if pmatch && !hit { + i, hit = parseArray(c, i+1, rp.path) + if hit { + return i, true + } + } else { + i, val = parseSquash(c.json, i) + if hit { + c.value.Raw = val + c.value.Type = JSON + return i, true + } + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + i, val = parseNumber(c.json, i) + if hit { + c.value.Raw = val + c.value.Type = Number + c.value.Num, _ = strconv.ParseFloat(val, 64) + return i, true + } + case 't', 'f', 'n': + vc := c.json[i] + i, val = parseLiteral(c.json, i) + if hit { + c.value.Raw = val + switch vc { + case 't': + c.value.Type = True + case 'f': + c.value.Type = False + } + return i, true + } + } + break + } + } + return i, false +} +func queryMatches(rp *arrayPathResult, value Result) bool { + rpv := rp.query.value + if len(rpv) > 2 && rpv[0] == '"' && rpv[len(rpv)-1] == '"' { + rpv = rpv[1 : len(rpv)-1] + } + switch value.Type { + case String: + switch rp.query.op { + case "=": + return value.Str == rpv + case "!=": + return value.Str != rpv + case "<": + return value.Str < rpv + case "<=": + return value.Str <= rpv + case ">": + return value.Str > rpv + case ">=": + return value.Str >= rpv + case "%": + return match.Match(value.Str, rpv) + } + case Number: + rpvn, _ := strconv.ParseFloat(rpv, 64) + switch rp.query.op { + case "=": + return value.Num == rpvn + case "!=": + return value.Num == rpvn + case "<": + return value.Num < rpvn + case "<=": + return value.Num <= rpvn + case ">": + return value.Num > rpvn + case ">=": + return value.Num >= rpvn + } + case True: + switch rp.query.op { + case "=": + return rpv == "true" + case "!=": + return rpv != "true" + case ">": + return rpv == "false" + case ">=": + return true + } + case False: + switch rp.query.op { + case "=": + return rpv == "false" + case "!=": + return rpv != "false" + case "<": + return rpv == "true" + case "<=": + return true + } + } + return false +} +func parseArray(c 
*parseContext, i int, path string) (int, bool) { + var pmatch, vesc, ok, hit bool + var val string + var h int + var alog []int + var partidx int + var multires []byte + rp := parseArrayPath(path) + if !rp.arrch { + n, ok := parseUint(rp.part) + if !ok { + partidx = -1 + } else { + partidx = int(n) + } + } + for i < len(c.json) { + if !rp.arrch { + pmatch = partidx == h + hit = pmatch && !rp.more + } + h++ + if rp.alogok { + alog = append(alog, i) + } + for ; i < len(c.json); i++ { + switch c.json[i] { + default: + continue + case '"': + i++ + i, val, vesc, ok = parseString(c.json, i) + if !ok { + return i, false + } + if hit { + if rp.alogok { + break + } + if vesc { + c.value.Str = unescape(val[1 : len(val)-1]) + } else { + c.value.Str = val[1 : len(val)-1] + } + c.value.Raw = val + c.value.Type = String + return i, true + } + case '{': + if pmatch && !hit { + i, hit = parseObject(c, i+1, rp.path) + if hit { + if rp.alogok { + break + } + return i, true + } + } else { + i, val = parseSquash(c.json, i) + if rp.query.on { + res := Get(val, rp.query.path) + if queryMatches(&rp, res) { + if rp.more { + res = Get(val, rp.path) + } else { + res = Result{Raw: val, Type: JSON} + } + if rp.query.all { + if len(multires) == 0 { + multires = append(multires, '[') + } else { + multires = append(multires, ',') + } + multires = append(multires, res.Raw...) + } else { + c.value = res + return i, true + } + } + } else if hit { + if rp.alogok { + break + } + c.value.Raw = val + c.value.Type = JSON + return i, true + } + } + case '[': + if pmatch && !hit { + i, hit = parseArray(c, i+1, rp.path) + if hit { + if rp.alogok { + break + } + return i, true + } + } else { + i, val = parseSquash(c.json, i) + if hit { + if rp.alogok { + break + } + c.value.Raw = val + c.value.Type = JSON + return i, true + } + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + i, val = parseNumber(c.json, i) + if hit { + if rp.alogok { + break + } + c.value.Raw = val + c.value.Type = Number + c.value.Num, _ = strconv.ParseFloat(val, 64) + return i, true + } + case 't', 'f', 'n': + vc := c.json[i] + i, val = parseLiteral(c.json, i) + if hit { + if rp.alogok { + break + } + c.value.Raw = val + switch vc { + case 't': + c.value.Type = True + case 'f': + c.value.Type = False + } + return i, true + } + case ']': + if rp.arrch && rp.part == "#" { + if rp.alogok { + var jsons = make([]byte, 0, 64) + jsons = append(jsons, '[') + for j, k := 0, 0; j < len(alog); j++ { + res := Get(c.json[alog[j]:], rp.alogkey) + if res.Exists() { + if k > 0 { + jsons = append(jsons, ',') + } + jsons = append(jsons, []byte(res.Raw)...) + k++ + } + } + jsons = append(jsons, ']') + c.value.Type = JSON + c.value.Raw = string(jsons) + return i + 1, true + } + if rp.alogok { + break + } + c.value.Raw = val + c.value.Type = Number + c.value.Num = float64(h - 1) + c.calcd = true + return i + 1, true + } + if len(multires) > 0 && !c.value.Exists() { + c.value = Result{ + Raw: string(append(multires, ']')), + Type: JSON, + } + } + return i + 1, false + } + break + } + } + return i, false +} + +type parseContext struct { + json string + value Result + calcd bool +} + +// Get searches json for the specified path. +// A path is in dot syntax, such as "name.last" or "age". +// This function expects that the json is well-formed, and does not validate. +// Invalid json will not panic, but it may return back unexpected results. +// When the value is found it's returned immediately. +// +// A path is a series of keys searated by a dot. 
+// A key may contain special wildcard characters '*' and '?'. +// To access an array value use the index as the key. +// To get the number of elements in an array or to access a child path, use the '#' character. +// The dot and wildcard character can be escaped with '\'. +// +// { +// "name": {"first": "Tom", "last": "Anderson"}, +// "age":37, +// "children": ["Sara","Alex","Jack"], +// "friends": [ +// {"first": "James", "last": "Murphy"}, +// {"first": "Roger", "last": "Craig"} +// ] +// } +// "name.last" >> "Anderson" +// "age" >> 37 +// "children" >> ["Sara","Alex","Jack"] +// "children.#" >> 3 +// "children.1" >> "Alex" +// "child*.2" >> "Jack" +// "c?ildren.0" >> "Sara" +// "friends.#.first" >> ["James","Roger"] +// +func Get(json, path string) Result { + var i int + var c = &parseContext{json: json} + for ; i < len(c.json); i++ { + if c.json[i] == '{' { + i++ + parseObject(c, i, path) + break + } + if c.json[i] == '[' { + i++ + parseArray(c, i, path) + break + } + } + if len(c.value.Raw) > 0 && !c.calcd { + jhdr := *(*reflect.StringHeader)(unsafe.Pointer(&json)) + rhdr := *(*reflect.StringHeader)(unsafe.Pointer(&(c.value.Raw))) + c.value.Index = int(rhdr.Data - jhdr.Data) + if c.value.Index < 0 || c.value.Index >= len(json) { + c.value.Index = 0 + } + } + return c.value +} +func fromBytesGet(result Result) Result { + // safely get the string headers + rawhi := *(*reflect.StringHeader)(unsafe.Pointer(&result.Raw)) + strhi := *(*reflect.StringHeader)(unsafe.Pointer(&result.Str)) + // create byte slice headers + rawh := reflect.SliceHeader{Data: rawhi.Data, Len: rawhi.Len} + strh := reflect.SliceHeader{Data: strhi.Data, Len: strhi.Len} + if strh.Data == 0 { + // str is nil + if rawh.Data == 0 { + // raw is nil + result.Raw = "" + } else { + // raw has data, safely copy the slice header to a string + result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh))) + } + result.Str = "" + } else if rawh.Data == 0 { + // raw is nil + result.Raw = "" + // str has data, safely copy the slice header to a string + result.Str = string(*(*[]byte)(unsafe.Pointer(&strh))) + } else if strh.Data >= rawh.Data && + int(strh.Data)+strh.Len <= int(rawh.Data)+rawh.Len { + // Str is a substring of Raw. + start := int(strh.Data - rawh.Data) + // safely copy the raw slice header + result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh))) + // substring the raw + result.Str = result.Raw[start : start+strh.Len] + } else { + // safely copy both the raw and str slice headers to strings + result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh))) + result.Str = string(*(*[]byte)(unsafe.Pointer(&strh))) + } + return result +} + +// GetBytes searches json for the specified path. 
+// If working with bytes, this method preferred over Get(string(data), path) +func GetBytes(json []byte, path string) Result { + var result Result + if json != nil { + // unsafe cast to string + result = Get(*(*string)(unsafe.Pointer(&json)), path) + result = fromBytesGet(result) + } + return result +} + +// runeit returns the rune from the the \uXXXX +func runeit(json string) rune { + n, _ := strconv.ParseUint(json[:4], 16, 64) + return rune(n) +} + +// unescape unescapes a string +func unescape(json string) string { //, error) { + var str = make([]byte, 0, len(json)) + for i := 0; i < len(json); i++ { + switch { + default: + str = append(str, json[i]) + case json[i] < ' ': + return string(str) + case json[i] == '\\': + i++ + if i >= len(json) { + return string(str) + } + switch json[i] { + default: + return string(str) + case '\\': + str = append(str, '\\') + case '/': + str = append(str, '/') + case 'b': + str = append(str, '\b') + case 'f': + str = append(str, '\f') + case 'n': + str = append(str, '\n') + case 'r': + str = append(str, '\r') + case 't': + str = append(str, '\t') + case '"': + str = append(str, '"') + case 'u': + if i+5 > len(json) { + return string(str) + } + r := runeit(json[i+1:]) + i += 5 + if utf16.IsSurrogate(r) { + // need another code + if len(json[i:]) >= 6 && json[i] == '\\' && json[i+1] == 'u' { + // we expect it to be correct so just consume it + r = utf16.DecodeRune(r, runeit(json[i+2:])) + i += 6 + } + } + // provide enough space to encode the largest utf8 possible + str = append(str, 0, 0, 0, 0, 0, 0, 0, 0) + n := utf8.EncodeRune(str[len(str)-8:], r) + str = str[:len(str)-8+n] + i-- // backtrack index by one + } + } + } + return string(str) +} + +// Less return true if a token is less than another token. +// The caseSensitive paramater is used when the tokens are Strings. +// The order when comparing two different type is: +// +// Null < False < Number < String < True < JSON +// +func (t Result) Less(token Result, caseSensitive bool) bool { + if t.Type < token.Type { + return true + } + if t.Type > token.Type { + return false + } + if t.Type == String { + if caseSensitive { + return t.Str < token.Str + } + return stringLessInsensitive(t.Str, token.Str) + } + if t.Type == Number { + return t.Num < token.Num + } + return t.Raw < token.Raw +} + +func stringLessInsensitive(a, b string) bool { + for i := 0; i < len(a) && i < len(b); i++ { + if a[i] >= 'A' && a[i] <= 'Z' { + if b[i] >= 'A' && b[i] <= 'Z' { + // both are uppercase, do nothing + if a[i] < b[i] { + return true + } else if a[i] > b[i] { + return false + } + } else { + // a is uppercase, convert a to lowercase + if a[i]+32 < b[i] { + return true + } else if a[i]+32 > b[i] { + return false + } + } + } else if b[i] >= 'A' && b[i] <= 'Z' { + // b is uppercase, convert b to lowercase + if a[i] < b[i]+32 { + return true + } else if a[i] > b[i]+32 { + return false + } + } else { + // neither are uppercase + if a[i] < b[i] { + return true + } else if a[i] > b[i] { + return false + } + } + } + return len(a) < len(b) +} + +// parseAny parses the next value from a json string. +// A Result is returned when the hit param is set. 
+// The return values are (i int, res Result, ok bool) +func parseAny(json string, i int, hit bool) (int, Result, bool) { + var res Result + var val string + for ; i < len(json); i++ { + if json[i] == '{' || json[i] == '[' { + i, val = parseSquash(json, i) + if hit { + res.Raw = val + res.Type = JSON + } + return i, res, true + } + if json[i] <= ' ' { + continue + } + switch json[i] { + case '"': + i++ + var vesc bool + var ok bool + i, val, vesc, ok = parseString(json, i) + if !ok { + return i, res, false + } + if hit { + res.Type = String + res.Raw = val + if vesc { + res.Str = unescape(val[1 : len(val)-1]) + } else { + res.Str = val[1 : len(val)-1] + } + } + return i, res, true + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + i, val = parseNumber(json, i) + if hit { + res.Raw = val + res.Type = Number + res.Num, _ = strconv.ParseFloat(val, 64) + } + return i, res, true + case 't', 'f', 'n': + vc := json[i] + i, val = parseLiteral(json, i) + if hit { + res.Raw = val + switch vc { + case 't': + res.Type = True + case 'f': + res.Type = False + } + return i, res, true + } + } + } + return i, res, false +} + +var ( // used for testing + testWatchForFallback bool + testLastWasFallback bool +) + +// areSimplePaths returns true if all the paths are simple enough +// to parse quickly for GetMany(). Allows alpha-numeric, dots, +// underscores, and the dollar sign. It does not allow non-alnum, +// escape characters, or keys which start with a numbers. +// For example: +// "name.last" == OK +// "user.id0" == OK +// "user.ID" == OK +// "user.first_name" == OK +// "user.firstName" == OK +// "user.0item" == BAD +// "user.#id" == BAD +// "user\.name" == BAD +func areSimplePaths(paths []string) bool { + for _, path := range paths { + var fi int // first key index, for keys with numeric prefix + for i := 0; i < len(path); i++ { + if path[i] >= 'a' && path[i] <= 'z' { + // a-z is likely to be the highest frequency charater. + continue + } + if path[i] == '.' { + fi = i + 1 + continue + } + if path[i] >= 'A' && path[i] <= 'Z' { + continue + } + if path[i] == '_' || path[i] == '$' { + continue + } + if i > fi && path[i] >= '0' && path[i] <= '9' { + continue + } + return false + } + } + return true +} + +// GetMany searches json for the multiple paths. +// The return value is a Result array where the number of items +// will be equal to the number of input paths. +func GetMany(json string, paths ...string) []Result { + if len(paths) < 4 { + if testWatchForFallback { + testLastWasFallback = false + } + switch len(paths) { + case 0: + // return nil when no paths are specified. + return nil + case 1: + return []Result{Get(json, paths[0])} + case 2: + return []Result{Get(json, paths[0]), Get(json, paths[1])} + case 3: + return []Result{Get(json, paths[0]), Get(json, paths[1]), Get(json, paths[2])} + } + } + var results []Result + var ok bool + var i int + if len(paths) > 512 { + // we can only support up to 512 paths. Is that too many? + goto fallback + } + if !areSimplePaths(paths) { + // If there is even one path that is not considered "simple" then + // we need to use the fallback method. + goto fallback + } + // locate the object token. + for ; i < len(json); i++ { + if json[i] == '{' { + i++ + break + } + if json[i] <= ' ' { + continue + } + goto fallback + } + // use the call function table. 
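+ // Each getManyN helper allocates its scratch slices with a fixed
+ // capacity (8, 16, ..., 512); we dispatch to the smallest helper
+ // that can hold len(paths).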
+ if len(paths) <= 8 { + results, ok = getMany8(json, i, paths) + } else if len(paths) <= 16 { + results, ok = getMany16(json, i, paths) + } else if len(paths) <= 32 { + results, ok = getMany32(json, i, paths) + } else if len(paths) <= 64 { + results, ok = getMany64(json, i, paths) + } else if len(paths) <= 128 { + results, ok = getMany128(json, i, paths) + } else if len(paths) <= 256 { + results, ok = getMany256(json, i, paths) + } else if len(paths) <= 512 { + results, ok = getMany512(json, i, paths) + } + if !ok { + // there was some fault while parsing. we should try the + // fallback method. This could result in performance + // degregation in some cases. + goto fallback + } + if testWatchForFallback { + testLastWasFallback = false + } + return results +fallback: + results = results[:0] + for i := 0; i < len(paths); i++ { + results = append(results, Get(json, paths[i])) + } + if testWatchForFallback { + testLastWasFallback = true + } + return results +} + +// GetManyBytes searches json for the specified path. +// If working with bytes, this method preferred over +// GetMany(string(data), paths...) +func GetManyBytes(json []byte, paths ...string) []Result { + if json == nil { + return GetMany("", paths...) + } + results := GetMany(*(*string)(unsafe.Pointer(&json)), paths...) + for i := range results { + results[i] = fromBytesGet(results[i]) + } + return results +} + +// parseGetMany parses a json object for keys that match against the callers +// paths. It's a best-effort attempt and quickly locating and assigning the +// values to the []Result array. If there are failures such as bad json, or +// invalid input paths, or too much recursion, the function will exit with a +// return value of 'false'. +func parseGetMany( + json string, i int, + level uint, kplen int, + paths []string, completed []bool, matches []uint64, results []Result, +) (int, bool) { + if level > 62 { + // The recursion level is limited because the matches []uint64 + // array cannot handle more the 64-bits. + return i, false + } + // At this point the last character read was a '{'. + // Read all object keys and try to match against the paths. + var key string + var val string + var vesc, ok bool +next_key: + for ; i < len(json); i++ { + if json[i] == '"' { + // read the key + i, val, vesc, ok = parseString(json, i+1) + if !ok { + return i, false + } + if vesc { + // the value is escaped + key = unescape(val[1 : len(val)-1]) + } else { + // just a plain old ascii key + key = val[1 : len(val)-1] + } + var hasMatch bool + var parsedVal bool + var valOrgIndex int + var valPathIndex int + for j := 0; j < len(key); j++ { + if key[j] == '.' { + // we need to look for keys with dot and ignore them. + if i, _, ok = parseAny(json, i, false); !ok { + return i, false + } + continue next_key + } + } + var usedPaths int + // loop through paths and look for matches + for j := 0; j < len(paths); j++ { + if completed[j] { + usedPaths++ + // ignore completed paths + continue + } + if level > 0 && (matches[j]>>(level-1))&1 == 0 { + // ignore unmatched paths + usedPaths++ + continue + } + // try to match the key to the path + // this is spaghetti code but the idea is to minimize + // calls and variable assignments when comparing the + // key to paths + if len(paths[j])-kplen >= len(key) { + i, k := kplen, 0 + for ; k < len(key); k, i = k+1, i+1 { + if key[k] != paths[j][i] { + // no match + goto nomatch + } + } + if i < len(paths[j]) { + if paths[j][i] == '.' 
{ + // matched, but there are still more keys in path + goto match_not_atend + } + } + if len(paths[j]) <= len(key) || kplen != 0 { + if len(paths[j]) != i { + goto nomatch + } + // matched and at the end of the path + goto match_atend + } + } + // no match, jump to the nomatch label + goto nomatch + match_atend: + // found a match + // at the end of the path. we must take the value. + usedPaths++ + if !parsedVal { + // the value has not been parsed yet. let's do so. + valOrgIndex = i // keep track of the current position. + i, results[j], ok = parseAny(json, i, true) + if !ok { + return i, false + } + parsedVal = true + valPathIndex = j + } else { + results[j] = results[valPathIndex] + } + // mark as complete + completed[j] = true + // jump over the match_not_atend label + goto nomatch + match_not_atend: + // found a match + // still in the middle of the path. + usedPaths++ + // mark the path as matched + matches[j] |= 1 << level + if !hasMatch { + hasMatch = true + } + nomatch: // noop label + } + + if !hasMatch && i < len(json) && json[i] == '}' { + return i + 1, true + } + if !parsedVal { + if hasMatch { + // we found a match and the value has not been parsed yet. + // let's find out if the next value type is an object. + for ; i < len(json); i++ { + if json[i] <= ' ' || json[i] == ':' { + continue + } + break + } + if i < len(json) { + if json[i] == '{' { + // it's an object. let's go deeper + i, ok = parseGetMany(json, i+1, level+1, kplen+len(key)+1, paths, completed, matches, results) + if !ok { + return i, false + } + } else { + // not an object. just parse and ignore. + if i, _, ok = parseAny(json, i, false); !ok { + return i, false + } + } + } + } else { + // Since there was no matches we can just parse the value and + // ignore the result. + if i, _, ok = parseAny(json, i, false); !ok { + return i, false + } + } + } else if hasMatch && len(results[valPathIndex].Raw) > 0 && results[valPathIndex].Raw[0] == '{' { + // The value was already parsed and the value type is an object. + // Rewind the json index and let's parse deeper. + i = valOrgIndex + for ; i < len(json); i++ { + if json[i] == '{' { + break + } + } + i, ok = parseGetMany(json, i+1, level+1, kplen+len(key)+1, paths, completed, matches, results) + if !ok { + return i, false + } + } + if usedPaths == len(paths) { + // all paths have been used, either completed or matched. + // we should stop parsing this object to save CPU cycles. + if level > 0 && i < len(json) { + i, _ = parseSquash(json, i) + } + return i, true + } + } else if json[i] == '}' { + // reached the end of the object. end it here. + return i + 1, true + } + } + return i, true +} + +// Call table for GetMany. Using an isolated function allows for allocating +// arrays with know capacities on the stack, as opposed to dynamically +// allocating on the heap. This can provide a tremendous performance boost +// by avoiding the GC. 
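+//
+// Each helper differs only in its `max` constant: it makes zero-length
+// slices with capacity max and then reslices them to len(paths) with a
+// three-index expression, so the backing arrays have a size that is
+// known at compile time.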
+func getMany8(json string, i int, paths []string) ([]Result, bool) { + const max = 8 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany16(json string, i int, paths []string) ([]Result, bool) { + const max = 16 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany32(json string, i int, paths []string) ([]Result, bool) { + const max = 32 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany64(json string, i int, paths []string) ([]Result, bool) { + const max = 64 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany128(json string, i int, paths []string) ([]Result, bool) { + const max = 128 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany256(json string, i int, paths []string) ([]Result, bool) { + const max = 256 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany512(json string, i int, paths []string) ([]Result, bool) { + const max = 512 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} + +var fieldsmu sync.RWMutex +var fields = make(map[string]map[string]int) + +func assign(jsval Result, goval reflect.Value) { + if jsval.Type == Null { + return + } + switch goval.Kind() { + default: + case reflect.Ptr: + if !goval.IsNil() { + newval := reflect.New(goval.Elem().Type()) + assign(jsval, newval.Elem()) + goval.Elem().Set(newval.Elem()) + } else { + newval := reflect.New(goval.Type().Elem()) + assign(jsval, newval.Elem()) + goval.Set(newval) + } + case reflect.Struct: + fieldsmu.RLock() + sf := fields[goval.Type().String()] + fieldsmu.RUnlock() + if sf == nil { + 
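+ // First time this struct type is seen: build a map from json tag
+ // (and field name) to field index, and cache it for later calls.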
fieldsmu.Lock() + sf = make(map[string]int) + for i := 0; i < goval.Type().NumField(); i++ { + f := goval.Type().Field(i) + tag := strings.Split(f.Tag.Get("json"), ",")[0] + if tag != "-" { + if tag != "" { + sf[tag] = i + sf[f.Name] = i + } else { + sf[f.Name] = i + } + } + } + fields[goval.Type().String()] = sf + fieldsmu.Unlock() + } + jsval.ForEach(func(key, value Result) bool { + if idx, ok := sf[key.Str]; ok { + f := goval.Field(idx) + if f.CanSet() { + assign(value, f) + } + } + return true + }) + case reflect.Slice: + if goval.Type().Elem().Kind() == reflect.Uint8 && jsval.Type == String { + data, _ := base64.StdEncoding.DecodeString(jsval.String()) + goval.Set(reflect.ValueOf(data)) + } else { + jsvals := jsval.Array() + slice := reflect.MakeSlice(goval.Type(), len(jsvals), len(jsvals)) + for i := 0; i < len(jsvals); i++ { + assign(jsvals[i], slice.Index(i)) + } + goval.Set(slice) + } + case reflect.Array: + i, n := 0, goval.Len() + jsval.ForEach(func(_, value Result) bool { + if i == n { + return false + } + assign(value, goval.Index(i)) + i++ + return true + }) + case reflect.Map: + if goval.Type().Key().Kind() == reflect.String && goval.Type().Elem().Kind() == reflect.Interface { + goval.Set(reflect.ValueOf(jsval.Value())) + } + case reflect.Interface: + goval.Set(reflect.ValueOf(jsval.Value())) + case reflect.Bool: + goval.SetBool(jsval.Bool()) + case reflect.Float32, reflect.Float64: + goval.SetFloat(jsval.Float()) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + goval.SetInt(jsval.Int()) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + goval.SetUint(jsval.Uint()) + case reflect.String: + goval.SetString(jsval.String()) + } + if len(goval.Type().PkgPath()) > 0 { + v := goval.Addr() + if v.Type().NumMethod() > 0 { + if u, ok := v.Interface().(json.Unmarshaler); ok { + u.UnmarshalJSON([]byte(jsval.Raw)) + } + } + } +} + +var validate uintptr = 1 + +// UnmarshalValidationEnabled provides the option to disable JSON validation +// during the Unmarshal routine. Validation is enabled by default. +// +// Deprecated: Use encoder/json.Unmarshal instead +func UnmarshalValidationEnabled(enabled bool) { + if enabled { + atomic.StoreUintptr(&validate, 1) + } else { + atomic.StoreUintptr(&validate, 0) + } +} + +// Unmarshal loads the JSON data into the value pointed to by v. +// +// This function works almost identically to json.Unmarshal except that +// gjson.Unmarshal will automatically attempt to convert JSON values to any Go +// type. For example, the JSON string "100" or the JSON number 100 can be equally +// assigned to Go string, int, byte, uint64, etc. This rule applies to all types. 
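+//
+// A minimal illustrative sketch (the struct and input are hypothetical;
+// error handling omitted):
+//
+// var pet struct {
+// Name string `json:"name"`
+// Age int `json:"age"`
+// }
+// gjson.Unmarshal([]byte(`{"name":"Rex","age":"7"}`), &pet)
+// // pet.Age == 7 even though the JSON value is the string "7"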
+// +// Deprecated: Use encoder/json.Unmarshal instead +func Unmarshal(data []byte, v interface{}) error { + if atomic.LoadUintptr(&validate) == 1 { + _, ok := validpayload(data, 0) + if !ok { + return errors.New("invalid json") + } + } + if v := reflect.ValueOf(v); v.Kind() == reflect.Ptr { + assign(ParseBytes(data), v) + } + return nil +} + +func validpayload(data []byte, i int) (outi int, ok bool) { + for ; i < len(data); i++ { + switch data[i] { + default: + i, ok = validany(data, i) + if !ok { + return i, false + } + for ; i < len(data); i++ { + switch data[i] { + default: + return i, false + case ' ', '\t', '\n', '\r': + continue + } + } + return i, true + case ' ', '\t', '\n', '\r': + continue + } + } + return i, false +} +func validany(data []byte, i int) (outi int, ok bool) { + for ; i < len(data); i++ { + switch data[i] { + default: + return i, false + case ' ', '\t', '\n', '\r': + continue + case '{': + return validobject(data, i+1) + case '[': + return validarray(data, i+1) + case '"': + return validstring(data, i+1) + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return validnumber(data, i+1) + case 't': + return validtrue(data, i+1) + case 'f': + return validfalse(data, i+1) + case 'n': + return validnull(data, i+1) + } + } + return i, false +} +func validobject(data []byte, i int) (outi int, ok bool) { + for ; i < len(data); i++ { + switch data[i] { + default: + return i, false + case ' ', '\t', '\n', '\r': + continue + case '}': + return i + 1, true + case '"': + key: + if i, ok = validstring(data, i+1); !ok { + return i, false + } + if i, ok = validcolon(data, i); !ok { + return i, false + } + if i, ok = validany(data, i); !ok { + return i, false + } + if i, ok = validcomma(data, i, '}'); !ok { + return i, false + } + if data[i] == '}' { + return i + 1, true + } + for ; i < len(data); i++ { + if data[i] == '"' { + goto key + } + } + return i, false + } + } + return i, false +} +func validcolon(data []byte, i int) (outi int, ok bool) { + for ; i < len(data); i++ { + switch data[i] { + default: + return i, false + case ' ', '\t', '\n', '\r': + continue + case ':': + return i + 1, true + } + } + return i, false +} +func validcomma(data []byte, i int, end byte) (outi int, ok bool) { + for ; i < len(data); i++ { + switch data[i] { + default: + return i, false + case ' ', '\t', '\n', '\r': + continue + case ',': + return i, true + case end: + return i, true + } + } + return i, false +} +func validarray(data []byte, i int) (outi int, ok bool) { + for ; i < len(data); i++ { + switch data[i] { + default: + for ; i < len(data); i++ { + if i, ok = validany(data, i); !ok { + return i, false + } + if i, ok = validcomma(data, i, ']'); !ok { + return i, false + } + if data[i] == ']' { + return i + 1, true + } + } + case ' ', '\t', '\n', '\r': + continue + case ']': + return i + 1, true + } + } + return i, false +} +func validstring(data []byte, i int) (outi int, ok bool) { + for ; i < len(data); i++ { + if data[i] < ' ' { + return i, false + } else if data[i] == '\\' { + i++ + if i == len(data) { + return i, false + } + switch data[i] { + default: + return i, false + case '"', '\\', '/', 'b', 'f', 'n', 'r', 't': + case 'u': + for j := 0; j < 4; j++ { + i++ + if i >= len(data) { + return i, false + } + if !((data[i] >= '0' && data[i] <= '9') || + (data[i] >= 'a' && data[i] <= 'f') || + (data[i] >= 'A' && data[i] <= 'F')) { + return i, false + } + } + } + } else if data[i] == '"' { + return i + 1, true + } + } + return i, false +} +func validnumber(data []byte, i int) (outi 
int, ok bool) { + i-- + // sign + if data[i] == '-' { + i++ + } + // int + if i == len(data) { + return i, false + } + if data[i] == '0' { + i++ + } else { + for ; i < len(data); i++ { + if data[i] >= '0' && data[i] <= '9' { + continue + } + break + } + } + // frac + if i == len(data) { + return i, true + } + if data[i] == '.' { + i++ + if i == len(data) { + return i, false + } + if data[i] < '0' || data[i] > '9' { + return i, false + } + i++ + for ; i < len(data); i++ { + if data[i] >= '0' && data[i] <= '9' { + continue + } + break + } + } + // exp + if i == len(data) { + return i, true + } + if data[i] == 'e' || data[i] == 'E' { + i++ + if i == len(data) { + return i, false + } + if data[i] == '+' || data[i] == '-' { + i++ + } + if i == len(data) { + return i, false + } + if data[i] < '0' || data[i] > '9' { + return i, false + } + i++ + for ; i < len(data); i++ { + if data[i] >= '0' && data[i] <= '9' { + continue + } + break + } + } + return i, true +} + +func validtrue(data []byte, i int) (outi int, ok bool) { + if i+3 <= len(data) && data[i] == 'r' && data[i+1] == 'u' && data[i+2] == 'e' { + return i + 3, true + } + return i, false +} +func validfalse(data []byte, i int) (outi int, ok bool) { + if i+4 <= len(data) && data[i] == 'a' && data[i+1] == 'l' && data[i+2] == 's' && data[i+3] == 'e' { + return i + 4, true + } + return i, false +} +func validnull(data []byte, i int) (outi int, ok bool) { + if i+3 <= len(data) && data[i] == 'u' && data[i+1] == 'l' && data[i+2] == 'l' { + return i + 3, true + } + return i, false +} + +// Valid returns true if the input is valid json. +func Valid(json string) bool { + _, ok := validpayload([]byte(json), 0) + return ok +} + +func parseUint(s string) (n uint64, ok bool) { + var i int + if i == len(s) { + return 0, false + } + for ; i < len(s); i++ { + if s[i] >= '0' && s[i] <= '9' { + n = n*10 + uint64(s[i]-'0') + } else { + return 0, false + } + } + return n, true +} + +func parseInt(s string) (n int64, ok bool) { + var i int + var sign bool + if len(s) > 0 && s[0] == '-' { + sign = true + i++ + } + if i == len(s) { + return 0, false + } + for ; i < len(s); i++ { + if s[i] >= '0' && s[i] <= '9' { + n = n*10 + int64(s[i]-'0') + } else { + return 0, false + } + } + if sign { + return n * -1, true + } + return n, true +} + +const minUint53 = 0 +const maxUint53 = 4503599627370495 +const minInt53 = -2251799813685248 +const maxInt53 = 2251799813685247 + +func floatToUint(f float64) (n uint64, ok bool) { + n = uint64(f) + if float64(n) == f && n >= minUint53 && n <= maxUint53 { + return n, true + } + return 0, false +} + +func floatToInt(f float64) (n int64, ok bool) { + n = int64(f) + if float64(n) == f && n >= minInt53 && n <= maxInt53 { + return n, true + } + return 0, false +} diff --git a/vendor/src/github.com/tidwall/gjson/gjson_test.go b/vendor/src/github.com/tidwall/gjson/gjson_test.go new file mode 100644 index 00000000..12cb2449 --- /dev/null +++ b/vendor/src/github.com/tidwall/gjson/gjson_test.go @@ -0,0 +1,1112 @@ +package gjson + +import ( + "bytes" + "encoding/hex" + "encoding/json" + "fmt" + "math/rand" + "reflect" + "strings" + "testing" + "time" +) + +// TestRandomData is a fuzzing test that throws random data at the Parse +// function looking for panics. 
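+// The most recent input is kept in lstr so that, if a panic is
+// recovered, the offending bytes can be printed in hex before
+// re-panicking.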
+func TestRandomData(t *testing.T) { + var lstr string + defer func() { + if v := recover(); v != nil { + println("'" + hex.EncodeToString([]byte(lstr)) + "'") + println("'" + lstr + "'") + panic(v) + } + }() + rand.Seed(time.Now().UnixNano()) + b := make([]byte, 200) + for i := 0; i < 2000000; i++ { + n, err := rand.Read(b[:rand.Int()%len(b)]) + if err != nil { + t.Fatal(err) + } + lstr = string(b[:n]) + GetBytes([]byte(lstr), "zzzz") + Parse(lstr) + } +} + +func TestRandomValidStrings(t *testing.T) { + rand.Seed(time.Now().UnixNano()) + b := make([]byte, 200) + for i := 0; i < 100000; i++ { + n, err := rand.Read(b[:rand.Int()%len(b)]) + if err != nil { + t.Fatal(err) + } + sm, err := json.Marshal(string(b[:n])) + if err != nil { + t.Fatal(err) + } + var su string + if err := json.Unmarshal([]byte(sm), &su); err != nil { + t.Fatal(err) + } + token := Get(`{"str":`+string(sm)+`}`, "str") + if token.Type != String || token.Str != su { + println("["+token.Raw+"]", "["+token.Str+"]", "["+su+"]", "["+string(sm)+"]") + t.Fatal("string mismatch") + } + } +} + +func TestEmoji(t *testing.T) { + const input = `{"utf8":"Example emoji, KO: \ud83d\udd13, \ud83c\udfc3 OK: \u2764\ufe0f "}` + value := Get(input, "utf8") + var s string + json.Unmarshal([]byte(value.Raw), &s) + if value.String() != s { + t.Fatalf("expected '%v', got '%v'", s, value.String()) + } +} + +func testEscapePath(t *testing.T, json, path, expect string) { + if Get(json, path).String() != expect { + t.Fatalf("expected '%v', got '%v'", expect, Get(json, path).String()) + } +} + +func TestEscapePath(t *testing.T) { + json := `{ + "test":{ + "*":"valZ", + "*v":"val0", + "keyv*":"val1", + "key*v":"val2", + "keyv?":"val3", + "key?v":"val4", + "keyv.":"val5", + "key.v":"val6", + "keyk*":{"key?":"val7"} + } + }` + + testEscapePath(t, json, "test.\\*", "valZ") + testEscapePath(t, json, "test.\\*v", "val0") + testEscapePath(t, json, "test.keyv\\*", "val1") + testEscapePath(t, json, "test.key\\*v", "val2") + testEscapePath(t, json, "test.keyv\\?", "val3") + testEscapePath(t, json, "test.key\\?v", "val4") + testEscapePath(t, json, "test.keyv\\.", "val5") + testEscapePath(t, json, "test.key\\.v", "val6") + testEscapePath(t, json, "test.keyk\\*.key\\?", "val7") +} + +// this json block is poorly formed on purpose. 
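+// Get does not validate its input, so these tests exercise how the
+// parser behaves when it runs into malformed values mid-document.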
+var basicJSON = `{"age":100, "name":{"here":"B\\\"R"}, + "noop":{"what is a wren?":"a bird"}, + "happy":true,"immortal":false, + "items":[1,2,3,{"tags":[1,2,3],"points":[[1,2],[3,4]]},4,5,6,7], + "arr":["1",2,"3",{"hello":"world"},"4",5], + "vals":[1,2,3,{"sadf":sdf"asdf"}],"name":{"first":"tom","last":null}, + "created":"2014-05-16T08:28:06.989Z", + "loggy":{ + "programmers": [ + { + "firstName": "Brett", + "lastName": "McLaughlin", + "email": "aaaa", + "tag": "good" + }, + { + "firstName": "Jason", + "lastName": "Hunter", + "email": "bbbb", + "tag": "bad" + }, + { + "firstName": "Elliotte", + "lastName": "Harold", + "email": "cccc", + "tag":, "good" + }, + { + "firstName": 1002.3, + "age": 101 + } + ] + }, + "lastly":{"yay":"final"} +}` +var basicJSONB = []byte(basicJSON) + +func TestTimeResult(t *testing.T) { + assert(t, Get(basicJSON, "created").String() == Get(basicJSON, "created").Time().Format(time.RFC3339Nano)) +} + +func TestParseAny(t *testing.T) { + assert(t, Parse("100").Float() == 100) + assert(t, Parse("true").Bool()) + assert(t, Parse("valse").Bool() == false) +} + +func TestManyVariousPathCounts(t *testing.T) { + json := `{"a":"a","b":"b","c":"c"}` + counts := []int{3, 4, 7, 8, 9, 15, 16, 17, 31, 32, 33, 63, 64, 65, 127, 128, 129, 255, 256, 257, 511, 512, 513} + paths := []string{"a", "b", "c"} + expects := []string{"a", "b", "c"} + for _, count := range counts { + var gpaths []string + var gexpects []string + for i := 0; i < count; i++ { + if i < len(paths) { + gpaths = append(gpaths, paths[i]) + gexpects = append(gexpects, expects[i]) + } else { + gpaths = append(gpaths, fmt.Sprintf("not%d", i)) + gexpects = append(gexpects, "null") + } + } + results := GetMany(json, gpaths...) + for i := 0; i < len(paths); i++ { + if results[i].String() != expects[i] { + t.Fatalf("expected '%v', got '%v'", expects[i], results[i].String()) + } + } + } +} +func TestManyRecursion(t *testing.T) { + var json string + var path string + for i := 0; i < 100; i++ { + json += `{"a":` + path += ".a" + } + json += `"b"` + for i := 0; i < 100; i++ { + json += `}` + } + path = path[1:] + assert(t, GetMany(json, path)[0].String() == "b") +} +func TestByteSafety(t *testing.T) { + jsonb := []byte(`{"name":"Janet","age":38}`) + mtok := GetBytes(jsonb, "name") + if mtok.String() != "Janet" { + t.Fatalf("expected %v, got %v", "Jason", mtok.String()) + } + mtok2 := GetBytes(jsonb, "age") + if mtok2.Raw != "38" { + t.Fatalf("expected %v, got %v", "Jason", mtok2.Raw) + } + jsonb[9] = 'T' + jsonb[12] = 'd' + jsonb[13] = 'y' + if mtok.String() != "Janet" { + t.Fatalf("expected %v, got %v", "Jason", mtok.String()) + } +} + +func get(json, path string) Result { + return GetBytes([]byte(json), path) +} + +func TestBasic(t *testing.T) { + var mtok Result + mtok = get(basicJSON, `loggy.programmers.#[tag="good"].firstName`) + if mtok.String() != "Brett" { + t.Fatalf("expected %v, got %v", "Brett", mtok.String()) + } + mtok = get(basicJSON, `loggy.programmers.#[tag="good"]#.firstName`) + if mtok.String() != `["Brett","Elliotte"]` { + t.Fatalf("expected %v, got %v", `["Brett","Elliotte"]`, mtok.String()) + } +} + +func TestIsArrayIsObject(t *testing.T) { + mtok := get(basicJSON, "loggy") + assert(t, mtok.IsObject()) + assert(t, !mtok.IsArray()) + + mtok = get(basicJSON, "loggy.programmers") + assert(t, !mtok.IsObject()) + assert(t, mtok.IsArray()) + + mtok = get(basicJSON, `loggy.programmers.#[tag="good"]#.firstName`) + assert(t, mtok.IsArray()) + + mtok = get(basicJSON, `loggy.programmers.0.firstName`) + assert(t, 
!mtok.IsObject()) + assert(t, !mtok.IsArray()) +} + +func TestPlus53BitInts(t *testing.T) { + json := `{"IdentityData":{"GameInstanceId":634866135153775564}}` + value := Get(json, "IdentityData.GameInstanceId") + assert(t, value.Uint() == 634866135153775564) + assert(t, value.Int() == 634866135153775564) + assert(t, value.Float() == 634866135153775616) + + json = `{"IdentityData":{"GameInstanceId":634866135153775564.88172}}` + value = Get(json, "IdentityData.GameInstanceId") + assert(t, value.Uint() == 634866135153775616) + assert(t, value.Int() == 634866135153775616) + assert(t, value.Float() == 634866135153775616.88172) + + json = `{ + "min_uint64": 0, + "max_uint64": 18446744073709551615, + "overflow_uint64": 18446744073709551616, + "min_int64": -9223372036854775808, + "max_int64": 9223372036854775807, + "overflow_int64": 9223372036854775808, + "min_uint53": 0, + "max_uint53": 4503599627370495, + "overflow_uint53": 4503599627370496, + "min_int53": -2251799813685248, + "max_int53": 2251799813685247, + "overflow_int53": 2251799813685248 + }` + + assert(t, Get(json, "min_uint53").Uint() == 0) + assert(t, Get(json, "max_uint53").Uint() == 4503599627370495) + assert(t, Get(json, "overflow_uint53").Int() == 4503599627370496) + assert(t, Get(json, "min_int53").Int() == -2251799813685248) + assert(t, Get(json, "max_int53").Int() == 2251799813685247) + assert(t, Get(json, "overflow_int53").Int() == 2251799813685248) + assert(t, Get(json, "min_uint64").Uint() == 0) + assert(t, Get(json, "max_uint64").Uint() == 18446744073709551615) + // this next value overflows the max uint64 by one which will just + // flip the number to zero + assert(t, Get(json, "overflow_uint64").Int() == 0) + assert(t, Get(json, "min_int64").Int() == -9223372036854775808) + assert(t, Get(json, "max_int64").Int() == 9223372036854775807) + // this next value overflows the max int64 by one which will just + // flip the number to the negative sign. + assert(t, Get(json, "overflow_int64").Int() == -9223372036854775808) +} +func TestIssue38(t *testing.T) { + // These should not fail, even though the unicode is invalid. 
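+ // Invalid or truncated \u escapes are tolerated by unescape; the
+ // only requirement here is that Get does not panic on them.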
+ Get(`["S3O PEDRO DO BUTI\udf93"]`, "0") + Get(`["S3O PEDRO DO BUTI\udf93asdf"]`, "0") + Get(`["S3O PEDRO DO BUTI\udf93\u"]`, "0") + Get(`["S3O PEDRO DO BUTI\udf93\u1"]`, "0") + Get(`["S3O PEDRO DO BUTI\udf93\u13"]`, "0") + Get(`["S3O PEDRO DO BUTI\udf93\u134"]`, "0") + Get(`["S3O PEDRO DO BUTI\udf93\u1345"]`, "0") + Get(`["S3O PEDRO DO BUTI\udf93\u1345asd"]`, "0") +} +func TestTypes(t *testing.T) { + assert(t, (Result{Type: String}).Type.String() == "String") + assert(t, (Result{Type: Number}).Type.String() == "Number") + assert(t, (Result{Type: Null}).Type.String() == "Null") + assert(t, (Result{Type: False}).Type.String() == "False") + assert(t, (Result{Type: True}).Type.String() == "True") + assert(t, (Result{Type: JSON}).Type.String() == "JSON") + assert(t, (Result{Type: 100}).Type.String() == "") + // bool + assert(t, (Result{Type: String, Str: "true"}).Bool()) + assert(t, (Result{Type: True}).Bool()) + assert(t, (Result{Type: False}).Bool() == false) + assert(t, (Result{Type: Number, Num: 1}).Bool()) + // int + assert(t, (Result{Type: String, Str: "1"}).Int() == 1) + assert(t, (Result{Type: True}).Int() == 1) + assert(t, (Result{Type: False}).Int() == 0) + assert(t, (Result{Type: Number, Num: 1}).Int() == 1) + // uint + assert(t, (Result{Type: String, Str: "1"}).Uint() == 1) + assert(t, (Result{Type: True}).Uint() == 1) + assert(t, (Result{Type: False}).Uint() == 0) + assert(t, (Result{Type: Number, Num: 1}).Uint() == 1) + // float + assert(t, (Result{Type: String, Str: "1"}).Float() == 1) + assert(t, (Result{Type: True}).Float() == 1) + assert(t, (Result{Type: False}).Float() == 0) + assert(t, (Result{Type: Number, Num: 1}).Float() == 1) +} +func TestForEach(t *testing.T) { + Result{}.ForEach(nil) + Result{Type: String, Str: "Hello"}.ForEach(func(_, value Result) bool { + assert(t, value.String() == "Hello") + return false + }) + Result{Type: JSON, Raw: "*invalid*"}.ForEach(nil) + + json := ` {"name": {"first": "Janet","last": "Prichard"}, + "asd\nf":"\ud83d\udd13","age": 47}` + var count int + ParseBytes([]byte(json)).ForEach(func(key, value Result) bool { + count++ + return true + }) + assert(t, count == 3) + ParseBytes([]byte(`{"bad`)).ForEach(nil) + ParseBytes([]byte(`{"ok":"bad`)).ForEach(nil) +} +func TestMap(t *testing.T) { + assert(t, len(ParseBytes([]byte(`"asdf"`)).Map()) == 0) + assert(t, ParseBytes([]byte(`{"asdf":"ghjk"`)).Map()["asdf"].String() == "ghjk") + assert(t, len(Result{Type: JSON, Raw: "**invalid**"}.Map()) == 0) + assert(t, Result{Type: JSON, Raw: "**invalid**"}.Value() == nil) + assert(t, Result{Type: JSON, Raw: "{"}.Map() != nil) +} +func TestBasic1(t *testing.T) { + mtok := get(basicJSON, `loggy.programmers`) + var count int + mtok.ForEach(func(key, value Result) bool { + if key.Exists() { + t.Fatalf("expected %v, got %v", false, key.Exists()) + } + count++ + if count == 3 { + return false + } + if count == 1 { + i := 0 + value.ForEach(func(key, value Result) bool { + switch i { + case 0: + if key.String() != "firstName" || value.String() != "Brett" { + t.Fatalf("expected %v/%v got %v/%v", "firstName", "Brett", key.String(), value.String()) + } + case 1: + if key.String() != "lastName" || value.String() != "McLaughlin" { + t.Fatalf("expected %v/%v got %v/%v", "lastName", "McLaughlin", key.String(), value.String()) + } + case 2: + if key.String() != "email" || value.String() != "aaaa" { + t.Fatalf("expected %v/%v got %v/%v", "email", "aaaa", key.String(), value.String()) + } + } + i++ + return true + }) + } + return true + }) + if count != 3 { + 
t.Fatalf("expected %v, got %v", 3, count) + } +} +func TestBasic2(t *testing.T) { + mtok := get(basicJSON, `loggy.programmers.#[age=101].firstName`) + if mtok.String() != "1002.3" { + t.Fatalf("expected %v, got %v", "1002.3", mtok.String()) + } + mtok = get(basicJSON, `loggy.programmers.#[firstName != "Brett"].firstName`) + if mtok.String() != "Jason" { + t.Fatalf("expected %v, got %v", "Jason", mtok.String()) + } + mtok = get(basicJSON, `loggy.programmers.#[firstName % "Bre*"].email`) + if mtok.String() != "aaaa" { + t.Fatalf("expected %v, got %v", "aaaa", mtok.String()) + } + mtok = get(basicJSON, `loggy.programmers.#[firstName == "Brett"].email`) + if mtok.String() != "aaaa" { + t.Fatalf("expected %v, got %v", "aaaa", mtok.String()) + } + mtok = get(basicJSON, "loggy") + if mtok.Type != JSON { + t.Fatalf("expected %v, got %v", JSON, mtok.Type) + } + if len(mtok.Map()) != 1 { + t.Fatalf("expected %v, got %v", 1, len(mtok.Map())) + } + programmers := mtok.Map()["programmers"] + if programmers.Array()[1].Map()["firstName"].Str != "Jason" { + t.Fatalf("expected %v, got %v", "Jason", mtok.Map()["programmers"].Array()[1].Map()["firstName"].Str) + } +} +func TestBasic3(t *testing.T) { + var mtok Result + if Parse(basicJSON).Get("loggy.programmers").Get("1").Get("firstName").Str != "Jason" { + t.Fatalf("expected %v, got %v", "Jason", Parse(basicJSON).Get("loggy.programmers").Get("1").Get("firstName").Str) + } + var token Result + if token = Parse("-102"); token.Num != -102 { + t.Fatalf("expected %v, got %v", -102, token.Num) + } + if token = Parse("102"); token.Num != 102 { + t.Fatalf("expected %v, got %v", 102, token.Num) + } + if token = Parse("102.2"); token.Num != 102.2 { + t.Fatalf("expected %v, got %v", 102.2, token.Num) + } + if token = Parse(`"hello"`); token.Str != "hello" { + t.Fatalf("expected %v, got %v", "hello", token.Str) + } + if token = Parse(`"\"he\nllo\""`); token.Str != "\"he\nllo\"" { + t.Fatalf("expected %v, got %v", "\"he\nllo\"", token.Str) + } + mtok = get(basicJSON, "loggy.programmers.#.firstName") + if len(mtok.Array()) != 4 { + t.Fatalf("expected 4, got %v", len(mtok.Array())) + } + for i, ex := range []string{"Brett", "Jason", "Elliotte", "1002.3"} { + if mtok.Array()[i].String() != ex { + t.Fatalf("expected '%v', got '%v'", ex, mtok.Array()[i].String()) + } + } + mtok = get(basicJSON, "loggy.programmers.#.asd") + if mtok.Type != JSON { + t.Fatalf("expected %v, got %v", JSON, mtok.Type) + } + if len(mtok.Array()) != 0 { + t.Fatalf("expected 0, got %v", len(mtok.Array())) + } +} +func TestBasic4(t *testing.T) { + if get(basicJSON, "items.3.tags.#").Num != 3 { + t.Fatalf("expected 3, got %v", get(basicJSON, "items.3.tags.#").Num) + } + if get(basicJSON, "items.3.points.1.#").Num != 2 { + t.Fatalf("expected 2, got %v", get(basicJSON, "items.3.points.1.#").Num) + } + if get(basicJSON, "items.#").Num != 8 { + t.Fatalf("expected 6, got %v", get(basicJSON, "items.#").Num) + } + if get(basicJSON, "vals.#").Num != 4 { + t.Fatalf("expected 4, got %v", get(basicJSON, "vals.#").Num) + } + if !get(basicJSON, "name.last").Exists() { + t.Fatal("expected true, got false") + } + token := get(basicJSON, "name.here") + if token.String() != "B\\\"R" { + t.Fatal("expecting 'B\\\"R'", "got", token.String()) + } + token = get(basicJSON, "arr.#") + if token.String() != "6" { + t.Fatal("expecting '6'", "got", token.String()) + } + token = get(basicJSON, "arr.3.hello") + if token.String() != "world" { + t.Fatal("expecting 'world'", "got", token.String()) + } + _ = token.Value().(string) + 
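+ // Value() returns a plain Go string for JSON string results, so the
+ // type assertion above must not panic.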
token = get(basicJSON, "name.first") + if token.String() != "tom" { + t.Fatal("expecting 'tom'", "got", token.String()) + } + _ = token.Value().(string) + token = get(basicJSON, "name.last") + if token.String() != "" { + t.Fatal("expecting ''", "got", token.String()) + } + if token.Value() != nil { + t.Fatal("should be nil") + } +} +func TestBasic5(t *testing.T) { + token := get(basicJSON, "age") + if token.String() != "100" { + t.Fatal("expecting '100'", "got", token.String()) + } + _ = token.Value().(float64) + token = get(basicJSON, "happy") + if token.String() != "true" { + t.Fatal("expecting 'true'", "got", token.String()) + } + _ = token.Value().(bool) + token = get(basicJSON, "immortal") + if token.String() != "false" { + t.Fatal("expecting 'false'", "got", token.String()) + } + _ = token.Value().(bool) + token = get(basicJSON, "noop") + if token.String() != `{"what is a wren?":"a bird"}` { + t.Fatal("expecting '"+`{"what is a wren?":"a bird"}`+"'", "got", token.String()) + } + _ = token.Value().(map[string]interface{}) + + if get(basicJSON, "").Value() != nil { + t.Fatal("should be nil") + } + + get(basicJSON, "vals.hello") + + mm := Parse(basicJSON).Value().(map[string]interface{}) + fn := mm["loggy"].(map[string]interface{})["programmers"].([]interface{})[1].(map[string]interface{})["firstName"].(string) + if fn != "Jason" { + t.Fatalf("expecting %v, got %v", "Jason", fn) + } +} +func TestUnicode(t *testing.T) { + var json = `{"key":0,"的情况下解":{"key":1,"的情况":2}}` + if Get(json, "的情况下解.key").Num != 1 { + t.Fatal("fail") + } + if Get(json, "的情况下解.的情况").Num != 2 { + t.Fatal("fail") + } + if Get(json, "的情况下解.的?况").Num != 2 { + t.Fatal("fail") + } + if Get(json, "的情况下解.的?*").Num != 2 { + t.Fatal("fail") + } + if Get(json, "的情况下解.*?况").Num != 2 { + t.Fatal("fail") + } + if Get(json, "的情?下解.*?况").Num != 2 { + t.Fatal("fail") + } + if Get(json, "的情下解.*?况").Num != 0 { + t.Fatal("fail") + } +} + +func TestUnescape(t *testing.T) { + unescape(string([]byte{'\\', '\\', 0})) + unescape(string([]byte{'\\', '/', '\\', 'b', '\\', 'f'})) +} +func assert(t testing.TB, cond bool) { + if !cond { + panic("assert failed") + } +} +func TestLess(t *testing.T) { + assert(t, !Result{Type: Null}.Less(Result{Type: Null}, true)) + assert(t, Result{Type: Null}.Less(Result{Type: False}, true)) + assert(t, Result{Type: Null}.Less(Result{Type: True}, true)) + assert(t, Result{Type: Null}.Less(Result{Type: JSON}, true)) + assert(t, Result{Type: Null}.Less(Result{Type: Number}, true)) + assert(t, Result{Type: Null}.Less(Result{Type: String}, true)) + assert(t, !Result{Type: False}.Less(Result{Type: Null}, true)) + assert(t, Result{Type: False}.Less(Result{Type: True}, true)) + assert(t, Result{Type: String, Str: "abc"}.Less(Result{Type: String, Str: "bcd"}, true)) + assert(t, Result{Type: String, Str: "ABC"}.Less(Result{Type: String, Str: "abc"}, true)) + assert(t, !Result{Type: String, Str: "ABC"}.Less(Result{Type: String, Str: "abc"}, false)) + assert(t, Result{Type: Number, Num: 123}.Less(Result{Type: Number, Num: 456}, true)) + assert(t, !Result{Type: Number, Num: 456}.Less(Result{Type: Number, Num: 123}, true)) + assert(t, !Result{Type: Number, Num: 456}.Less(Result{Type: Number, Num: 456}, true)) + assert(t, stringLessInsensitive("abcde", "BBCDE")) + assert(t, stringLessInsensitive("abcde", "bBCDE")) + assert(t, stringLessInsensitive("Abcde", "BBCDE")) + assert(t, stringLessInsensitive("Abcde", "bBCDE")) + assert(t, !stringLessInsensitive("bbcde", "aBCDE")) + assert(t, !stringLessInsensitive("bbcde", "ABCDE")) 
+ assert(t, !stringLessInsensitive("Bbcde", "aBCDE")) + assert(t, !stringLessInsensitive("Bbcde", "ABCDE")) + assert(t, !stringLessInsensitive("abcde", "ABCDE")) + assert(t, !stringLessInsensitive("Abcde", "ABCDE")) + assert(t, !stringLessInsensitive("abcde", "ABCDE")) + assert(t, !stringLessInsensitive("ABCDE", "ABCDE")) + assert(t, !stringLessInsensitive("abcde", "abcde")) + assert(t, !stringLessInsensitive("123abcde", "123Abcde")) + assert(t, !stringLessInsensitive("123Abcde", "123Abcde")) + assert(t, !stringLessInsensitive("123Abcde", "123abcde")) + assert(t, !stringLessInsensitive("123abcde", "123abcde")) + assert(t, !stringLessInsensitive("124abcde", "123abcde")) + assert(t, !stringLessInsensitive("124Abcde", "123Abcde")) + assert(t, !stringLessInsensitive("124Abcde", "123abcde")) + assert(t, !stringLessInsensitive("124abcde", "123abcde")) + assert(t, stringLessInsensitive("124abcde", "125abcde")) + assert(t, stringLessInsensitive("124Abcde", "125Abcde")) + assert(t, stringLessInsensitive("124Abcde", "125abcde")) + assert(t, stringLessInsensitive("124abcde", "125abcde")) +} + +func TestIssue6(t *testing.T) { + data := `{ + "code": 0, + "msg": "", + "data": { + "sz002024": { + "qfqday": [ + [ + "2014-01-02", + "8.93", + "9.03", + "9.17", + "8.88", + "621143.00" + ], + [ + "2014-01-03", + "9.03", + "9.30", + "9.47", + "8.98", + "1624438.00" + ] + ] + } + } + }` + + var num []string + for _, v := range Get(data, "data.sz002024.qfqday.0").Array() { + num = append(num, v.String()) + } + if fmt.Sprintf("%v", num) != "[2014-01-02 8.93 9.03 9.17 8.88 621143.00]" { + t.Fatalf("invalid result") + } +} + +var exampleJSON = `{ + "widget": { + "debug": "on", + "window": { + "title": "Sample Konfabulator Widget", + "name": "main_window", + "width": 500, + "height": 500 + }, + "image": { + "src": "Images/Sun.png", + "hOffset": 250, + "vOffset": 250, + "alignment": "center" + }, + "text": { + "data": "Click Here", + "size": 36, + "style": "bold", + "vOffset": 100, + "alignment": "center", + "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;" + } + } +}` + +func TestNewParse(t *testing.T) { + //fmt.Printf("%v\n", parse2(exampleJSON, "widget").String()) +} + +func TestUnmarshalMap(t *testing.T) { + var m1 = Parse(exampleJSON).Value().(map[string]interface{}) + var m2 map[string]interface{} + if err := json.Unmarshal([]byte(exampleJSON), &m2); err != nil { + t.Fatal(err) + } + b1, err := json.Marshal(m1) + if err != nil { + t.Fatal(err) + } + b2, err := json.Marshal(m2) + if err != nil { + t.Fatal(err) + } + if bytes.Compare(b1, b2) != 0 { + t.Fatal("b1 != b2") + } +} + +func TestSingleArrayValue(t *testing.T) { + var json = `{"key": "value","key2":[1,2,3,4,"A"]}` + var result = Get(json, "key") + var array = result.Array() + if len(array) != 1 { + t.Fatal("array is empty") + } + if array[0].String() != "value" { + t.Fatalf("got %s, should be %s", array[0].String(), "value") + } + + array = Get(json, "key2.#").Array() + if len(array) != 1 { + t.Fatalf("got '%v', expected '%v'", len(array), 1) + } + + array = Get(json, "key3").Array() + if len(array) != 0 { + t.Fatalf("got '%v', expected '%v'", len(array), 0) + } + +} + +var manyJSON = ` { + "a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{ + "a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{ + "a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{ + "a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{ + "a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{ + "a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{ + 
"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"hello":"world" + }}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}} + "position":{"type":"Point","coordinates":[-115.24,33.09]}, + "loves":["world peace"], + "name":{"last":"Anderson","first":"Nancy"}, + "age":31 + "":{"a":"emptya","b":"emptyb"}, + "name.last":"Yellow", + "name.first":"Cat", +}` + +func combine(results []Result) string { + return fmt.Sprintf("%v", results) +} +func TestManyBasic(t *testing.T) { + testWatchForFallback = true + defer func() { + testWatchForFallback = false + }() + testMany := func(shouldFallback bool, expect string, paths ...string) { + results := GetManyBytes( + []byte(manyJSON), + paths..., + ) + if len(results) != len(paths) { + t.Fatalf("expected %v, got %v", len(paths), len(results)) + } + if fmt.Sprintf("%v", results) != expect { + fmt.Printf("%v\n", paths) + t.Fatalf("expected %v, got %v", expect, results) + } + //if testLastWasFallback != shouldFallback { + // t.Fatalf("expected %v, got %v", shouldFallback, testLastWasFallback) + //} + } + testMany(false, "[Point]", "position.type") + testMany(false, `[emptya ["world peace"] 31]`, ".a", "loves", "age") + testMany(false, `[["world peace"]]`, "loves") + testMany(false, `[{"last":"Anderson","first":"Nancy"} Nancy]`, "name", "name.first") + testMany(true, `[]`, strings.Repeat("a.", 40)+"hello") + res := Get(manyJSON, strings.Repeat("a.", 48)+"a") + testMany(true, `[`+res.String()+`]`, strings.Repeat("a.", 48)+"a") + // these should fallback + testMany(true, `[Cat Nancy]`, "name\\.first", "name.first") + testMany(true, `[world]`, strings.Repeat("a.", 70)+"hello") +} +func testMany(t *testing.T, json string, paths, expected []string) { + testManyAny(t, json, paths, expected, true) + testManyAny(t, json, paths, expected, false) +} +func testManyAny(t *testing.T, json string, paths, expected []string, bytes bool) { + var result []Result + for i := 0; i < 2; i++ { + var which string + if i == 0 { + which = "Get" + result = nil + for j := 0; j < len(expected); j++ { + if bytes { + result = append(result, GetBytes([]byte(json), paths[j])) + } else { + result = append(result, Get(json, paths[j])) + } + } + } else if i == 1 { + which = "GetMany" + if bytes { + result = GetManyBytes([]byte(json), paths...) + } else { + result = GetMany(json, paths...) 
+ } + } + for j := 0; j < len(expected); j++ { + if result[j].String() != expected[j] { + t.Fatalf("Using key '%s' for '%s'\nexpected '%v', got '%v'", paths[j], which, expected[j], result[j].String()) + } + } + } +} +func TestIssue20(t *testing.T) { + json := `{ "name": "FirstName", "name1": "FirstName1", "address": "address1", "addressDetails": "address2", }` + paths := []string{"name", "name1", "address", "addressDetails"} + expected := []string{"FirstName", "FirstName1", "address1", "address2"} + t.Run("SingleMany", func(t *testing.T) { testMany(t, json, paths, expected) }) +} + +func TestIssue21(t *testing.T) { + json := `{ "Level1Field1":3, + "Level1Field4":4, + "Level1Field2":{ "Level2Field1":[ "value1", "value2" ], + "Level2Field2":{ "Level3Field1":[ { "key1":"value1" } ] } } }` + paths := []string{"Level1Field1", "Level1Field2.Level2Field1", "Level1Field2.Level2Field2.Level3Field1", "Level1Field4"} + expected := []string{"3", `[ "value1", "value2" ]`, `[ { "key1":"value1" } ]`, "4"} + t.Run("SingleMany", func(t *testing.T) { testMany(t, json, paths, expected) }) +} + +func TestRandomMany(t *testing.T) { + var lstr string + defer func() { + if v := recover(); v != nil { + println("'" + hex.EncodeToString([]byte(lstr)) + "'") + println("'" + lstr + "'") + panic(v) + } + }() + rand.Seed(time.Now().UnixNano()) + b := make([]byte, 512) + for i := 0; i < 50000; i++ { + n, err := rand.Read(b[:rand.Int()%len(b)]) + if err != nil { + t.Fatal(err) + } + lstr = string(b[:n]) + paths := make([]string, rand.Int()%64) + for i := range paths { + var b []byte + n := rand.Int() % 5 + for j := 0; j < n; j++ { + if j > 0 { + b = append(b, '.') + } + nn := rand.Int() % 10 + for k := 0; k < nn; k++ { + b = append(b, 'a'+byte(rand.Int()%26)) + } + } + paths[i] = string(b) + } + GetMany(lstr, paths...) 
+ } +} + +type ComplicatedType struct { + unsettable int + Tagged string `json:"tagged"` + NotTagged bool + Nested struct { + Yellow string `json:"yellow"` + } + NestedTagged struct { + Green string + Map map[string]interface{} + Ints struct { + Int int `json:"int"` + Int8 int8 + Int16 int16 + Int32 int32 + Int64 int64 `json:"int64"` + } + Uints struct { + Uint uint + Uint8 uint8 + Uint16 uint16 + Uint32 uint32 + Uint64 uint64 + } + Floats struct { + Float64 float64 + Float32 float32 + } + Byte byte + Bool bool + } `json:"nestedTagged"` + LeftOut string `json:"-"` + SelfPtr *ComplicatedType + SelfSlice []ComplicatedType + SelfSlicePtr []*ComplicatedType + SelfPtrSlice *[]ComplicatedType + Interface interface{} `json:"interface"` + Array [3]int + Time time.Time `json:"time"` + Binary []byte + NonBinary []byte +} + +var complicatedJSON = ` +{ + "tagged": "OK", + "Tagged": "KO", + "NotTagged": true, + "unsettable": 101, + "Nested": { + "Yellow": "Green", + "yellow": "yellow" + }, + "nestedTagged": { + "Green": "Green", + "Map": { + "this": "that", + "and": "the other thing" + }, + "Ints": { + "Uint": 99, + "Uint16": 16, + "Uint32": 32, + "Uint64": 65 + }, + "Uints": { + "int": -99, + "Int": -98, + "Int16": -16, + "Int32": -32, + "int64": -64, + "Int64": -65 + }, + "Uints": { + "Float32": 32.32, + "Float64": 64.64 + }, + "Byte": 254, + "Bool": true + }, + "LeftOut": "you shouldn't be here", + "SelfPtr": {"tagged":"OK","nestedTagged":{"Ints":{"Uint32":32}}}, + "SelfSlice": [{"tagged":"OK","nestedTagged":{"Ints":{"Uint32":32}}}], + "SelfSlicePtr": [{"tagged":"OK","nestedTagged":{"Ints":{"Uint32":32}}}], + "SelfPtrSlice": [{"tagged":"OK","nestedTagged":{"Ints":{"Uint32":32}}}], + "interface": "Tile38 Rocks!", + "Interface": "Please Download", + "Array": [0,2,3,4,5], + "time": "2017-05-07T13:24:43-07:00", + "Binary": "R0lGODlhPQBEAPeo", + "NonBinary": [9,3,100,115] +} +` + +func TestUnmarshal(t *testing.T) { + var s1 ComplicatedType + var s2 ComplicatedType + if err := json.Unmarshal([]byte(complicatedJSON), &s1); err != nil { + t.Fatal(err) + } + if err := Unmarshal([]byte(complicatedJSON), &s2); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(&s1, &s2) { + t.Fatal("not equal") + } + var str string + if err := json.Unmarshal([]byte(Get(complicatedJSON, "LeftOut").Raw), &str); err != nil { + t.Fatal(err) + } + assert(t, str == Get(complicatedJSON, "LeftOut").String()) +} + +func testvalid(json string, expect bool) { + _, ok := validpayload([]byte(json), 0) + if ok != expect { + panic("mismatch") + } +} + +func TestValidBasic(t *testing.T) { + testvalid("0", true) + testvalid("00", false) + testvalid("-00", false) + testvalid("-.", false) + testvalid("0.0", true) + testvalid("10.0", true) + testvalid("10e1", true) + testvalid("10EE", false) + testvalid("10E-", false) + testvalid("10E+", false) + testvalid("10E123", true) + testvalid("10E-123", true) + testvalid("10E-0123", true) + testvalid("", false) + testvalid(" ", false) + testvalid("{}", true) + testvalid("{", false) + testvalid("-", false) + testvalid("-1", true) + testvalid("-1.", false) + testvalid("-1.0", true) + testvalid(" -1.0", true) + testvalid(" -1.0 ", true) + testvalid("-1.0 ", true) + testvalid("-1.0 i", false) + testvalid("-1.0 i", false) + testvalid("true", true) + testvalid(" true", true) + testvalid(" true ", true) + testvalid(" True ", false) + testvalid(" tru", false) + testvalid("false", true) + testvalid(" false", true) + testvalid(" false ", true) + testvalid(" False ", false) + testvalid(" fals", false) + 
testvalid("null", true) + testvalid(" null", true) + testvalid(" null ", true) + testvalid(" Null ", false) + testvalid(" nul", false) + testvalid(" []", true) + testvalid(" [true]", true) + testvalid(" [ true, null ]", true) + testvalid(" [ true,]", false) + testvalid(`{"hello":"world"}`, true) + testvalid(`{ "hello": "world" }`, true) + testvalid(`{ "hello": "world", }`, false) + testvalid(`{"a":"b",}`, false) + testvalid(`{"a":"b","a"}`, false) + testvalid(`{"a":"b","a":}`, false) + testvalid(`{"a":"b","a":1}`, true) + testvalid(`{"a":"b","a": 1, "c":{"hi":"there"} }`, true) + testvalid(`{"a":"b","a": 1, "c":{"hi":"there", "easy":["going",{"mixed":"bag"}]} }`, true) + testvalid(`""`, true) + testvalid(`"`, false) + testvalid(`"\n"`, true) + testvalid(`"\"`, false) + testvalid(`"\\"`, true) + testvalid(`"a\\b"`, true) + testvalid(`"a\\b\\\"a"`, true) + testvalid(`"a\\b\\\uFFAAa"`, true) + testvalid(`"a\\b\\\uFFAZa"`, false) + testvalid(`"a\\b\\\uFFA"`, false) + testvalid(string(complicatedJSON), true) + testvalid(string(exampleJSON), true) +} + +var jsonchars = []string{"{", "[", ",", ":", "}", "]", "1", "0", "true", "false", "null", `""`, `"\""`, `"a"`} + +func makeRandomJSONChars(b []byte) { + var bb []byte + for len(bb) < len(b) { + bb = append(bb, jsonchars[rand.Int()%len(jsonchars)]...) + } + copy(b, bb[:len(b)]) +} +func TestValidRandom(t *testing.T) { + rand.Seed(time.Now().UnixNano()) + b := make([]byte, 100000) + start := time.Now() + for time.Since(start) < time.Second*3 { + n := rand.Int() % len(b) + rand.Read(b[:n]) + validpayload(b[:n], 0) + } + + start = time.Now() + for time.Since(start) < time.Second*3 { + n := rand.Int() % len(b) + makeRandomJSONChars(b[:n]) + validpayload(b[:n], 0) + } +} + +func TestGetMany47(t *testing.T) { + json := `{"bar": {"id": 99, "mybar": "my mybar" }, "foo": {"myfoo": [605]}}` + paths := []string{"foo.myfoo", "bar.id", "bar.mybar", "bar.mybarx"} + expected := []string{"[605]", "99", "my mybar", ""} + results := GetMany(json, paths...) + if len(expected) != len(results) { + t.Fatalf("expected %v, got %v", len(expected), len(results)) + } + for i, path := range paths { + if results[i].String() != expected[i] { + t.Fatalf("expected '%v', got '%v' for path '%v'", expected[i], results[i].String(), path) + } + } +} + +func TestGetMany48(t *testing.T) { + json := `{"bar": {"id": 99, "xyz": "my xyz"}, "foo": {"myfoo": [605]}}` + paths := []string{"foo.myfoo", "bar.id", "bar.xyz", "bar.abc"} + expected := []string{"[605]", "99", "my xyz", ""} + results := GetMany(json, paths...) 
+ if len(expected) != len(results) { + t.Fatalf("expected %v, got %v", len(expected), len(results)) + } + for i, path := range paths { + if results[i].String() != expected[i] { + t.Fatalf("expected '%v', got '%v' for path '%v'", expected[i], results[i].String(), path) + } + } +} + +func TestResultRawForLiteral(t *testing.T) { + for _, lit := range []string{"null", "true", "false"} { + result := Parse(lit) + if result.Raw != lit { + t.Fatalf("expected '%v', got '%v'", lit, result.Raw) + } + } +} diff --git a/vendor/src/github.com/tidwall/gjson/logo.png b/vendor/src/github.com/tidwall/gjson/logo.png new file mode 100644 index 00000000..17a8bbe9 Binary files /dev/null and b/vendor/src/github.com/tidwall/gjson/logo.png differ diff --git a/vendor/src/github.com/tidwall/match/LICENSE b/vendor/src/github.com/tidwall/match/LICENSE new file mode 100644 index 00000000..58f5819a --- /dev/null +++ b/vendor/src/github.com/tidwall/match/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2016 Josh Baker + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/src/github.com/tidwall/match/README.md b/vendor/src/github.com/tidwall/match/README.md new file mode 100644 index 00000000..2aa5bc38 --- /dev/null +++ b/vendor/src/github.com/tidwall/match/README.md @@ -0,0 +1,32 @@ +Match +===== + + + +Match is a very simple pattern matcher where '*' matches on any +number characters and '?' matches on any one character. + +Installing +---------- + +``` +go get -u github.com/tidwall/match +``` + +Example +------- + +```go +match.Match("hello", "*llo") +match.Match("jello", "?ello") +match.Match("hello", "h*o") +``` + + +Contact +------- +Josh Baker [@tidwall](http://twitter.com/tidwall) + +License +------- +Redcon source code is available under the MIT [License](/LICENSE). diff --git a/vendor/src/github.com/tidwall/match/match.go b/vendor/src/github.com/tidwall/match/match.go new file mode 100644 index 00000000..8885add6 --- /dev/null +++ b/vendor/src/github.com/tidwall/match/match.go @@ -0,0 +1,192 @@ +// Match provides a simple pattern matcher with unicode support. +package match + +import "unicode/utf8" + +// Match returns true if str matches pattern. This is a very +// simple wildcard match where '*' matches on any number characters +// and '?' matches on any one character. + +// pattern: +// { term } +// term: +// '*' matches any sequence of non-Separator characters +// '?' 
matches any single non-Separator character +// c matches character c (c != '*', '?', '\\') +// '\\' c matches character c +// +func Match(str, pattern string) bool { + if pattern == "*" { + return true + } + return deepMatch(str, pattern) +} +func deepMatch(str, pattern string) bool { + for len(pattern) > 0 { + if pattern[0] > 0x7f { + return deepMatchRune(str, pattern) + } + switch pattern[0] { + default: + if len(str) == 0 { + return false + } + if str[0] > 0x7f { + return deepMatchRune(str, pattern) + } + if str[0] != pattern[0] { + return false + } + case '?': + if len(str) == 0 { + return false + } + case '*': + return deepMatch(str, pattern[1:]) || + (len(str) > 0 && deepMatch(str[1:], pattern)) + } + str = str[1:] + pattern = pattern[1:] + } + return len(str) == 0 && len(pattern) == 0 +} + +func deepMatchRune(str, pattern string) bool { + var sr, pr rune + var srsz, prsz int + + // read the first rune ahead of time + if len(str) > 0 { + if str[0] > 0x7f { + sr, srsz = utf8.DecodeRuneInString(str) + } else { + sr, srsz = rune(str[0]), 1 + } + } else { + sr, srsz = utf8.RuneError, 0 + } + if len(pattern) > 0 { + if pattern[0] > 0x7f { + pr, prsz = utf8.DecodeRuneInString(pattern) + } else { + pr, prsz = rune(pattern[0]), 1 + } + } else { + pr, prsz = utf8.RuneError, 0 + } + // done reading + for pr != utf8.RuneError { + switch pr { + default: + if srsz == utf8.RuneError { + return false + } + if sr != pr { + return false + } + case '?': + if srsz == utf8.RuneError { + return false + } + case '*': + return deepMatchRune(str, pattern[prsz:]) || + (srsz > 0 && deepMatchRune(str[srsz:], pattern)) + } + str = str[srsz:] + pattern = pattern[prsz:] + // read the next runes + if len(str) > 0 { + if str[0] > 0x7f { + sr, srsz = utf8.DecodeRuneInString(str) + } else { + sr, srsz = rune(str[0]), 1 + } + } else { + sr, srsz = utf8.RuneError, 0 + } + if len(pattern) > 0 { + if pattern[0] > 0x7f { + pr, prsz = utf8.DecodeRuneInString(pattern) + } else { + pr, prsz = rune(pattern[0]), 1 + } + } else { + pr, prsz = utf8.RuneError, 0 + } + // done reading + } + + return srsz == 0 && prsz == 0 +} + +var maxRuneBytes = func() []byte { + b := make([]byte, 4) + if utf8.EncodeRune(b, '\U0010FFFF') != 4 { + panic("invalid rune encoding") + } + return b +}() + +// Allowable parses the pattern and determines the minimum and maximum allowable +// values that the pattern can represent. +// When the max cannot be determined, 'true' will be returned +// for infinite. +func Allowable(pattern string) (min, max string) { + if pattern == "" || pattern[0] == '*' { + return "", "" + } + + minb := make([]byte, 0, len(pattern)) + maxb := make([]byte, 0, len(pattern)) + var wild bool + for i := 0; i < len(pattern); i++ { + if pattern[i] == '*' { + wild = true + break + } + if pattern[i] == '?' { + minb = append(minb, 0) + maxb = append(maxb, maxRuneBytes...) + } else { + minb = append(minb, pattern[i]) + maxb = append(maxb, pattern[i]) + } + } + if wild { + r, n := utf8.DecodeLastRune(maxb) + if r != utf8.RuneError { + if r < utf8.MaxRune { + r++ + if r > 0x7f { + b := make([]byte, 4) + nn := utf8.EncodeRune(b, r) + maxb = append(maxb[:len(maxb)-n], b[:nn]...) 
+ } else { + maxb = append(maxb[:len(maxb)-n], byte(r)) + } + } + } + } + return string(minb), string(maxb) + /* + return + if wild { + r, n := utf8.DecodeLastRune(maxb) + if r != utf8.RuneError { + if r < utf8.MaxRune { + infinite = true + } else { + r++ + if r > 0x7f { + b := make([]byte, 4) + nn := utf8.EncodeRune(b, r) + maxb = append(maxb[:len(maxb)-n], b[:nn]...) + } else { + maxb = append(maxb[:len(maxb)-n], byte(r)) + } + } + } + } + return string(minb), string(maxb), infinite + */ +} diff --git a/vendor/src/github.com/tidwall/match/match_test.go b/vendor/src/github.com/tidwall/match/match_test.go new file mode 100644 index 00000000..032ee59a --- /dev/null +++ b/vendor/src/github.com/tidwall/match/match_test.go @@ -0,0 +1,408 @@ +package match + +import ( + "fmt" + "math/rand" + "testing" + "time" + "unicode/utf8" +) + +func TestMatch(t *testing.T) { + if !Match("hello world", "hello world") { + t.Fatal("fail") + } + if Match("hello world", "jello world") { + t.Fatal("fail") + } + if !Match("hello world", "hello*") { + t.Fatal("fail") + } + if Match("hello world", "jello*") { + t.Fatal("fail") + } + if !Match("hello world", "hello?world") { + t.Fatal("fail") + } + if Match("hello world", "jello?world") { + t.Fatal("fail") + } + if !Match("hello world", "he*o?world") { + t.Fatal("fail") + } + if !Match("hello world", "he*o?wor*") { + t.Fatal("fail") + } + if !Match("hello world", "he*o?*r*") { + t.Fatal("fail") + } + if !Match("的情况下解析一个", "*") { + t.Fatal("fail") + } + if !Match("的情况下解析一个", "*况下*") { + t.Fatal("fail") + } + if !Match("的情况下解析一个", "*况?*") { + t.Fatal("fail") + } + if !Match("的情况下解析一个", "的情况?解析一个") { + t.Fatal("fail") + } +} + +// TestWildcardMatch - Tests validate the logic of wild card matching. +// `WildcardMatch` supports '*' and '?' wildcards. +// Sample usage: In resource matching for folder policy validation. +func TestWildcardMatch(t *testing.T) { + testCases := []struct { + pattern string + text string + matched bool + }{ + // Test case - 1. + // Test case with pattern containing key name with a prefix. Should accept the same text without a "*". + { + pattern: "my-folder/oo*", + text: "my-folder/oo", + matched: true, + }, + // Test case - 2. + // Test case with "*" at the end of the pattern. + { + pattern: "my-folder/In*", + text: "my-folder/India/Karnataka/", + matched: true, + }, + // Test case - 3. + // Test case with prefixes shuffled. + // This should fail. + { + pattern: "my-folder/In*", + text: "my-folder/Karnataka/India/", + matched: false, + }, + // Test case - 4. + // Test case with text expanded to the wildcards in the pattern. + { + pattern: "my-folder/In*/Ka*/Ban", + text: "my-folder/India/Karnataka/Ban", + matched: true, + }, + // Test case - 5. + // Test case with the keyname part is repeated as prefix several times. + // This is valid. + { + pattern: "my-folder/In*/Ka*/Ban", + text: "my-folder/India/Karnataka/Ban/Ban/Ban/Ban/Ban", + matched: true, + }, + // Test case - 6. + // Test case to validate that `*` can be expanded into multiple prefixes. + { + pattern: "my-folder/In*/Ka*/Ban", + text: "my-folder/India/Karnataka/Area1/Area2/Area3/Ban", + matched: true, + }, + // Test case - 7. + // Test case to validate that `*` can be expanded into multiple prefixes. + { + pattern: "my-folder/In*/Ka*/Ban", + text: "my-folder/India/State1/State2/Karnataka/Area1/Area2/Area3/Ban", + matched: true, + }, + // Test case - 8. + // Test case where the keyname part of the pattern is expanded in the text. 
+ { + pattern: "my-folder/In*/Ka*/Ban", + text: "my-folder/India/Karnataka/Bangalore", + matched: false, + }, + // Test case - 9. + // Test case with prefixes and wildcard expanded for all "*". + { + pattern: "my-folder/In*/Ka*/Ban*", + text: "my-folder/India/Karnataka/Bangalore", + matched: true, + }, + // Test case - 10. + // Test case with keyname part being a wildcard in the pattern. + {pattern: "my-folder/*", + text: "my-folder/India", + matched: true, + }, + // Test case - 11. + { + pattern: "my-folder/oo*", + text: "my-folder/odo", + matched: false, + }, + + // Test case with pattern containing wildcard '?'. + // Test case - 12. + // "my-folder?/" matches "my-folder1/", "my-folder2/", "my-folder3" etc... + // doesn't match "myfolder/". + { + pattern: "my-folder?/abc*", + text: "myfolder/abc", + matched: false, + }, + // Test case - 13. + { + pattern: "my-folder?/abc*", + text: "my-folder1/abc", + matched: true, + }, + // Test case - 14. + { + pattern: "my-?-folder/abc*", + text: "my--folder/abc", + matched: false, + }, + // Test case - 15. + { + pattern: "my-?-folder/abc*", + text: "my-1-folder/abc", + matched: true, + }, + // Test case - 16. + { + pattern: "my-?-folder/abc*", + text: "my-k-folder/abc", + matched: true, + }, + // Test case - 17. + { + pattern: "my??folder/abc*", + text: "myfolder/abc", + matched: false, + }, + // Test case - 18. + { + pattern: "my??folder/abc*", + text: "my4afolder/abc", + matched: true, + }, + // Test case - 19. + { + pattern: "my-folder?abc*", + text: "my-folder/abc", + matched: true, + }, + // Test case 20-21. + // '?' matches '/' too. (works with s3). + // This is because the namespace is considered flat. + // "abc?efg" matches both "abcdefg" and "abc/efg". + { + pattern: "my-folder/abc?efg", + text: "my-folder/abcdefg", + matched: true, + }, + { + pattern: "my-folder/abc?efg", + text: "my-folder/abc/efg", + matched: true, + }, + // Test case - 22. + { + pattern: "my-folder/abc????", + text: "my-folder/abc", + matched: false, + }, + // Test case - 23. + { + pattern: "my-folder/abc????", + text: "my-folder/abcde", + matched: false, + }, + // Test case - 24. + { + pattern: "my-folder/abc????", + text: "my-folder/abcdefg", + matched: true, + }, + // Test case 25-26. + // test case with no '*'. + { + pattern: "my-folder/abc?", + text: "my-folder/abc", + matched: false, + }, + { + pattern: "my-folder/abc?", + text: "my-folder/abcd", + matched: true, + }, + { + pattern: "my-folder/abc?", + text: "my-folder/abcde", + matched: false, + }, + // Test case 27. + { + pattern: "my-folder/mnop*?", + text: "my-folder/mnop", + matched: false, + }, + // Test case 28. + { + pattern: "my-folder/mnop*?", + text: "my-folder/mnopqrst/mnopqr", + matched: true, + }, + // Test case 29. + { + pattern: "my-folder/mnop*?", + text: "my-folder/mnopqrst/mnopqrs", + matched: true, + }, + // Test case 30. + { + pattern: "my-folder/mnop*?", + text: "my-folder/mnop", + matched: false, + }, + // Test case 31. + { + pattern: "my-folder/mnop*?", + text: "my-folder/mnopq", + matched: true, + }, + // Test case 32. + { + pattern: "my-folder/mnop*?", + text: "my-folder/mnopqr", + matched: true, + }, + // Test case 33. + { + pattern: "my-folder/mnop*?and", + text: "my-folder/mnopqand", + matched: true, + }, + // Test case 34. + { + pattern: "my-folder/mnop*?and", + text: "my-folder/mnopand", + matched: false, + }, + // Test case 35. + { + pattern: "my-folder/mnop*?and", + text: "my-folder/mnopqand", + matched: true, + }, + // Test case 36. 
+ { + pattern: "my-folder/mnop*?", + text: "my-folder/mn", + matched: false, + }, + // Test case 37. + { + pattern: "my-folder/mnop*?", + text: "my-folder/mnopqrst/mnopqrs", + matched: true, + }, + // Test case 38. + { + pattern: "my-folder/mnop*??", + text: "my-folder/mnopqrst", + matched: true, + }, + // Test case 39. + { + pattern: "my-folder/mnop*qrst", + text: "my-folder/mnopabcdegqrst", + matched: true, + }, + // Test case 40. + { + pattern: "my-folder/mnop*?and", + text: "my-folder/mnopqand", + matched: true, + }, + // Test case 41. + { + pattern: "my-folder/mnop*?and", + text: "my-folder/mnopand", + matched: false, + }, + // Test case 42. + { + pattern: "my-folder/mnop*?and?", + text: "my-folder/mnopqanda", + matched: true, + }, + // Test case 43. + { + pattern: "my-folder/mnop*?and", + text: "my-folder/mnopqanda", + matched: false, + }, + // Test case 44. + + { + pattern: "my-?-folder/abc*", + text: "my-folder/mnopqanda", + matched: false, + }, + } + // Iterating over the test cases, call the function under test and asert the output. + for i, testCase := range testCases { + actualResult := Match(testCase.text, testCase.pattern) + if testCase.matched != actualResult { + t.Errorf("Test %d: Expected the result to be `%v`, but instead found it to be `%v`", i+1, testCase.matched, actualResult) + } + } +} +func TestRandomInput(t *testing.T) { + rand.Seed(time.Now().UnixNano()) + b1 := make([]byte, 100) + b2 := make([]byte, 100) + for i := 0; i < 1000000; i++ { + if _, err := rand.Read(b1); err != nil { + t.Fatal(err) + } + if _, err := rand.Read(b2); err != nil { + t.Fatal(err) + } + Match(string(b1), string(b2)) + } +} +func testAllowable(pattern, exmin, exmax string) error { + min, max := Allowable(pattern) + if min != exmin || max != exmax { + return fmt.Errorf("expected '%v'/'%v', got '%v'/'%v'", + exmin, exmax, min, max) + } + return nil +} +func TestAllowable(t *testing.T) { + if err := testAllowable("hell*", "hell", "helm"); err != nil { + t.Fatal(err) + } + if err := testAllowable("hell?", "hell"+string(0), "hell"+string(utf8.MaxRune)); err != nil { + t.Fatal(err) + } + if err := testAllowable("h解析ell*", "h解析ell", "h解析elm"); err != nil { + t.Fatal(err) + } + if err := testAllowable("h解*ell*", "h解", "h觤"); err != nil { + t.Fatal(err) + } +} +func BenchmarkAscii(t *testing.B) { + for i := 0; i < t.N; i++ { + if !Match("hello", "hello") { + t.Fatal("fail") + } + } +} + +func BenchmarkUnicode(t *testing.B) { + for i := 0; i < t.N; i++ { + if !Match("h情llo", "h情llo") { + t.Fatal("fail") + } + } +} diff --git a/vendor/src/github.com/tidwall/sjson/LICENSE b/vendor/src/github.com/tidwall/sjson/LICENSE new file mode 100644 index 00000000..89593c7c --- /dev/null +++ b/vendor/src/github.com/tidwall/sjson/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Josh Baker + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/vendor/src/github.com/tidwall/sjson/README.md b/vendor/src/github.com/tidwall/sjson/README.md new file mode 100644 index 00000000..1a7c5c42 --- /dev/null +++ b/vendor/src/github.com/tidwall/sjson/README.md @@ -0,0 +1,278 @@ + + +set a json value quickly
+ +SJSON is a Go package that provides a [very fast](#performance) and simple way to set a value in a json document. The purpose for this library is to provide efficient json updating for the [SummitDB](https://github.com/tidwall/summitdb) project. +For quickly retrieving json values check out [GJSON](https://github.com/tidwall/gjson). + +For a command line interface check out [JSONed](https://github.com/tidwall/jsoned). + +Getting Started +=============== + +Installing +---------- + +To start using SJSON, install Go and run `go get`: + +```sh +$ go get -u github.com/tidwall/sjson +``` + +This will retrieve the library. + +Set a value +----------- +Set sets the value for the specified path. +A path is in dot syntax, such as "name.last" or "age". +This function expects that the json is well-formed and validated. +Invalid json will not panic, but it may return back unexpected results. +Invalid paths may return an error. + +```go +package main + +import "github.com/tidwall/sjson" + +const json = `{"name":{"first":"Janet","last":"Prichard"},"age":47}` + +func main() { + value, _ := sjson.Set(json, "name.last", "Anderson") + println(value) +} +``` + +This will print: + +```json +{"name":{"first":"Janet","last":"Anderson"},"age":47} +``` + +Path syntax +----------- + +A path is a series of keys separated by a dot. +The dot and colon characters can be escaped with '\'. + +```json +{ + "name": {"first": "Tom", "last": "Anderson"}, + "age":37, + "children": ["Sara","Alex","Jack"], + "fav.movie": "Deer Hunter", + "friends": [ + {"first": "James", "last": "Murphy"}, + {"first": "Roger", "last": "Craig"} + ] +} +``` +``` +"name.last" >> "Anderson" +"age" >> 37 +"children.1" >> "Alex" +"friends.1.last" >> "Craig" +``` + +The `-1` key can be used to append a value to an existing array: + +``` +"children.-1" >> appends a new value to the end of the children array +``` + +Normally number keys are used to modify arrays, but it's possible to force a numeric object key by using the colon character: + +```json +{ + "users":{ + "2313":{"name":"Sara"}, + "7839":{"name":"Andy"} + } +} +``` + +A colon path would look like: + +``` +"users.:2313.name" >> "Sara" +``` + +Supported types +--------------- + +Pretty much any type is supported: + +```go +sjson.Set(`{"key":true}`, "key", nil) +sjson.Set(`{"key":true}`, "key", false) +sjson.Set(`{"key":true}`, "key", 1) +sjson.Set(`{"key":true}`, "key", 10.5) +sjson.Set(`{"key":true}`, "key", "hello") +sjson.Set(`{"key":true}`, "key", map[string]interface{}{"hello":"world"}) +``` + +When a type is not recognized, SJSON will fallback to the `encoding/json` Marshaller. 
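+
+For example, here is a minimal sketch of that fallback (the `User` struct is
+made up for illustration and is not part of SJSON): a value of an unrecognized
+type is marshalled with `encoding/json` and the resulting JSON is spliced into
+the document.
+
+```go
+package main
+
+import "github.com/tidwall/sjson"
+
+// User is a made-up type for this example; it is not part of SJSON.
+type User struct {
+    Name string `json:"name"`
+}
+
+func main() {
+    // User is not one of the recognized primitive types, so sjson falls
+    // back to encoding/json to marshal it before setting the value.
+    value, _ := sjson.Set(`{"owner":null}`, "owner", User{Name: "Tom"})
+    println(value)
+}
+```
+
+This will print:
+
+```json
+{"owner":{"name":"Tom"}}
+```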
+ + +Examples +-------- + +Set a value from empty document: +```go +value, _ := sjson.Set("", "name", "Tom") +println(value) + +// Output: +// {"name":"Tom"} +``` + +Set a nested value from empty document: +```go +value, _ := sjson.Set("", "name.last", "Anderson") +println(value) + +// Output: +// {"name":{"last":"Anderson"}} +``` + +Set a new value: +```go +value, _ := sjson.Set(`{"name":{"last":"Anderson"}}`, "name.first", "Sara") +println(value) + +// Output: +// {"name":{"first":"Sara","last":"Anderson"}} +``` + +Update an existing value: +```go +value, _ := sjson.Set(`{"name":{"last":"Anderson"}}`, "name.last", "Smith") +println(value) + +// Output: +// {"name":{"last":"Smith"}} +``` + +Set a new array value: +```go +value, _ := sjson.Set(`{"friends":["Andy","Carol"]}`, "friends.2", "Sara") +println(value) + +// Output: +// {"friends":["Andy","Carol","Sara"] +``` + +Append an array value by using the `-1` key in a path: +```go +value, _ := sjson.Set(`{"friends":["Andy","Carol"]}`, "friends.-1", "Sara") +println(value) + +// Output: +// {"friends":["Andy","Carol","Sara"] +``` + +Append an array value that is past the end: +```go +value, _ := sjson.Set(`{"friends":["Andy","Carol"]}`, "friends.4", "Sara") +println(value) + +// Output: +// {"friends":["Andy","Carol",null,null,"Sara"] +``` + +Delete a value: +```go +value, _ := sjson.Delete(`{"name":{"first":"Sara","last":"Anderson"}}`, "name.first") +println(value) + +// Output: +// {"name":{"last":"Anderson"}} +``` + +Delete an array value: +```go +value, _ := sjson.Delete(`{"friends":["Andy","Carol"]}`, "friends.1") +println(value) + +// Output: +// {"friends":["Andy"]} +``` + +Delete the last array value: +```go +value, _ := sjson.Delete(`{"friends":["Andy","Carol"]}`, "friends.-1") +println(value) + +// Output: +// {"friends":["Andy"]} +``` + +## Performance + +Benchmarks of SJSON alongside [encoding/json](https://golang.org/pkg/encoding/json/), +[ffjson](https://github.com/pquerna/ffjson), +[EasyJSON](https://github.com/mailru/easyjson), +and [Gabs](https://github.com/Jeffail/gabs) + +``` +Benchmark_SJSON-8 3000000 805 ns/op 1077 B/op 3 allocs/op +Benchmark_SJSON_ReplaceInPlace-8 3000000 449 ns/op 0 B/op 0 allocs/op +Benchmark_JSON_Map-8 300000 21236 ns/op 6392 B/op 150 allocs/op +Benchmark_JSON_Struct-8 300000 14691 ns/op 1789 B/op 24 allocs/op +Benchmark_Gabs-8 300000 21311 ns/op 6752 B/op 150 allocs/op +Benchmark_FFJSON-8 300000 17673 ns/op 3589 B/op 47 allocs/op +Benchmark_EasyJSON-8 1500000 3119 ns/op 1061 B/op 13 allocs/op +``` + +JSON document used: + +```json +{ + "widget": { + "debug": "on", + "window": { + "title": "Sample Konfabulator Widget", + "name": "main_window", + "width": 500, + "height": 500 + }, + "image": { + "src": "Images/Sun.png", + "hOffset": 250, + "vOffset": 250, + "alignment": "center" + }, + "text": { + "data": "Click Here", + "size": 36, + "style": "bold", + "vOffset": 100, + "alignment": "center", + "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;" + } + } +} +``` + +Each operation was rotated though one of the following search paths: + +``` +widget.window.name +widget.image.hOffset +widget.text.onMouseUp +``` + +*These benchmarks were run on a MacBook Pro 15" 2.8 GHz Intel Core i7 using Go 1.7.* + +## Contact +Josh Baker [@tidwall](http://twitter.com/tidwall) + +## License + +SJSON source code is available under the MIT [License](/LICENSE). 
diff --git a/vendor/src/github.com/tidwall/sjson/logo.png b/vendor/src/github.com/tidwall/sjson/logo.png new file mode 100644 index 00000000..b5aa257b Binary files /dev/null and b/vendor/src/github.com/tidwall/sjson/logo.png differ diff --git a/vendor/src/github.com/tidwall/sjson/sjson.go b/vendor/src/github.com/tidwall/sjson/sjson.go new file mode 100644 index 00000000..7f1d3588 --- /dev/null +++ b/vendor/src/github.com/tidwall/sjson/sjson.go @@ -0,0 +1,653 @@ +// Package sjson provides setting json values. +package sjson + +import ( + jsongo "encoding/json" + "reflect" + "strconv" + "unsafe" + + "github.com/tidwall/gjson" +) + +type errorType struct { + msg string +} + +func (err *errorType) Error() string { + return err.msg +} + +// Options represents additional options for the Set and Delete functions. +type Options struct { + // Optimistic is a hint that the value likely exists which + // allows for the sjson to perform a fast-track search and replace. + Optimistic bool + // ReplaceInPlace is a hint to replace the input json rather than + // allocate a new json byte slice. When this field is specified + // the input json will not longer be valid and it should not be used + // In the case when the destination slice doesn't have enough free + // bytes to replace the data in place, a new bytes slice will be + // created under the hood. + // The Optimistic flag must be set to true and the input must be a + // byte slice in order to use this field. + ReplaceInPlace bool +} + +type pathResult struct { + part string // current key part + path string // remaining path + force bool // force a string key + more bool // there is more path to parse +} + +func parsePath(path string) (pathResult, error) { + var r pathResult + if len(path) > 0 && path[0] == ':' { + r.force = true + path = path[1:] + } + for i := 0; i < len(path); i++ { + if path[i] == '.' { + r.part = path[:i] + r.path = path[i+1:] + r.more = true + return r, nil + } + if path[i] == '*' || path[i] == '?' { + return r, &errorType{"wildcard characters not allowed in path"} + } else if path[i] == '#' { + return r, &errorType{"array access character not allowed in path"} + } + if path[i] == '\\' { + // go into escape mode. this is a slower path that + // strips off the escape character from the part. + epart := []byte(path[:i]) + i++ + if i < len(path) { + epart = append(epart, path[i]) + i++ + for ; i < len(path); i++ { + if path[i] == '\\' { + i++ + if i < len(path) { + epart = append(epart, path[i]) + } + continue + } else if path[i] == '.' { + r.part = string(epart) + r.path = path[i+1:] + r.more = true + return r, nil + } else if path[i] == '*' || path[i] == '?' { + return r, &errorType{ + "wildcard characters not allowed in path"} + } else if path[i] == '#' { + return r, &errorType{ + "array access character not allowed in path"} + } + epart = append(epart, path[i]) + } + } + // append the last part + r.part = string(epart) + return r, nil + } + } + r.part = path + return r, nil +} + +func mustMarshalString(s string) bool { + for i := 0; i < len(s); i++ { + if s[i] < ' ' || s[i] > 0x7f || s[i] == '"' { + return true + } + } + return false +} + +// appendStringify makes a json string and appends to buf. +func appendStringify(buf []byte, s string) []byte { + if mustMarshalString(s) { + b, _ := jsongo.Marshal(s) + return append(buf, b...) + } + buf = append(buf, '"') + buf = append(buf, s...) + buf = append(buf, '"') + return buf +} + +// appendBuild builds a json block from a json path. 
+func appendBuild(buf []byte, array bool, paths []pathResult, raw string, + stringify bool) []byte { + if !array { + buf = appendStringify(buf, paths[0].part) + buf = append(buf, ':') + } + if len(paths) > 1 { + n, numeric := atoui(paths[1]) + if numeric || (!paths[1].force && paths[1].part == "-1") { + buf = append(buf, '[') + buf = appendRepeat(buf, "null,", n) + buf = appendBuild(buf, true, paths[1:], raw, stringify) + buf = append(buf, ']') + } else { + buf = append(buf, '{') + buf = appendBuild(buf, false, paths[1:], raw, stringify) + buf = append(buf, '}') + } + } else { + if stringify { + buf = appendStringify(buf, raw) + } else { + buf = append(buf, raw...) + } + } + return buf +} + +// atoui does a rip conversion of string -> unigned int. +func atoui(r pathResult) (n int, ok bool) { + if r.force { + return 0, false + } + for i := 0; i < len(r.part); i++ { + if r.part[i] < '0' || r.part[i] > '9' { + return 0, false + } + n = n*10 + int(r.part[i]-'0') + } + return n, true +} + +// appendRepeat repeats string "n" times and appends to buf. +func appendRepeat(buf []byte, s string, n int) []byte { + for i := 0; i < n; i++ { + buf = append(buf, s...) + } + return buf +} + +// trim does a rip trim +func trim(s string) string { + for len(s) > 0 { + if s[0] <= ' ' { + s = s[1:] + continue + } + break + } + for len(s) > 0 { + if s[len(s)-1] <= ' ' { + s = s[:len(s)-1] + continue + } + break + } + return s +} + +// deleteTailItem deletes the previous key or comma. +func deleteTailItem(buf []byte) ([]byte, bool) { +loop: + for i := len(buf) - 1; i >= 0; i-- { + // look for either a ',',':','[' + switch buf[i] { + case '[': + return buf, true + case ',': + return buf[:i], false + case ':': + // delete tail string + i-- + for ; i >= 0; i-- { + if buf[i] == '"' { + i-- + for ; i >= 0; i-- { + if buf[i] == '"' { + i-- + if i >= 0 && i == '\\' { + i-- + continue + } + for ; i >= 0; i-- { + // look for either a ',','{' + switch buf[i] { + case '{': + return buf[:i+1], true + case ',': + return buf[:i], false + } + } + } + } + break + } + } + break loop + } + } + return buf, false +} + +var errNoChange = &errorType{"no change"} + +func appendRawPaths(buf []byte, jstr string, paths []pathResult, raw string, + stringify, del bool) ([]byte, error) { + var err error + var res gjson.Result + var found bool + if del { + if paths[0].part == "-1" && !paths[0].force { + res = gjson.Get(jstr, "#") + if res.Int() > 0 { + res = gjson.Get(jstr, strconv.FormatInt(int64(res.Int()-1), 10)) + found = true + } + } + } + if !found { + res = gjson.Get(jstr, paths[0].part) + } + if res.Index > 0 { + if len(paths) > 1 { + buf = append(buf, jstr[:res.Index]...) + buf, err = appendRawPaths(buf, res.Raw, paths[1:], raw, + stringify, del) + if err != nil { + return nil, err + } + buf = append(buf, jstr[res.Index+len(res.Raw):]...) + return buf, nil + } + buf = append(buf, jstr[:res.Index]...) + var exidx int // additional forward stripping + if del { + var delNextComma bool + buf, delNextComma = deleteTailItem(buf) + if delNextComma { + i, j := res.Index+len(res.Raw), 0 + for ; i < len(jstr); i, j = i+1, j+1 { + if jstr[i] <= ' ' { + continue + } + if jstr[i] == ',' { + exidx = j + 1 + } + break + } + } + } else { + if stringify { + buf = appendStringify(buf, raw) + } else { + buf = append(buf, raw...) + } + } + buf = append(buf, jstr[res.Index+len(res.Raw)+exidx:]...) 
+ return buf, nil + } + if del { + return nil, errNoChange + } + n, numeric := atoui(paths[0]) + isempty := true + for i := 0; i < len(jstr); i++ { + if jstr[i] > ' ' { + isempty = false + break + } + } + if isempty { + if numeric { + jstr = "[]" + } else { + jstr = "{}" + } + } + jsres := gjson.Parse(jstr) + if jsres.Type != gjson.JSON { + if numeric { + jstr = "[]" + } else { + jstr = "{}" + } + jsres = gjson.Parse(jstr) + } + var comma bool + for i := 1; i < len(jsres.Raw); i++ { + if jsres.Raw[i] <= ' ' { + continue + } + if jsres.Raw[i] == '}' || jsres.Raw[i] == ']' { + break + } + comma = true + break + } + switch jsres.Raw[0] { + default: + return nil, &errorType{"json must be an object or array"} + case '{': + buf = append(buf, '{') + buf = appendBuild(buf, false, paths, raw, stringify) + if comma { + buf = append(buf, ',') + } + buf = append(buf, jsres.Raw[1:]...) + return buf, nil + case '[': + var appendit bool + if !numeric { + if paths[0].part == "-1" && !paths[0].force { + appendit = true + } else { + return nil, &errorType{ + "cannot set array element for non-numeric key '" + + paths[0].part + "'"} + } + } + if appendit { + njson := trim(jsres.Raw) + if njson[len(njson)-1] == ']' { + njson = njson[:len(njson)-1] + } + buf = append(buf, njson...) + if comma { + buf = append(buf, ',') + } + + buf = appendBuild(buf, true, paths, raw, stringify) + buf = append(buf, ']') + return buf, nil + } + buf = append(buf, '[') + ress := jsres.Array() + for i := 0; i < len(ress); i++ { + if i > 0 { + buf = append(buf, ',') + } + buf = append(buf, ress[i].Raw...) + } + if len(ress) == 0 { + buf = appendRepeat(buf, "null,", n-len(ress)) + } else { + buf = appendRepeat(buf, ",null", n-len(ress)) + if comma { + buf = append(buf, ',') + } + } + buf = appendBuild(buf, true, paths, raw, stringify) + buf = append(buf, ']') + return buf, nil + } +} + +func isOptimisticPath(path string) bool { + for i := 0; i < len(path); i++ { + if path[i] < '.' || path[i] > 'z' { + return false + } + if path[i] > '9' && path[i] < 'A' { + return false + } + if path[i] > 'z' { + return false + } + } + return true +} + +func set(jstr, path, raw string, + stringify, del, optimistic, inplace bool) ([]byte, error) { + if path == "" { + return nil, &errorType{"path cannot be empty"} + } + if !del && optimistic && isOptimisticPath(path) { + res := gjson.Get(jstr, path) + if res.Exists() && res.Index > 0 { + sz := len(jstr) - len(res.Raw) + len(raw) + if stringify { + sz += 2 + } + if inplace && sz <= len(jstr) { + if !stringify || !mustMarshalString(raw) { + jsonh := *(*reflect.StringHeader)(unsafe.Pointer(&jstr)) + jsonbh := reflect.SliceHeader{ + Data: jsonh.Data, Len: jsonh.Len, Cap: jsonh.Len} + jbytes := *(*[]byte)(unsafe.Pointer(&jsonbh)) + if stringify { + jbytes[res.Index] = '"' + copy(jbytes[res.Index+1:], []byte(raw)) + jbytes[res.Index+1+len(raw)] = '"' + copy(jbytes[res.Index+1+len(raw)+1:], + jbytes[res.Index+len(res.Raw):]) + } else { + copy(jbytes[res.Index:], []byte(raw)) + copy(jbytes[res.Index+len(raw):], + jbytes[res.Index+len(res.Raw):]) + } + return jbytes[:sz], nil + } + return nil, nil + } + buf := make([]byte, 0, sz) + buf = append(buf, jstr[:res.Index]...) + if stringify { + buf = appendStringify(buf, raw) + } else { + buf = append(buf, raw...) + } + buf = append(buf, jstr[res.Index+len(res.Raw):]...) 
+ return buf, nil + } + } + // parse the path, make sure that it does not contain invalid characters + // such as '#', '?', '*' + paths := make([]pathResult, 0, 4) + r, err := parsePath(path) + if err != nil { + return nil, err + } + paths = append(paths, r) + for r.more { + if r, err = parsePath(r.path); err != nil { + return nil, err + } + paths = append(paths, r) + } + + njson, err := appendRawPaths(nil, jstr, paths, raw, stringify, del) + if err != nil { + return nil, err + } + return njson, nil +} + +// Set sets a json value for the specified path. +// A path is in dot syntax, such as "name.last" or "age". +// This function expects that the json is well-formed, and does not validate. +// Invalid json will not panic, but it may return back unexpected results. +// An error is returned if the path is not valid. +// +// A path is a series of keys separated by a dot. +// +// { +// "name": {"first": "Tom", "last": "Anderson"}, +// "age":37, +// "children": ["Sara","Alex","Jack"], +// "friends": [ +// {"first": "James", "last": "Murphy"}, +// {"first": "Roger", "last": "Craig"} +// ] +// } +// "name.last" >> "Anderson" +// "age" >> 37 +// "children.1" >> "Alex" +// +func Set(json, path string, value interface{}) (string, error) { + return SetOptions(json, path, value, nil) +} + +// SetOptions sets a json value for the specified path with options. +// A path is in dot syntax, such as "name.last" or "age". +// This function expects that the json is well-formed, and does not validate. +// Invalid json will not panic, but it may return back unexpected results. +// An error is returned if the path is not valid. +func SetOptions(json, path string, value interface{}, + opts *Options) (string, error) { + if opts != nil { + if opts.ReplaceInPlace { + // it's not safe to replace bytes in-place for strings + // copy the Options and set options.ReplaceInPlace to false. + nopts := *opts + opts = &nopts + opts.ReplaceInPlace = false + } + } + jsonh := *(*reflect.StringHeader)(unsafe.Pointer(&json)) + jsonbh := reflect.SliceHeader{Data: jsonh.Data, Len: jsonh.Len} + jsonb := *(*[]byte)(unsafe.Pointer(&jsonbh)) + res, err := SetBytesOptions(jsonb, path, value, opts) + return string(res), err +} + +// SetBytes sets a json value for the specified path. +// If working with bytes, this method preferred over +// Set(string(data), path, value) +func SetBytes(json []byte, path string, value interface{}) ([]byte, error) { + return SetBytesOptions(json, path, value, nil) +} + +// SetBytesOptions sets a json value for the specified path with options. 
+// If working with bytes, this method preferred over +// SetOptions(string(data), path, value) +func SetBytesOptions(json []byte, path string, value interface{}, + opts *Options) ([]byte, error) { + var optimistic, inplace bool + if opts != nil { + optimistic = opts.Optimistic + inplace = opts.ReplaceInPlace + } + jstr := *(*string)(unsafe.Pointer(&json)) + var res []byte + var err error + switch v := value.(type) { + default: + b, err := jsongo.Marshal(value) + if err != nil { + return nil, err + } + raw := *(*string)(unsafe.Pointer(&b)) + res, err = set(jstr, path, raw, false, false, optimistic, inplace) + case dtype: + res, err = set(jstr, path, "", false, true, optimistic, inplace) + case string: + res, err = set(jstr, path, v, true, false, optimistic, inplace) + case []byte: + raw := *(*string)(unsafe.Pointer(&v)) + res, err = set(jstr, path, raw, true, false, optimistic, inplace) + case bool: + if v { + res, err = set(jstr, path, "true", false, false, optimistic, inplace) + } else { + res, err = set(jstr, path, "false", false, false, optimistic, inplace) + } + case int8: + res, err = set(jstr, path, strconv.FormatInt(int64(v), 10), + false, false, optimistic, inplace) + case int16: + res, err = set(jstr, path, strconv.FormatInt(int64(v), 10), + false, false, optimistic, inplace) + case int32: + res, err = set(jstr, path, strconv.FormatInt(int64(v), 10), + false, false, optimistic, inplace) + case int64: + res, err = set(jstr, path, strconv.FormatInt(int64(v), 10), + false, false, optimistic, inplace) + case uint8: + res, err = set(jstr, path, strconv.FormatUint(uint64(v), 10), + false, false, optimistic, inplace) + case uint16: + res, err = set(jstr, path, strconv.FormatUint(uint64(v), 10), + false, false, optimistic, inplace) + case uint32: + res, err = set(jstr, path, strconv.FormatUint(uint64(v), 10), + false, false, optimistic, inplace) + case uint64: + res, err = set(jstr, path, strconv.FormatUint(uint64(v), 10), + false, false, optimistic, inplace) + case float32: + res, err = set(jstr, path, strconv.FormatFloat(float64(v), 'f', -1, 64), + false, false, optimistic, inplace) + case float64: + res, err = set(jstr, path, strconv.FormatFloat(float64(v), 'f', -1, 64), + false, false, optimistic, inplace) + } + if err == errNoChange { + return json, nil + } + return res, err +} + +// SetRaw sets a raw json value for the specified path. +// This function works the same as Set except that the value is set as a +// raw block of json. This allows for setting premarshalled json objects. +func SetRaw(json, path, value string) (string, error) { + return SetRawOptions(json, path, value, nil) +} + +// SetRawOptions sets a raw json value for the specified path with options. +// This furnction works the same as SetOptions except that the value is set +// as a raw block of json. This allows for setting premarshalled json objects. +func SetRawOptions(json, path, value string, opts *Options) (string, error) { + var optimistic bool + if opts != nil { + optimistic = opts.Optimistic + } + res, err := set(json, path, value, false, false, optimistic, false) + if err == errNoChange { + return json, nil + } + return string(res), err +} + +// SetRawBytes sets a raw json value for the specified path. +// If working with bytes, this method preferred over +// SetRaw(string(data), path, value) +func SetRawBytes(json []byte, path string, value []byte) ([]byte, error) { + return SetRawBytesOptions(json, path, value, nil) +} + +// SetRawBytesOptions sets a raw json value for the specified path with options. 
+// If working with bytes, this method preferred over +// SetRawOptions(string(data), path, value, opts) +func SetRawBytesOptions(json []byte, path string, value []byte, + opts *Options) ([]byte, error) { + jstr := *(*string)(unsafe.Pointer(&json)) + vstr := *(*string)(unsafe.Pointer(&value)) + var optimistic, inplace bool + if opts != nil { + optimistic = opts.Optimistic + inplace = opts.ReplaceInPlace + } + res, err := set(jstr, path, vstr, false, false, optimistic, inplace) + if err == errNoChange { + return json, nil + } + return res, err +} + +type dtype struct{} + +// Delete deletes a value from json for the specified path. +func Delete(json, path string) (string, error) { + return Set(json, path, dtype{}) +} + +// DeleteBytes deletes a value from json for the specified path. +func DeleteBytes(json []byte, path string) ([]byte, error) { + return SetBytes(json, path, dtype{}) +} diff --git a/vendor/src/github.com/tidwall/sjson/sjson_test.go b/vendor/src/github.com/tidwall/sjson/sjson_test.go new file mode 100644 index 00000000..a7a8f7e1 --- /dev/null +++ b/vendor/src/github.com/tidwall/sjson/sjson_test.go @@ -0,0 +1,1239 @@ +package sjson + +import ( + "bytes" + "encoding/hex" + gojson "encoding/json" + "fmt" + "math/rand" + "strings" + "testing" + "time" + + "github.com/Jeffail/gabs" + + jlexer "github.com/mailru/easyjson/jlexer" + jwriter "github.com/mailru/easyjson/jwriter" + fflib "github.com/pquerna/ffjson/fflib/v1" +) + +func TestInvalidPaths(t *testing.T) { + var err error + _, err = SetRaw(`{"hello":"world"}`, "", `"planet"`) + if err == nil || err.Error() != "path cannot be empty" { + t.Fatalf("expecting '%v', got '%v'", "path cannot be empty", err) + } + _, err = SetRaw("", "name.last.#", "") + if err == nil || err.Error() != "array access character not allowed in path" { + t.Fatalf("expecting '%v', got '%v'", "array access character not allowed in path", err) + } + _, err = SetRaw("", "name.last.\\1#", "") + if err == nil || err.Error() != "array access character not allowed in path" { + t.Fatalf("expecting '%v', got '%v'", "array access character not allowed in path", err) + } + _, err = SetRaw("", "name.las?t", "") + if err == nil || err.Error() != "wildcard characters not allowed in path" { + t.Fatalf("expecting '%v', got '%v'", "wildcard characters not allowed in path", err) + } + _, err = SetRaw("", "name.la\\s?t", "") + if err == nil || err.Error() != "wildcard characters not allowed in path" { + t.Fatalf("expecting '%v', got '%v'", "wildcard characters not allowed in path", err) + } + _, err = SetRaw("", "name.las*t", "") + if err == nil || err.Error() != "wildcard characters not allowed in path" { + t.Fatalf("expecting '%v', got '%v'", "wildcard characters not allowed in path", err) + } + _, err = SetRaw("", "name.las\\a*t", "") + if err == nil || err.Error() != "wildcard characters not allowed in path" { + t.Fatalf("expecting '%v', got '%v'", "wildcard characters not allowed in path", err) + } +} + +const ( + setRaw = 1 + setBool = 2 + setInt = 3 + setFloat = 4 + setString = 5 + setDelete = 6 +) + +func testRaw(t *testing.T, kind int, expect, json, path string, value interface{}) { + var json2 string + var err error + switch kind { + default: + json2, err = Set(json, path, value) + case setRaw: + json2, err = SetRaw(json, path, value.(string)) + case setDelete: + json2, err = Delete(json, path) + } + if err != nil { + t.Fatal(err) + } else if json2 != expect { + t.Fatalf("expected '%v', got '%v'", expect, json2) + } + + var json3 []byte + switch kind { + default: + 
json3, err = SetBytes([]byte(json), path, value) + case setRaw: + json3, err = SetRawBytes([]byte(json), path, []byte(value.(string))) + case setDelete: + json3, err = DeleteBytes([]byte(json), path) + } + if err != nil { + t.Fatal(err) + } else if string(json3) != expect { + t.Fatalf("expected '%v', got '%v'", expect, string(json3)) + } +} +func TestBasic(t *testing.T) { + testRaw(t, setRaw, `[{"hiw":"planet","hi":"world"}]`, `[{"hi":"world"}]`, "0.hiw", `"planet"`) + testRaw(t, setRaw, `[true]`, ``, "0", `true`) + testRaw(t, setRaw, `[null,true]`, ``, "1", `true`) + testRaw(t, setRaw, `[1,null,true]`, `[1]`, "2", `true`) + testRaw(t, setRaw, `[1,true,false]`, `[1,null,false]`, "1", `true`) + testRaw(t, setRaw, + `[1,{"hello":"when","this":[0,null,2]},false]`, + `[1,{"hello":"when","this":[0,1,2]},false]`, + "1.this.1", `null`) + testRaw(t, setRaw, + `{"a":1,"b":{"hello":"when","this":[0,null,2]},"c":false}`, + `{"a":1,"b":{"hello":"when","this":[0,1,2]},"c":false}`, + "b.this.1", `null`) + testRaw(t, setRaw, + `{"a":1,"b":{"hello":"when","this":[0,null,2,null,4]},"c":false}`, + `{"a":1,"b":{"hello":"when","this":[0,null,2]},"c":false}`, + "b.this.4", `4`) + testRaw(t, setRaw, + `{"b":{"this":[null,null,null,null,4]}}`, + ``, + "b.this.4", `4`) + testRaw(t, setRaw, + `[null,{"this":[null,null,null,null,4]}]`, + ``, + "1.this.4", `4`) + testRaw(t, setRaw, + `{"1":{"this":[null,null,null,null,4]}}`, + ``, + ":1.this.4", `4`) + testRaw(t, setRaw, + `{":1":{"this":[null,null,null,null,4]}}`, + ``, + "\\:1.this.4", `4`) + testRaw(t, setRaw, + `{":\1":{"this":[null,null,null,null,{".HI":4}]}}`, + ``, + "\\:\\\\1.this.4.\\.HI", `4`) + testRaw(t, setRaw, + `{"b":{"this":{"😇":""}}}`, + ``, + "b.this.😇", `""`) + testRaw(t, setRaw, + `[ 1,2 ,3]`, + ` [ 1,2 ] `, + "-1", `3`) + testRaw(t, setInt, `[1234]`, ``, `0`, int64(1234)) + testRaw(t, setFloat, `[1234.5]`, ``, `0`, float64(1234.5)) + testRaw(t, setString, `["1234.5"]`, ``, `0`, "1234.5") + testRaw(t, setBool, `[true]`, ``, `0`, true) + testRaw(t, setBool, `[null]`, ``, `0`, nil) + testRaw(t, setString, `{"arr":[1]}`, ``, `arr.-1`, 1) +} + +func TestDelete(t *testing.T) { + testRaw(t, setDelete, `[456]`, `[123,456]`, `0`, nil) + testRaw(t, setDelete, `[123,789]`, `[123,456,789]`, `1`, nil) + testRaw(t, setDelete, `[123,456]`, `[123,456,789]`, `-1`, nil) + testRaw(t, setDelete, `{"a":[123,456]}`, `{"a":[123,456,789]}`, `a.-1`, nil) + testRaw(t, setDelete, `{"and":"another"}`, `{"this":"that","and":"another"}`, `this`, nil) + testRaw(t, setDelete, `{"this":"that"}`, `{"this":"that","and":"another"}`, `and`, nil) + testRaw(t, setDelete, `{}`, `{"and":"another"}`, `and`, nil) + testRaw(t, setDelete, `{"1":"2"}`, `{"1":"2"}`, `3`, nil) +} + +// TestRandomData is a fuzzing test that throws random data at SetRaw +// function looking for panics. 
+func TestRandomData(t *testing.T) { + var lstr string + defer func() { + if v := recover(); v != nil { + println("'" + hex.EncodeToString([]byte(lstr)) + "'") + println("'" + lstr + "'") + panic(v) + } + }() + rand.Seed(time.Now().UnixNano()) + b := make([]byte, 200) + for i := 0; i < 2000000; i++ { + n, err := rand.Read(b[:rand.Int()%len(b)]) + if err != nil { + t.Fatal(err) + } + lstr = string(b[:n]) + SetRaw(lstr, "zzzz.zzzz.zzzz", "123") + } +} + +var exampleJSON = ` +{ + "sha": "d25341478381063d1c76e81b3a52e0592a7c997f", + "commit": { + "author": { + "name": "Tom Tom Anderson", + "email": "tomtom@anderson.edu", + "date": "2013-06-22T16:30:59Z" + }, + "committer": { + "name": "Tom Tom Anderson", + "email": "jeffditto@anderson.edu", + "date": "2013-06-22T16:30:59Z" + }, + "message": "Merge pull request #162 from stedolan/utf8-fixes\n\nUtf8 fixes. Closes #161", + "tree": { + "sha": "6ab697a8dfb5a96e124666bf6d6213822599fb40", + "url": "https://api.github.com/repos/stedolan/jq/git/trees/6ab697a8dfb5a96e124666bf6d6213822599fb40" + }, + "url": "https://api.github.com/repos/stedolan/jq/git/commits/d25341478381063d1c76e81b3a52e0592a7c997f", + "comment_count": 0 + } +} +` +var path = "commit.committer.email" +var value = "tomtom@anderson.com" +var rawValue = `"tomtom@anderson.com"` +var rawValueBytes = []byte(rawValue) +var expect = strings.Replace(exampleJSON, "jeffditto@anderson.edu", "tomtom@anderson.com", 1) +var jsonBytes = []byte(exampleJSON) +var jsonBytes2 = []byte(exampleJSON) +var expectBytes = []byte(expect) +var opts = &Options{Optimistic: true} +var optsInPlace = &Options{Optimistic: true, ReplaceInPlace: true} + +func BenchmarkSet(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := Set(exampleJSON, path, value) + if err != nil { + t.Fatal(err) + } + if res != expect { + t.Fatal("expected '%v', got '%v'", expect, res) + } + } +} + +func BenchmarkSetRaw(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := SetRaw(exampleJSON, path, rawValue) + if err != nil { + t.Fatal(err) + } + if res != expect { + t.Fatal("expected '%v', got '%v'", expect, res) + } + } +} + +func BenchmarkSetBytes(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := SetBytes(jsonBytes, path, value) + if err != nil { + t.Fatal(err) + } + if bytes.Compare(res, expectBytes) != 0 { + t.Fatal("expected '%v', got '%v'", expect, res) + } + } +} + +func BenchmarkSetRawBytes(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := SetRawBytes(jsonBytes, path, rawValueBytes) + if err != nil { + t.Fatal(err) + } + if bytes.Compare(res, expectBytes) != 0 { + t.Fatal("expected '%v', got '%v'", expect, res) + } + } +} + +func BenchmarkSetOptimistic(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := SetOptions(exampleJSON, path, value, opts) + if err != nil { + t.Fatal(err) + } + if res != expect { + t.Fatal("expected '%v', got '%v'", expect, res) + } + } +} + +func BenchmarkSetInPlace(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := SetOptions(exampleJSON, path, value, optsInPlace) + if err != nil { + t.Fatal(err) + } + if res != expect { + t.Fatal("expected '%v', got '%v'", expect, res) + } + } +} + +func BenchmarkSetRawOptimistic(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := SetRawOptions(exampleJSON, path, rawValue, opts) + if err != nil { + t.Fatal(err) + } + if res != expect { + t.Fatal("expected '%v', got '%v'", expect, res) + } + } +} + 
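+// BenchmarkSetRawInPlace benchmarks SetRawOptions with both the Optimistic and
+// ReplaceInPlace options requested, on the same document and path as the
+// benchmarks above.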
+func BenchmarkSetRawInPlace(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := SetRawOptions(exampleJSON, path, rawValue, optsInPlace) + if err != nil { + t.Fatal(err) + } + if res != expect { + t.Fatal("expected '%v', got '%v'", expect, res) + } + } +} + +func BenchmarkSetBytesOptimistic(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := SetBytesOptions(jsonBytes, path, value, opts) + if err != nil { + t.Fatal(err) + } + if bytes.Compare(res, expectBytes) != 0 { + t.Fatal("expected '%v', got '%v'", string(expectBytes), string(res)) + } + } +} + +func BenchmarkSetBytesInPlace(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + copy(jsonBytes2, jsonBytes) + res, err := SetBytesOptions(jsonBytes2, path, value, optsInPlace) + if err != nil { + t.Fatal(err) + } + if bytes.Compare(res, expectBytes) != 0 { + t.Fatal("expected '%v', got '%v'", string(expectBytes), string(res)) + } + } +} + +func BenchmarkSetRawBytesOptimistic(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + res, err := SetRawBytesOptions(jsonBytes, path, rawValueBytes, opts) + if err != nil { + t.Fatal(err) + } + if bytes.Compare(res, expectBytes) != 0 { + t.Fatal("expected '%v', got '%v'", string(expectBytes), string(res)) + } + } +} + +func BenchmarkSetRawBytesInPlace(t *testing.B) { + t.ReportAllocs() + for i := 0; i < t.N; i++ { + copy(jsonBytes2, jsonBytes) + res, err := SetRawBytesOptions(jsonBytes2, path, rawValueBytes, optsInPlace) + if err != nil { + t.Fatal(err) + } + if bytes.Compare(res, expectBytes) != 0 { + t.Fatal("expected '%v', got '%v'", string(expectBytes), string(res)) + } + } +} + +const benchJSON = ` +{ + "widget": { + "debug": "on", + "window": { + "title": "Sample Konfabulator Widget", + "name": "main_window", + "width": 500, + "height": 500 + }, + "image": { + "src": "Images/Sun.png", + "hOffset": 250, + "vOffset": 250, + "alignment": "center" + }, + "text": { + "data": "Click Here", + "size": 36, + "style": "bold", + "vOffset": 100, + "alignment": "center", + "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;" + } + } +} +` + +type BenchStruct struct { + Widget struct { + Debug string `json:"debug"` + Window struct { + Title string `json:"title"` + Name string `json:"name"` + Width int `json:"width"` + Height int `json:"height"` + } `json:"window"` + Image struct { + Src string `json:"src"` + HOffset int `json:"hOffset"` + VOffset int `json:"vOffset"` + Alignment string `json:"alignment"` + } `json:"image"` + Text struct { + Data string `json:"data"` + Size int `json:"size"` + Style string `json:"style"` + VOffset int `json:"vOffset"` + Alignment string `json:"alignment"` + OnMouseUp string `json:"onMouseUp"` + } `json:"text"` + } `json:"widget"` +} + +var benchPaths = []string{ + "widget.window.name", + "widget.image.hOffset", + "widget.text.onMouseUp", +} + +func Benchmark_SJSON(t *testing.B) { + opts := Options{Optimistic: true} + t.ReportAllocs() + t.ResetTimer() + for i := 0; i < t.N; i++ { + for _, path := range benchPaths { + var err error + switch path { + case "widget.window.name": + _, err = SetOptions(benchJSON, path, "1", &opts) + case "widget.image.hOffset": + _, err = SetOptions(benchJSON, path, 1, &opts) + case "widget.text.onMouseUp": + _, err = SetOptions(benchJSON, path, "1", &opts) + } + if err != nil { + t.Fatal(err) + } + + } + } + t.N *= len(benchPaths) +} + +func Benchmark_SJSON_ReplaceInPlace(t *testing.B) { + data := []byte(benchJSON) + opts := Options{ + Optimistic: true, + ReplaceInPlace: 
true, + } + v1, v2 := []byte(`"1"`), []byte("1") + t.ReportAllocs() + t.ResetTimer() + for i := 0; i < t.N; i++ { + for _, path := range benchPaths { + var err error + switch path { + case "widget.window.name": + _, err = SetRawBytesOptions(data, path, v1, &opts) + case "widget.image.hOffset": + _, err = SetRawBytesOptions(data, path, v2, &opts) + case "widget.text.onMouseUp": + _, err = SetRawBytesOptions(data, path, v1, &opts) + } + if err != nil { + t.Fatal(err) + } + + } + } + t.N *= len(benchPaths) +} + +func Benchmark_Encoding_JSON_Map(t *testing.B) { + data := []byte(benchJSON) + t.ReportAllocs() + t.ResetTimer() + for i := 0; i < t.N; i++ { + for _, path := range benchPaths { + var m map[string]interface{} + if err := gojson.Unmarshal(data, &m); err != nil { + t.Fatal(err) + } + switch path { + case "widget.window.name": + m["widget"].(map[string]interface{})["window"].(map[string]interface{})["name"] = "1" + case "widget.image.hOffset": + m["widget"].(map[string]interface{})["image"].(map[string]interface{})["hOffset"] = 1 + case "widget.text.onMouseUp": + m["widget"].(map[string]interface{})["text"].(map[string]interface{})["onMouseUp"] = "1" + } + _, err := gojson.Marshal(&m) + if err != nil { + t.Fatal(err) + } + } + } + t.N *= len(benchPaths) +} + +func Benchmark_Encoding_JSON_Struct(t *testing.B) { + data := []byte(benchJSON) + t.ReportAllocs() + t.ResetTimer() + for i := 0; i < t.N; i++ { + for _, path := range benchPaths { + var v BenchStruct + if err := gojson.Unmarshal(data, &v); err != nil { + t.Fatal(err) + } + switch path { + case "widget.window.name": + v.Widget.Window.Name = "1" + case "widget.image.hOffset": + v.Widget.Image.HOffset = 1 + case "widget.text.onMouseUp": + v.Widget.Text.OnMouseUp = "1" + } + _, err := gojson.Marshal(&v) + if err != nil { + t.Fatal(err) + } + } + } + t.N *= len(benchPaths) +} + +func Benchmark_Gabs(t *testing.B) { + data := []byte(benchJSON) + t.ReportAllocs() + t.ResetTimer() + for i := 0; i < t.N; i++ { + for _, path := range benchPaths { + jsonParsed, err := gabs.ParseJSON(data) + if err != nil { + t.Fatal(err) + } + switch path { + case "widget.window.name": + jsonParsed.SetP("1", path) + case "widget.image.hOffset": + jsonParsed.SetP(1, path) + case "widget.text.onMouseUp": + jsonParsed.SetP("1", path) + } + jsonParsed.String() + } + } + t.N *= len(benchPaths) +} + +func Benchmark_FFJSON(t *testing.B) { + data := []byte(benchJSON) + t.ReportAllocs() + t.ResetTimer() + for i := 0; i < t.N; i++ { + for _, path := range benchPaths { + var v BenchStruct + if err := v.UnmarshalFFJSONFromData(data); err != nil { + t.Fatal(err) + } + switch path { + case "widget.window.name": + v.Widget.Window.Name = "1" + case "widget.image.hOffset": + v.Widget.Image.HOffset = 1 + case "widget.text.onMouseUp": + v.Widget.Text.OnMouseUp = "1" + } + _, err := v.MarshalFFJSONFromData() + if err != nil { + t.Fatal(err) + } + } + } + t.N *= len(benchPaths) +} + +func Benchmark_EasyJSON(t *testing.B) { + data := []byte(benchJSON) + t.ReportAllocs() + t.ResetTimer() + for i := 0; i < t.N; i++ { + for _, path := range benchPaths { + var v BenchStruct + if err := v.UnmarshalEasyJSONFromData(data); err != nil { + t.Fatal(err) + } + switch path { + case "widget.window.name": + v.Widget.Window.Name = "1" + case "widget.image.hOffset": + v.Widget.Image.HOffset = 1 + case "widget.text.onMouseUp": + v.Widget.Text.OnMouseUp = "1" + } + _, err := v.MarshalEasyJSONFromData() + if err != nil { + t.Fatal(err) + } + } + } + t.N *= len(benchPaths) +} + 
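A note on the in-place variants above (again, not part of the diff): Options.ReplaceInPlace lets sjson overwrite the input buffer instead of allocating a new one when the replacement fits, which is why BenchmarkSetBytesInPlace and BenchmarkSetRawBytesInPlace copy jsonBytes into jsonBytes2 before every call; the string-based variants need no copy because a Go string cannot be modified in place. A small sketch of that pattern, with illustrative data:

    package main

    import (
        "fmt"

        "github.com/tidwall/sjson"
    )

    func main() {
        opts := &sjson.Options{Optimistic: true, ReplaceInPlace: true}

        src := []byte(`{"widget":{"window":{"name":"main_window"}}}`)

        // Work on a copy: with ReplaceInPlace the input bytes may be mutated.
        buf := make([]byte, len(src))
        copy(buf, src)

        out, err := sjson.SetBytesOptions(buf, "widget.window.name", "1", opts)
        if err != nil {
            panic(err)
        }
        fmt.Println(string(out)) // src is left untouched; buf may now back out
    }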
+////////////////////////////////////////////////////////////// +// EVERYTHING BELOW IS AUTOGENERATED + +// suppress unused package warning +var ( + _ = gojson.RawMessage{} + _ = jlexer.Lexer{} + _ = jwriter.Writer{} +) + +func easyjsonDbb23193DecodeGithubComTidwallSjson(in *jlexer.Lexer, out *BenchStruct) { + if in.IsNull() { + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeString() + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "widget": + easyjsonDbb23193Decode(in, &out.Widget) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') +} +func easyjsonDbb23193EncodeGithubComTidwallSjson(out *jwriter.Writer, in BenchStruct) { + out.RawByte('{') + first := true + _ = first + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"widget\":") + easyjsonDbb23193Encode(out, in.Widget) + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v BenchStruct) MarshalEasyJSONFromData() ([]byte, error) { + w := jwriter.Writer{} + easyjsonDbb23193EncodeGithubComTidwallSjson(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v BenchStruct) MarshalEasyJSON(w *jwriter.Writer) { + easyjsonDbb23193EncodeGithubComTidwallSjson(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *BenchStruct) UnmarshalEasyJSONFromData(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjsonDbb23193DecodeGithubComTidwallSjson(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *BenchStruct) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjsonDbb23193DecodeGithubComTidwallSjson(l, v) +} +func easyjsonDbb23193Decode(in *jlexer.Lexer, out *struct { + Debug string "json:\"debug\"" + Window struct { + Title string "json:\"title\"" + Name string "json:\"name\"" + Width int "json:\"width\"" + Height int "json:\"height\"" + } "json:\"window\"" + Image struct { + Src string "json:\"src\"" + HOffset int "json:\"hOffset\"" + VOffset int "json:\"vOffset\"" + Alignment string "json:\"alignment\"" + } "json:\"image\"" + Text struct { + Data string "json:\"data\"" + Size int "json:\"size\"" + Style string "json:\"style\"" + VOffset int "json:\"vOffset\"" + Alignment string "json:\"alignment\"" + OnMouseUp string "json:\"onMouseUp\"" + } "json:\"text\"" +}) { + if in.IsNull() { + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeString() + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "debug": + out.Debug = string(in.String()) + case "window": + easyjsonDbb23193Decode1(in, &out.Window) + case "image": + easyjsonDbb23193Decode2(in, &out.Image) + case "text": + easyjsonDbb23193Decode3(in, &out.Text) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') +} +func easyjsonDbb23193Encode(out *jwriter.Writer, in struct { + Debug string "json:\"debug\"" + Window struct { + Title string "json:\"title\"" + Name string "json:\"name\"" + Width int "json:\"width\"" + Height int "json:\"height\"" + } "json:\"window\"" + Image struct { + Src string "json:\"src\"" + HOffset int "json:\"hOffset\"" + VOffset int "json:\"vOffset\"" + Alignment string "json:\"alignment\"" + } "json:\"image\"" + Text struct { + Data string "json:\"data\"" + Size int "json:\"size\"" + Style string "json:\"style\"" + VOffset int "json:\"vOffset\"" + Alignment string "json:\"alignment\"" + OnMouseUp 
string "json:\"onMouseUp\"" + } "json:\"text\"" +}) { + out.RawByte('{') + first := true + _ = first + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"debug\":") + out.String(string(in.Debug)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"window\":") + easyjsonDbb23193Encode1(out, in.Window) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"image\":") + easyjsonDbb23193Encode2(out, in.Image) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"text\":") + easyjsonDbb23193Encode3(out, in.Text) + out.RawByte('}') +} +func easyjsonDbb23193Decode3(in *jlexer.Lexer, out *struct { + Data string "json:\"data\"" + Size int "json:\"size\"" + Style string "json:\"style\"" + VOffset int "json:\"vOffset\"" + Alignment string "json:\"alignment\"" + OnMouseUp string "json:\"onMouseUp\"" +}) { + if in.IsNull() { + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeString() + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "data": + out.Data = string(in.String()) + case "size": + out.Size = int(in.Int()) + case "style": + out.Style = string(in.String()) + case "vOffset": + out.VOffset = int(in.Int()) + case "alignment": + out.Alignment = string(in.String()) + case "onMouseUp": + out.OnMouseUp = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') +} +func easyjsonDbb23193Encode3(out *jwriter.Writer, in struct { + Data string "json:\"data\"" + Size int "json:\"size\"" + Style string "json:\"style\"" + VOffset int "json:\"vOffset\"" + Alignment string "json:\"alignment\"" + OnMouseUp string "json:\"onMouseUp\"" +}) { + out.RawByte('{') + first := true + _ = first + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"data\":") + out.String(string(in.Data)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"size\":") + out.Int(int(in.Size)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"style\":") + out.String(string(in.Style)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"vOffset\":") + out.Int(int(in.VOffset)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"alignment\":") + out.String(string(in.Alignment)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"onMouseUp\":") + out.String(string(in.OnMouseUp)) + out.RawByte('}') +} +func easyjsonDbb23193Decode2(in *jlexer.Lexer, out *struct { + Src string "json:\"src\"" + HOffset int "json:\"hOffset\"" + VOffset int "json:\"vOffset\"" + Alignment string "json:\"alignment\"" +}) { + if in.IsNull() { + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeString() + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "src": + out.Src = string(in.String()) + case "hOffset": + out.HOffset = int(in.Int()) + case "vOffset": + out.VOffset = int(in.Int()) + case "alignment": + out.Alignment = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') +} +func easyjsonDbb23193Encode2(out *jwriter.Writer, in struct { + Src string "json:\"src\"" + HOffset int "json:\"hOffset\"" + VOffset int "json:\"vOffset\"" + Alignment string "json:\"alignment\"" +}) { + out.RawByte('{') + first := true + _ = first + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"src\":") + out.String(string(in.Src)) + if !first { + 
out.RawByte(',') + } + first = false + out.RawString("\"hOffset\":") + out.Int(int(in.HOffset)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"vOffset\":") + out.Int(int(in.VOffset)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"alignment\":") + out.String(string(in.Alignment)) + out.RawByte('}') +} +func easyjsonDbb23193Decode1(in *jlexer.Lexer, out *struct { + Title string "json:\"title\"" + Name string "json:\"name\"" + Width int "json:\"width\"" + Height int "json:\"height\"" +}) { + if in.IsNull() { + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeString() + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "title": + out.Title = string(in.String()) + case "name": + out.Name = string(in.String()) + case "width": + out.Width = int(in.Int()) + case "height": + out.Height = int(in.Int()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') +} +func easyjsonDbb23193Encode1(out *jwriter.Writer, in struct { + Title string "json:\"title\"" + Name string "json:\"name\"" + Width int "json:\"width\"" + Height int "json:\"height\"" +}) { + out.RawByte('{') + first := true + _ = first + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"title\":") + out.String(string(in.Title)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"name\":") + out.String(string(in.Name)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"width\":") + out.Int(int(in.Width)) + if !first { + out.RawByte(',') + } + first = false + out.RawString("\"height\":") + out.Int(int(in.Height)) + out.RawByte('}') +} +func (mj *BenchStruct) MarshalFFJSONFromData() ([]byte, error) { + var buf fflib.Buffer + if mj == nil { + buf.WriteString("null") + return buf.Bytes(), nil + } + err := mj.MarshalJSONBufFFJSON(&buf) + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} +func (mj *BenchStruct) MarshalJSONBufFFJSON(buf fflib.EncodingBuffer) error { + if mj == nil { + buf.WriteString("null") + return nil + } + var err error + var obj []byte + _ = obj + _ = err + /* Inline struct. type=struct { Debug string "json:\"debug\""; Window struct { Title string "json:\"title\""; Name string "json:\"name\""; Width int "json:\"width\""; Height int "json:\"height\"" } "json:\"window\""; Image struct { Src string "json:\"src\""; HOffset int "json:\"hOffset\""; VOffset int "json:\"vOffset\""; Alignment string "json:\"alignment\"" } "json:\"image\""; Text struct { Data string "json:\"data\""; Size int "json:\"size\""; Style string "json:\"style\""; VOffset int "json:\"vOffset\""; Alignment string "json:\"alignment\""; OnMouseUp string "json:\"onMouseUp\"" } "json:\"text\"" } kind=struct */ + buf.WriteString(`{"widget":{ "debug":`) + fflib.WriteJsonString(buf, string(mj.Widget.Debug)) + /* Inline struct. type=struct { Title string "json:\"title\""; Name string "json:\"name\""; Width int "json:\"width\""; Height int "json:\"height\"" } kind=struct */ + buf.WriteString(`,"window":{ "title":`) + fflib.WriteJsonString(buf, string(mj.Widget.Window.Title)) + buf.WriteString(`,"name":`) + fflib.WriteJsonString(buf, string(mj.Widget.Window.Name)) + buf.WriteString(`,"width":`) + fflib.FormatBits2(buf, uint64(mj.Widget.Window.Width), 10, mj.Widget.Window.Width < 0) + buf.WriteString(`,"height":`) + fflib.FormatBits2(buf, uint64(mj.Widget.Window.Height), 10, mj.Widget.Window.Height < 0) + buf.WriteByte('}') + /* Inline struct. 
type=struct { Src string "json:\"src\""; HOffset int "json:\"hOffset\""; VOffset int "json:\"vOffset\""; Alignment string "json:\"alignment\"" } kind=struct */ + buf.WriteString(`,"image":{ "src":`) + fflib.WriteJsonString(buf, string(mj.Widget.Image.Src)) + buf.WriteString(`,"hOffset":`) + fflib.FormatBits2(buf, uint64(mj.Widget.Image.HOffset), 10, mj.Widget.Image.HOffset < 0) + buf.WriteString(`,"vOffset":`) + fflib.FormatBits2(buf, uint64(mj.Widget.Image.VOffset), 10, mj.Widget.Image.VOffset < 0) + buf.WriteString(`,"alignment":`) + fflib.WriteJsonString(buf, string(mj.Widget.Image.Alignment)) + buf.WriteByte('}') + /* Inline struct. type=struct { Data string "json:\"data\""; Size int "json:\"size\""; Style string "json:\"style\""; VOffset int "json:\"vOffset\""; Alignment string "json:\"alignment\""; OnMouseUp string "json:\"onMouseUp\"" } kind=struct */ + buf.WriteString(`,"text":{ "data":`) + fflib.WriteJsonString(buf, string(mj.Widget.Text.Data)) + buf.WriteString(`,"size":`) + fflib.FormatBits2(buf, uint64(mj.Widget.Text.Size), 10, mj.Widget.Text.Size < 0) + buf.WriteString(`,"style":`) + fflib.WriteJsonString(buf, string(mj.Widget.Text.Style)) + buf.WriteString(`,"vOffset":`) + fflib.FormatBits2(buf, uint64(mj.Widget.Text.VOffset), 10, mj.Widget.Text.VOffset < 0) + buf.WriteString(`,"alignment":`) + fflib.WriteJsonString(buf, string(mj.Widget.Text.Alignment)) + buf.WriteString(`,"onMouseUp":`) + fflib.WriteJsonString(buf, string(mj.Widget.Text.OnMouseUp)) + buf.WriteByte('}') + buf.WriteByte('}') + buf.WriteByte('}') + return nil +} + +const ( + ffj_t_BenchStructbase = iota + ffj_t_BenchStructno_such_key + + ffj_t_BenchStruct_Widget +) + +var ffj_key_BenchStruct_Widget = []byte("widget") + +func (uj *BenchStruct) UnmarshalFFJSONFromData(input []byte) error { + fs := fflib.NewFFLexer(input) + return uj.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) +} + +func (uj *BenchStruct) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { + var err error = nil + currentKey := ffj_t_BenchStructbase + _ = currentKey + tok := fflib.FFTok_init + wantedTok := fflib.FFTok_init + +mainparse: + for { + tok = fs.Scan() + // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) + if tok == fflib.FFTok_error { + goto tokerror + } + + switch state { + + case fflib.FFParse_map_start: + if tok != fflib.FFTok_left_bracket { + wantedTok = fflib.FFTok_left_bracket + goto wrongtokenerror + } + state = fflib.FFParse_want_key + continue + + case fflib.FFParse_after_value: + if tok == fflib.FFTok_comma { + state = fflib.FFParse_want_key + } else if tok == fflib.FFTok_right_bracket { + goto done + } else { + wantedTok = fflib.FFTok_comma + goto wrongtokenerror + } + + case fflib.FFParse_want_key: + // json {} ended. goto exit. woo. + if tok == fflib.FFTok_right_bracket { + goto done + } + if tok != fflib.FFTok_string { + wantedTok = fflib.FFTok_string + goto wrongtokenerror + } + + kn := fs.Output.Bytes() + if len(kn) <= 0 { + // "" case. hrm. 
+ currentKey = ffj_t_BenchStructno_such_key + state = fflib.FFParse_want_colon + goto mainparse + } else { + switch kn[0] { + + case 'w': + + if bytes.Equal(ffj_key_BenchStruct_Widget, kn) { + currentKey = ffj_t_BenchStruct_Widget + state = fflib.FFParse_want_colon + goto mainparse + } + + } + + if fflib.SimpleLetterEqualFold(ffj_key_BenchStruct_Widget, kn) { + currentKey = ffj_t_BenchStruct_Widget + state = fflib.FFParse_want_colon + goto mainparse + } + + currentKey = ffj_t_BenchStructno_such_key + state = fflib.FFParse_want_colon + goto mainparse + } + + case fflib.FFParse_want_colon: + if tok != fflib.FFTok_colon { + wantedTok = fflib.FFTok_colon + goto wrongtokenerror + } + state = fflib.FFParse_want_value + continue + case fflib.FFParse_want_value: + + if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { + switch currentKey { + + case ffj_t_BenchStruct_Widget: + goto handle_Widget + + case ffj_t_BenchStructno_such_key: + err = fs.SkipField(tok) + if err != nil { + return fs.WrapErr(err) + } + state = fflib.FFParse_after_value + goto mainparse + } + } else { + goto wantedvalue + } + } + } + +handle_Widget: + + /* handler: uj.Widget type=struct { Debug string "json:\"debug\""; Window struct { Title string "json:\"title\""; Name string "json:\"name\""; Width int "json:\"width\""; Height int "json:\"height\"" } "json:\"window\""; Image struct { Src string "json:\"src\""; HOffset int "json:\"hOffset\""; VOffset int "json:\"vOffset\""; Alignment string "json:\"alignment\"" } "json:\"image\""; Text struct { Data string "json:\"data\""; Size int "json:\"size\""; Style string "json:\"style\""; VOffset int "json:\"vOffset\""; Alignment string "json:\"alignment\""; OnMouseUp string "json:\"onMouseUp\"" } "json:\"text\"" } kind=struct quoted=false*/ + + { + /* Falling back. type=struct { Debug string "json:\"debug\""; Window struct { Title string "json:\"title\""; Name string "json:\"name\""; Width int "json:\"width\""; Height int "json:\"height\"" } "json:\"window\""; Image struct { Src string "json:\"src\""; HOffset int "json:\"hOffset\""; VOffset int "json:\"vOffset\""; Alignment string "json:\"alignment\"" } "json:\"image\""; Text struct { Data string "json:\"data\""; Size int "json:\"size\""; Style string "json:\"style\""; VOffset int "json:\"vOffset\""; Alignment string "json:\"alignment\""; OnMouseUp string "json:\"onMouseUp\"" } "json:\"text\"" } kind=struct */ + tbuf, err := fs.CaptureField(tok) + if err != nil { + return fs.WrapErr(err) + } + + err = gojson.Unmarshal(tbuf, &uj.Widget) + if err != nil { + return fs.WrapErr(err) + } + } + + state = fflib.FFParse_after_value + goto mainparse + +wantedvalue: + return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) +wrongtokenerror: + return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) +tokerror: + if fs.BigError != nil { + return fs.WrapErr(fs.BigError) + } + err = fs.Error.ToError() + if err != nil { + return fs.WrapErr(err) + } + panic("ffjson-generated: unreachable, please report bug.") +done: + return nil +}
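Two asides on the material above, neither part of the vendored file. First, the block marked as autogenerated inlines the easyjson and ffjson marshalers for BenchStruct, presumably so the comparison benchmarks build without a separate codegen step. Second, TestRandomData is a hand-rolled fuzzer that only checks that SetRaw never panics on arbitrary input; on Go 1.18 and later the same property could be written as a native fuzz target. A hypothetical sketch (FuzzSetRaw does not exist in the package):

    package sjson

    import "testing"

    // FuzzSetRaw mirrors TestRandomData: returned errors are acceptable,
    // panics are not.
    func FuzzSetRaw(f *testing.F) {
        f.Add(`{"hello":"world"}`)
        f.Fuzz(func(t *testing.T, doc string) {
            _, _ = SetRaw(doc, "zzzz.zzzz.zzzz", "123")
        })
    }

Run with go test -fuzz=FuzzSetRaw; the seeded entry also keeps it exercised as an ordinary test under plain go test.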