diff --git a/cmd/config-v14.go b/cmd/config-v14.go index 14ae16725..070b7cce5 100644 --- a/cmd/config-v14.go +++ b/cmd/config-v14.go @@ -18,11 +18,13 @@ package cmd import ( "errors" + "io/ioutil" "os" "strings" "sync" "github.com/minio/minio/pkg/quick" + "github.com/tidwall/gjson" ) // Read Write mutex for safe access to ServerConfig. @@ -158,6 +160,116 @@ func loadConfig(envParams envParams) error { return nil } +// doCheckDupJSONKeys recursively detects duplicate json keys +func doCheckDupJSONKeys(key, value gjson.Result) error { + // Map of key occurrences in the current scope, used + // to detect any duplicated json key. + keysOcc := make(map[string]int) + + // Holds the found error + var checkErr error + + // Iterate over keys in the current json scope + value.ForEach(func(k, v gjson.Result) bool { + // If the current key is not null, check whether its + // value contains any duplicated keys. + if k.Type != gjson.Null { + keysOcc[k.String()]++ + checkErr = doCheckDupJSONKeys(k, v) + } + return checkErr == nil + }) + + // Return any error found while recursing + if checkErr != nil { + return errors.New(key.String() + " => " + checkErr.Error()) + } + + // Check for duplicated keys + for k, v := range keysOcc { + if v > 1 { + return errors.New(key.String() + " => `" + k + "` entry is duplicated") + } + } + + return nil +} + +// Check recursively if a key is duplicated in the same json scope +// e.g.: +// `{ "key" : { "key" ..` is accepted +// `{ "key" : { "subkey" : "val1", "subkey": "val2" ..` returns a subkey duplicated error +func checkDupJSONKeys(json string) error { + // Parse config with gjson library + config := gjson.Parse(json) + + // Create a fake rootKey since the root json scope has no direct representation + // in the gjson library. + rootKey := gjson.Result{Type: gjson.String, Str: minioConfigFile} + + // Check if loaded json contains any duplicated keys + return doCheckDupJSONKeys(rootKey, config) +} + +// validateConfig checks the loaded config file for a valid version, duplicated json keys and valid field values +func validateConfig() error { + + // Get the config file path + configFile := getConfigFile() + + srvCfg := &serverConfigV14{} + + // Load config file + qc, err := quick.New(srvCfg) + if err != nil { + return err + } + if err = qc.Load(configFile); err != nil { + return err + } + + // Check if config version is valid + if srvCfg.GetVersion() != v14 { + return errors.New("bad config version, expected: " + v14) + } + + // Load the config file json and check for duplicated json keys + jsonBytes, err := ioutil.ReadFile(configFile) + if err != nil { + return err + } + if err := checkDupJSONKeys(string(jsonBytes)); err != nil { + return err + } + + // Validate region field + if srvCfg.GetRegion() == "" { + return errors.New("region config is empty") + } + + // Validate browser field + if b := strings.ToLower(srvCfg.GetBrowser()); b != "on" && b != "off" { + return errors.New("invalid browser config") + } + + // Validate credential field + if err := srvCfg.Credential.Validate(); err != nil { + return err + } + + // Validate logger field + if err := srvCfg.Logger.Validate(); err != nil { + return err + } + + // Validate notify field + if err := srvCfg.Notify.Validate(); err != nil { + return err + } + + return nil +} + +// serverConfig server config.
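For context on the duplicate-key check added above: gjson's `ForEach` walks the raw json text, so a key that occurs twice in the same scope is simply visited twice, which is why counting occurrences per scope is enough. Below is a minimal standalone sketch of that idea (illustrative only, not code from this patch; the `dupKey` helper is hypothetical):

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

// dupKey reports the first duplicated key found in any scope of the
// given json value, mirroring the recursive walk in doCheckDupJSONKeys.
func dupKey(value gjson.Result) (string, bool) {
	seen := map[string]int{}
	dup, found := "", false
	value.ForEach(func(k, v gjson.Result) bool {
		// Array elements are passed with a null key; only object keys count.
		if k.Type != gjson.Null {
			seen[k.String()]++
			if seen[k.String()] > 1 {
				dup, found = k.String(), true
				return false // stop iterating
			}
			// Recurse into nested objects and arrays.
			if d, ok := dupKey(v); ok {
				dup, found = d, true
				return false
			}
		}
		return true
	})
	return dup, found
}

func main() {
	fmt.Println(dupKey(gjson.Parse(`{"version":"14","browser":"on","browser":"off"}`)))
	// Expected output: browser true
}
```

The patched doCheckDupJSONKeys does the same walk but also prefixes the returned error with the enclosing key, so the failing scope shows up in the startup message.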
var serverConfig *serverConfigV14 diff --git a/cmd/config-v14_test.go b/cmd/config-v14_test.go index d3ca1aba4..eadb5966a 100644 --- a/cmd/config-v14_test.go +++ b/cmd/config-v14_test.go @@ -17,9 +17,13 @@ package cmd import ( + "io/ioutil" "os" + "path/filepath" "reflect" "testing" + + "github.com/tidwall/gjson" ) func TestServerConfig(t *testing.T) { @@ -167,3 +171,117 @@ func TestServerConfigWithEnvs(t *testing.T) { t.Errorf("Expecting access key to be `minio123` found %s", cred.SecretKey) } } + +func TestCheckDupJSONKeys(t *testing.T) { + testCases := []struct { + json string + shouldPass bool + }{ + {`{}`, true}, + {`{"version" : "13"}`, true}, + {`{"version" : "13", "version": "14"}`, false}, + {`{"version" : "13", "credential": {"accessKey": "12345"}}`, true}, + {`{"version" : "13", "credential": {"accessKey": "12345", "accessKey":"12345"}}`, false}, + {`{"version" : "13", "notify": {"amqp": {"1"}, "webhook":{"3"}}}`, true}, + {`{"version" : "13", "notify": {"amqp": {"1"}, "amqp":{"3"}}}`, false}, + {`{"version" : "13", "notify": {"amqp": {"1":{}, "2":{}}}}`, true}, + {`{"version" : "13", "notify": {"amqp": {"1":{}, "1":{}}}}`, false}, + } + + for i, testCase := range testCases { + err := doCheckDupJSONKeys(gjson.Result{}, gjson.Parse(testCase.json)) + if testCase.shouldPass && err != nil { + t.Errorf("Test %d, should pass but it failed with err = %v", i+1, err) + } + if !testCase.shouldPass && err == nil { + t.Errorf("Test %d, should fail but it succeed.", i+1) + } + } + +} + +func TestValidateConfig(t *testing.T) { + rootPath, err := newTestConfig(globalMinioDefaultRegion) + if err != nil { + t.Fatalf("Init Test config failed") + } + // remove the root directory after the test ends. + defer removeAll(rootPath) + + configPath := filepath.Join(rootPath, minioConfigFile) + + v := v14 + + testCases := []struct { + configData string + shouldPass bool + }{ + // Test 1 - wrong json + {`{`, false}, + + // Test 2 - empty json + {`{}`, false}, + + // Test 3 - wrong config version + {`{"version": "10"}`, false}, + + // Test 4 - wrong browser parameter + {`{"version": "` + v + `", "browser": "foo"}`, false}, + + // Test 5 - missing credential + {`{"version": "` + v + `", "browser": "on"}`, false}, + + // Test 6 - missing secret key + {`{"version": "` + v + `", "browser": "on", "credential" : {"accessKey":"minio", "secretKey":""}}`, false}, + + // Test 7 - missing region + {`{"version": "` + v + `", "browser": "on", "credential" : {"accessKey":"minio", "secretKey":"minio123"}}`, false}, + + // Test 8 - success + {`{"version": "` + v + `", "browser": "on", "region":"us-east-1", "credential" : {"accessKey":"minio", "secretKey":"minio123"}}`, true}, + + // Test 9 - duplicated json keys + {`{"version": "` + v + `", "browser": "on", "browser": "on", "region":"us-east-1", "credential" : {"accessKey":"minio", "secretKey":"minio123"}}`, false}, + + // Test 10 - Wrong Console logger level + {`{"version": "` + v + `", "credential": { "accessKey": "minio", "secretKey": "minio123" }, "region": "us-east-1", "browser": "on", "logger": { "console": { "enable": true, "level": "foo" } }}`, false}, + + // Test 11 - Wrong File logger level + {`{"version": "` + v + `", "credential": { "accessKey": "minio", "secretKey": "minio123" }, "region": "us-east-1", "browser": "on", "logger": { "file": { "enable": true, "level": "foo" } }}`, false}, + + // Test 12 - Test AMQP + {`{"version": "` + v + `", "credential": { "accessKey": "minio", "secretKey": "minio123" }, "region": "us-east-1", "browser": "on", "notify": { 
"amqp": { "1": { "enable": true, "url": "", "exchange": "", "routingKey": "", "exchangeType": "", "mandatory": false, "immediate": false, "durable": false, "internal": false, "noWait": false, "autoDeleted": false }}}}`, false}, + + // Test 13 - Test NATS + {`{"version": "` + v + `", "credential": { "accessKey": "minio", "secretKey": "minio123" }, "region": "us-east-1", "browser": "on", "notify": { "nats": { "1": { "enable": true, "address": "", "subject": "", "username": "", "password": "", "token": "", "secure": false, "pingInterval": 0, "streaming": { "enable": false, "clusterID": "", "clientID": "", "async": false, "maxPubAcksInflight": 0 } } }}}`, false}, + + // Test 14 - Test ElasticSearch + {`{"version": "` + v + `", "credential": { "accessKey": "minio", "secretKey": "minio123" }, "region": "us-east-1", "browser": "on", "notify": { "elasticsearch": { "1": { "enable": true, "url": "", "index": "" } }}}`, false}, + + // Test 15 - Test Redis + {`{"version": "` + v + `", "credential": { "accessKey": "minio", "secretKey": "minio123" }, "region": "us-east-1", "browser": "on", "notify": { "redis": { "1": { "enable": true, "address": "", "password": "", "key": "" } }}}`, false}, + + // Test 16 - Test PostgreSQL + {`{"version": "` + v + `", "credential": { "accessKey": "minio", "secretKey": "minio123" }, "region": "us-east-1", "browser": "on", "notify": { "postgresql": { "1": { "enable": true, "connectionString": "", "table": "", "host": "", "port": "", "user": "", "password": "", "database": "" }}}}`, false}, + + // Test 17 - Test Kafka + {`{"version": "` + v + `", "credential": { "accessKey": "minio", "secretKey": "minio123" }, "region": "us-east-1", "browser": "on", "notify": { "kafka": { "1": { "enable": true, "brokers": null, "topic": "" } }}}`, false}, + + // Test 18 - Test Webhook + {`{"version": "` + v + `", "credential": { "accessKey": "minio", "secretKey": "minio123" }, "region": "us-east-1", "browser": "on", "notify": { "webhook": { "1": { "enable": true, "endpoint": "" } }}}`, false}, + } + + for i, testCase := range testCases { + if err := ioutil.WriteFile(configPath, []byte(testCase.configData), 0700); err != nil { + t.Error(err) + } + err := validateConfig() + if testCase.shouldPass && err != nil { + t.Errorf("Test %d, should pass but it failed with err = %v", i+1, err) + } + if !testCase.shouldPass && err == nil { + t.Errorf("Test %d, should fail but it succeed.", i+1) + } + } + +} diff --git a/cmd/credential.go b/cmd/credential.go index c75049050..c6f9953a4 100644 --- a/cmd/credential.go +++ b/cmd/credential.go @@ -19,6 +19,7 @@ package cmd import ( "crypto/rand" "encoding/base64" + "errors" "os" "github.com/minio/mc/pkg/console" @@ -75,6 +76,16 @@ type credential struct { secretKeyHash []byte } +func (c *credential) Validate() error { + if !isAccessKeyValid(c.AccessKey) { + return errors.New("Invalid access key") + } + if !isSecretKeyValid(c.SecretKey) { + return errors.New("Invalid secret key") + } + return nil +} + // Generate a bcrypt hashed key for input secret key. 
func mustGetHashedSecretKey(secretKey string) []byte { hashedSecretKey, err := bcrypt.GenerateFromPassword([]byte(secretKey), bcrypt.DefaultCost) diff --git a/cmd/logger-console-hook.go b/cmd/logger-console-hook.go index fdc5264e8..a437ec1ad 100644 --- a/cmd/logger-console-hook.go +++ b/cmd/logger-console-hook.go @@ -16,7 +16,12 @@ package cmd -import "github.com/Sirupsen/logrus" +import ( + "fmt" + "strings" + + "github.com/Sirupsen/logrus" +) // consoleLogger - default logger if not other logging is enabled. type consoleLogger struct { @@ -24,6 +29,14 @@ type consoleLogger struct { Level string `json:"level"` } +func (c *consoleLogger) Validate() error { + level := strings.ToLower(c.Level) + if level != "error" && level != "fatal" && level != "" { + return fmt.Errorf("`%s` level value not recognized", c.Level) + } + return nil +} + // enable console logger. func enableConsoleLogger() { clogger := serverConfig.Logger.GetConsole() diff --git a/cmd/logger-file-hook.go b/cmd/logger-file-hook.go index c7b5f6652..9fbd20ea2 100644 --- a/cmd/logger-file-hook.go +++ b/cmd/logger-file-hook.go @@ -20,6 +20,7 @@ import ( "fmt" "io/ioutil" "os" + "strings" "github.com/Sirupsen/logrus" ) @@ -30,6 +31,14 @@ type fileLogger struct { Level string `json:"level"` } +func (f *fileLogger) Validate() error { + level := strings.ToLower(f.Level) + if level != "error" && level != "fatal" && level != "" { + return fmt.Errorf("`%s` level value not recognized", f.Level) + } + return nil +} + type localFile struct { *os.File } diff --git a/cmd/logger.go b/cmd/logger.go index a0f999fcf..a6ef792b8 100644 --- a/cmd/logger.go +++ b/cmd/logger.go @@ -47,6 +47,20 @@ type logger struct { /// Logger related. +// Validate logger contents +func (l *logger) Validate() error { + if l == nil { + return nil + } + if err := l.Console.Validate(); err != nil { + return fmt.Errorf("`Console` field: %s", err.Error()) + } + if err := l.File.Validate(); err != nil { + return fmt.Errorf("`File` field: %s", err.Error()) + } + return nil +} + // SetFile set new file logger. func (l *logger) SetFile(flogger fileLogger) { l.Lock() diff --git a/cmd/notifier-config.go b/cmd/notifier-config.go index 2c9728158..531b4d016 100644 --- a/cmd/notifier-config.go +++ b/cmd/notifier-config.go @@ -16,7 +16,10 @@ package cmd -import "sync" +import ( + "fmt" + "sync" +) // Notifier represents collection of supported notification queues. 
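The two logger validators above accept only `error`, `fatal` or an empty (unset) level, case-insensitively. A small standalone sketch of that check (illustrative only; `levelValid` is a hypothetical stand-in, not a function from this patch):

```go
package main

import (
	"fmt"
	"strings"
)

// levelValid mirrors the check used by consoleLogger.Validate and
// fileLogger.Validate: only "error", "fatal" or an empty (unset)
// level pass, case-insensitively.
func levelValid(level string) bool {
	l := strings.ToLower(level)
	return l == "" || l == "error" || l == "fatal"
}

func main() {
	for _, lvl := range []string{"", "error", "FATAL", "debug"} {
		fmt.Printf("%q -> %v\n", lvl, levelValid(lvl))
	}
	// Only "debug" is expected to be rejected here.
}
```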
type notifier struct { @@ -41,6 +44,15 @@ func (a amqpConfigs) Clone() amqpConfigs { return a2 } +func (a amqpConfigs) Validate() error { + for k, v := range a { + if err := v.Validate(); err != nil { + return fmt.Errorf("AMQP [%s] configuration invalid: %s", k, err.Error()) + } + } + return nil +} + type natsConfigs map[string]natsNotify func (a natsConfigs) Clone() natsConfigs { @@ -51,6 +63,15 @@ func (a natsConfigs) Clone() natsConfigs { return a2 } +func (a natsConfigs) Validate() error { + for k, v := range a { + if err := v.Validate(); err != nil { + return fmt.Errorf("NATS [%s] configuration invalid: %s", k, err.Error()) + } + } + return nil +} + type elasticSearchConfigs map[string]elasticSearchNotify func (a elasticSearchConfigs) Clone() elasticSearchConfigs { @@ -61,6 +82,15 @@ func (a elasticSearchConfigs) Clone() elasticSearchConfigs { return a2 } +func (a elasticSearchConfigs) Validate() error { + for k, v := range a { + if err := v.Validate(); err != nil { + return fmt.Errorf("ElasticSearch [%s] configuration invalid: %s", k, err.Error()) + } + } + return nil +} + type redisConfigs map[string]redisNotify func (a redisConfigs) Clone() redisConfigs { @@ -71,6 +101,15 @@ func (a redisConfigs) Clone() redisConfigs { return a2 } +func (a redisConfigs) Validate() error { + for k, v := range a { + if err := v.Validate(); err != nil { + return fmt.Errorf("Redis [%s] configuration invalid: %s", k, err.Error()) + } + } + return nil +} + type postgreSQLConfigs map[string]postgreSQLNotify func (a postgreSQLConfigs) Clone() postgreSQLConfigs { @@ -81,6 +120,15 @@ func (a postgreSQLConfigs) Clone() postgreSQLConfigs { return a2 } +func (a postgreSQLConfigs) Validate() error { + for k, v := range a { + if err := v.Validate(); err != nil { + return fmt.Errorf("PostgreSQL [%s] configuration invalid: %s", k, err.Error()) + } + } + return nil +} + type kafkaConfigs map[string]kafkaNotify func (a kafkaConfigs) Clone() kafkaConfigs { @@ -91,6 +139,15 @@ func (a kafkaConfigs) Clone() kafkaConfigs { return a2 } +func (a kafkaConfigs) Validate() error { + for k, v := range a { + if err := v.Validate(); err != nil { + return fmt.Errorf("Kafka [%s] configuration invalid: %s", k, err.Error()) + } + } + return nil +} + type webhookConfigs map[string]webhookNotify func (a webhookConfigs) Clone() webhookConfigs { @@ -101,6 +158,43 @@ func (a webhookConfigs) Clone() webhookConfigs { return a2 } +func (a webhookConfigs) Validate() error { + for k, v := range a { + if err := v.Validate(); err != nil { + return fmt.Errorf("Webhook [%s] configuration invalid: %s", k, err.Error()) + } + } + return nil +} + +func (n *notifier) Validate() error { + if n == nil { + return nil + } + if err := n.AMQP.Validate(); err != nil { + return err + } + if err := n.NATS.Validate(); err != nil { + return err + } + if err := n.ElasticSearch.Validate(); err != nil { + return err + } + if err := n.Redis.Validate(); err != nil { + return err + } + if err := n.PostgreSQL.Validate(); err != nil { + return err + } + if err := n.Kafka.Validate(); err != nil { + return err + } + if err := n.Webhook.Validate(); err != nil { + return err + } + return nil +} + func (n *notifier) SetAMQPByID(accountID string, amqpn amqpNotify) { n.Lock() defer n.Unlock() diff --git a/cmd/notify-amqp.go b/cmd/notify-amqp.go index f3b09379c..8e3b4652e 100644 --- a/cmd/notify-amqp.go +++ b/cmd/notify-amqp.go @@ -40,6 +40,16 @@ type amqpNotify struct { AutoDeleted bool `json:"autoDeleted"` } +func (a *amqpNotify) Validate() error { + if !a.Enable { + return 
nil + } + if _, err := checkNetURL(a.URL); err != nil { + return err + } + return nil +} + type amqpConn struct { params amqpNotify *amqp.Connection diff --git a/cmd/notify-elasticsearch.go b/cmd/notify-elasticsearch.go index 508b7ed9e..4773feaf2 100644 --- a/cmd/notify-elasticsearch.go +++ b/cmd/notify-elasticsearch.go @@ -33,6 +33,16 @@ type elasticSearchNotify struct { Index string `json:"index"` } +func (e *elasticSearchNotify) Validate() error { + if !e.Enable { + return nil + } + if _, err := checkNetURL(e.URL); err != nil { + return err + } + return nil +} + type elasticClient struct { *elastic.Client params elasticSearchNotify diff --git a/cmd/notify-kafka.go b/cmd/notify-kafka.go index 7ddf454b8..3ce4112d2 100644 --- a/cmd/notify-kafka.go +++ b/cmd/notify-kafka.go @@ -17,6 +17,7 @@ package cmd import ( + "errors" "fmt" "io/ioutil" @@ -39,6 +40,16 @@ type kafkaNotify struct { Topic string `json:"topic"` } +func (k *kafkaNotify) Validate() error { + if !k.Enable { + return nil + } + if len(k.Brokers) == 0 { + return errors.New("No broker specified") + } + return nil +} + // kafkaConn contains the active connection to the Kafka cluster and // the topic to send event notifications to. type kafkaConn struct { diff --git a/cmd/notify-nats.go b/cmd/notify-nats.go index b18751ded..8fe0654d7 100644 --- a/cmd/notify-nats.go +++ b/cmd/notify-nats.go @@ -48,6 +48,16 @@ type natsNotify struct { Streaming natsNotifyStreaming `json:"streaming"` } +func (n *natsNotify) Validate() error { + if !n.Enable { + return nil + } + if _, err := checkNetURL(n.Address); err != nil { + return err + } + return nil +} + // natsIOConn abstracts connection to any type of NATS server type natsIOConn struct { params natsNotify diff --git a/cmd/notify-postgresql.go b/cmd/notify-postgresql.go index 116092520..1ea092f5b 100644 --- a/cmd/notify-postgresql.go +++ b/cmd/notify-postgresql.go @@ -84,6 +84,16 @@ type postgreSQLNotify struct { Database string `json:"database"` } +func (p *postgreSQLNotify) Validate() error { + if !p.Enable { + return nil + } + if _, err := checkNetURL(p.Host); err != nil { + return err + } + return nil +} + type pgConn struct { connStr string table string diff --git a/cmd/notify-redis.go b/cmd/notify-redis.go index 8e382edb5..b4730e404 100644 --- a/cmd/notify-redis.go +++ b/cmd/notify-redis.go @@ -32,6 +32,16 @@ type redisNotify struct { Key string `json:"key"` } +func (r *redisNotify) Validate() error { + if !r.Enable { + return nil + } + if _, err := checkNetURL(r.Addr); err != nil { + return err + } + return nil +} + type redisConn struct { *redis.Pool params redisNotify diff --git a/cmd/notify-webhook.go b/cmd/notify-webhook.go index 7bdb8f086..b028c8e4e 100644 --- a/cmd/notify-webhook.go +++ b/cmd/notify-webhook.go @@ -32,6 +32,16 @@ type webhookNotify struct { Endpoint string `json:"endpoint"` } +func (w *webhookNotify) Validate() error { + if !w.Enable { + return nil + } + if _, err := checkNetURL(w.Endpoint); err != nil { + return err + } + return nil +} + type httpConn struct { *http.Client Endpoint string diff --git a/cmd/server-main.go b/cmd/server-main.go index 6f44624ca..5f91e4de3 100644 --- a/cmd/server-main.go +++ b/cmd/server-main.go @@ -127,7 +127,7 @@ func initConfig() { // Config file does not exist, we create it fresh and return upon success. if !isConfigFileExists() { if err := newConfig(envs); err != nil { - console.Fatalf("Unable to initialize minio config for the first time. Err: %s.\n", err) + console.Fatalf("Unable to initialize minio config for the first time. 
Error: %s.\n", err) } console.Println("Created minio configuration file successfully at " + getConfigDir()) return @@ -136,9 +136,14 @@ // Migrate any old version of config / state files to newer format. migrate() + // Validate config file + if err := validateConfig(); err != nil { + console.Fatalf("Cannot validate configuration file. Error: %s\n", err) + } + // Once we have migrated all the old config, now load them. if err := loadConfig(envs); err != nil { - console.Fatalf("Unable to initialize minio config. Err: %s.\n", err) + console.Fatalf("Unable to initialize minio config. Error: %s.\n", err) } } diff --git a/cmd/utils.go b/cmd/utils.go index 3eeedae09..4d8512408 100644 --- a/cmd/utils.go +++ b/cmd/utils.go @@ -285,3 +285,16 @@ func isFile(path string) bool { return false } + +// checkNetURL - checks if the passed address corresponds +// to a network address (and not a file system path) +func checkNetURL(address string) (*url.URL, error) { + u, err := url.Parse(address) + if err != nil { + return nil, fmt.Errorf("`%s` invalid: %s", address, err.Error()) + } + if u.Host == "" { + return nil, fmt.Errorf("`%s` invalid network URL", address) + } + return u, nil +} diff --git a/vendor/github.com/tidwall/gjson/README.md b/vendor/github.com/tidwall/gjson/README.md index 1ee5ae3dd..b38f920bf 100644 --- a/vendor/github.com/tidwall/gjson/README.md +++ b/vendor/github.com/tidwall/gjson/README.md @@ -11,7 +11,7 @@

get a json value quickly

-GJSON is a Go package the provides a [very fast](#performance) and simple way to get a value from a json document. The reason for this library it to give efficient json indexing for the [BuntDB](https://github.com/tidwall/buntdb) project. +GJSON is a Go package that provides a [very fast](#performance) and simple way to get a value from a json document. The purpose for this library it to give efficient json indexing for the [BuntDB](https://github.com/tidwall/buntdb) project. Getting Started =============== @@ -27,7 +27,7 @@ $ go get -u github.com/tidwall/gjson This will retrieve the library. ## Get a value -Get searches json for the specified path. A path is in dot syntax, such as "name.last" or "age". This function expects that the json is well-formed and validates. Invalid json will not panic, but it may return back unexpected results. When the value is found it's returned immediately. +Get searches json for the specified path. A path is in dot syntax, such as "name.last" or "age". This function expects that the json is well-formed and validates. Invalid json will not panic, but it may return back unexpected results. When the value is found it's returned immediately. ```go package main @@ -47,6 +47,7 @@ This will print: ``` Prichard ``` +*There's also the [GetMany](#get-multiple-values-at-once) function to get multiple values at once, and [GetBytes](#working-with-bytes) for working with JSON byte slices.* ## Path Syntax @@ -63,25 +64,33 @@ The dot and wildcard characters can be escaped with '\'. "children": ["Sara","Alex","Jack"], "fav.movie": "Deer Hunter", "friends": [ - {"first": "James", "last": "Murphy"}, - {"first": "Roger", "last": "Craig"} + {"first": "Dale", "last": "Murphy", "age": 44}, + {"first": "Roger", "last": "Craig", "age": 68}, + {"first": "Jane", "last": "Murphy", "age": 47} ] } ``` ``` "name.last" >> "Anderson" "age" >> 37 +"children" >> ["Sara","Alex","Jack"] "children.#" >> 3 "children.1" >> "Alex" "child*.2" >> "Jack" "c?ildren.0" >> "Sara" "fav\.movie" >> "Deer Hunter" -"friends.#.first" >> [ "James", "Roger" ] +"friends.#.first" >> ["Dale","Roger","Jane"] "friends.1.last" >> "Craig" ``` -To query an array: + +You can also query an array for the first match by using `#[...]`, or find all matches with `#[...]#`. +Queries support the `==`, `!=`, `<`, `<=`, `>`, `>=` comparison operators and the simple pattern matching `%` operator. 
+ ``` -`friends.#[last="Murphy"].first` >> "James" +friends.#[last=="Murphy"].first >> "Dale" +friends.#[last=="Murphy"]#.first >> ["Dale","Jane"] +friends.#[age>45]#.last >> ["Craig","Murphy"] +friends.#[first%"D*"].last >> "Murphy" ``` ## Result Type @@ -105,7 +114,7 @@ result.Type // can be String, Number, True, False, Null, or JSON result.Str // holds the string result.Num // holds the float64 number result.Raw // holds the raw json -result.Multi // holds nested array values +result.Index // index of raw value in original json, zero means index unknown ``` There are a variety of handy functions that work on a result: @@ -113,16 +122,25 @@ There are a variety of handy functions that work on a result: ```go result.Value() interface{} result.Int() int64 +result.Uint() uint64 result.Float() float64 result.String() string result.Bool() bool result.Array() []gjson.Result result.Map() map[string]gjson.Result result.Get(path string) Result +result.ForEach(iterator func(key, value Result) bool) +result.Less(token Result, caseSensitive bool) bool ``` The `result.Value()` function returns an `interface{}` which requires type assertion and is one of the following Go types: + + +The `result.Array()` function returns back an array of values. +If the result represents a non-existent value, then an empty array will be returned. +If the result is not a JSON array, the return value will be an array containing one result. + ```go boolean >> bool number >> float64 @@ -169,6 +187,20 @@ name := gjson.Get(json, `programmers.#[lastName="Hunter"].firstName`) println(name.String()) // prints "Elliotte" ``` +## Iterate through an object or array + +The `ForEach` function allows for quickly iterating through an object or array. +The key and value are passed to the iterator function for objects. +Only the value is passed for arrays. +Returning `false` from an iterator will stop iteration. + +```go +result := gjson.Get(json, "programmers") +result.ForEach(func(key, value gjson.Result) bool{ + println(value.String()) + return true // keep iterating +}) +``` ## Simple Parse and Get @@ -184,7 +216,7 @@ gjson.Get(json, "name.last") ## Check for the existence of a value -Sometimes you just want to know you if a value exists. +Sometimes you just want to know if a value exists. ```go value := gjson.Get(json, "name.last") @@ -211,6 +243,40 @@ if !ok{ } ``` +## Working with Bytes + +If your JSON is contained in a `[]byte` slice, there's the [GetBytes](https://godoc.org/github.com/tidwall/gjson#GetBytes) function. This is preferred over `Get(string(data), path)`. + +```go +var json []byte = ... +result := gjson.GetBytes(json, path) +``` + +If you are using the `gjson.GetBytes(json, path)` function and you want to avoid converting `result.Raw` to a `[]byte`, then you can use this pattern: + +```go +var json []byte = ... +result := gjson.GetBytes(json, path) +var raw []byte +if result.Index > 0 { + raw = json[result.Index:result.Index+len(result.Raw)] +} else { + raw = []byte(result.Raw) +} +``` + +This is a best-effort no allocation sub slice of the original json. This method utilizes the `result.Index` field, which is the position of the raw data in the original json. It's possible that the value of `result.Index` equals zero, in which case the `result.Raw` is converted to a `[]byte`. + +## Get multiple values at once + +The `GetMany` function can be used to get multiple values at the same time, and is optimized to scan over a JSON payload once. 
+ +```go +results := gjson.GetMany(json, "name.first", "name.last", "age") +``` + +The return value is a `[]Result`, which will always contain exactly the same number of items as the input paths. + ## Performance Benchmarks of GJSON alongside [encoding/json](https://golang.org/pkg/encoding/json/), @@ -229,6 +295,17 @@ BenchmarkEasyJSONLexer-8 3000000 938 ns/op 613 B/op BenchmarkJSONParserGet-8 3000000 442 ns/op 21 B/op 0 allocs/op ``` +Benchmarks for the `GetMany` function: + +``` +BenchmarkGJSONGetMany4Paths-8 4000000 319 ns/op 112 B/op 0 allocs/op +BenchmarkGJSONGetMany8Paths-8 8000000 218 ns/op 56 B/op 0 allocs/op +BenchmarkGJSONGetMany16Paths-8 16000000 160 ns/op 56 B/op 0 allocs/op +BenchmarkGJSONGetMany32Paths-8 32000000 130 ns/op 64 B/op 0 allocs/op +BenchmarkGJSONGetMany64Paths-8 64000000 117 ns/op 64 B/op 0 allocs/op +BenchmarkGJSONGetMany128Paths-8 128000000 109 ns/op 64 B/op 0 allocs/op +``` + JSON document used: ```json @@ -267,6 +344,20 @@ widget.image.hOffset widget.text.onMouseUp ``` +For the `GetMany` benchmarks these paths are used: + +``` +widget.window.name +widget.image.hOffset +widget.text.onMouseUp +widget.window.title +widget.image.alignment +widget.text.style +widget.window.height +widget.image.src +widget.text.data +widget.text.size +``` *These benchmarks were run on a MacBook Pro 15" 2.8 GHz Intel Core i7 using Go 1.7.* diff --git a/vendor/github.com/tidwall/gjson/gjson.go b/vendor/github.com/tidwall/gjson/gjson.go index 5ad877455..9b28df2ca 100644 --- a/vendor/github.com/tidwall/gjson/gjson.go +++ b/vendor/github.com/tidwall/gjson/gjson.go @@ -4,6 +4,10 @@ package gjson import ( "reflect" "strconv" + + // It's totally safe to use this package, but in case your + // project or organization restricts the use of 'unsafe', + // there's the "github.com/tidwall/gjson-safe" package. "unsafe" "github.com/tidwall/match" @@ -27,6 +31,26 @@ const ( JSON ) +// String returns a string representation of the type. +func (t Type) String() string { + switch t { + default: + return "" + case Null: + return "Null" + case False: + return "False" + case Number: + return "Number" + case String: + return "String" + case True: + return "True" + case JSON: + return "JSON" + } +} + // Result represents a json value that is returned from Get(). type Result struct { // Type is the json type @@ -37,6 +61,8 @@ type Result struct { Str string // Num is the json number Num float64 + // Index of raw value in original json, zero means index unknown + Index int } // String returns a string representation of the value. @@ -86,6 +112,21 @@ func (t Result) Int() int64 { } } +// Uint returns an unsigned integer representation. +func (t Result) Uint() uint64 { + switch t.Type { + default: + return 0 + case True: + return 1 + case String: + n, _ := strconv.ParseUint(t.Str, 10, 64) + return n + case Number: + return uint64(t.Num) + } +} + // Float returns an float64 representation. func (t Result) Float() float64 { switch t.Type { @@ -101,16 +142,91 @@ func (t Result) Float() float64 { } } -// Array returns back an array of children. The result must be a JSON array. +// Array returns back an array of values. +// If the result represents a non-existent value, then an empty array will be returned. +// If the result is not a JSON array, the return value will be an array containing one result. 
func (t Result) Array() []Result { - if t.Type != JSON { + if !t.Exists() { return nil } + if t.Type != JSON { + return []Result{t} + } r := t.arrayOrMap('[', false) return r.a } -// Map returns back an map of children. The result should be a JSON array. +// ForEach iterates through values. +// If the result represents a non-existent value, then no values will be iterated. +// If the result is an Object, the iterator will pass the key and value of each item. +// If the result is an Array, the iterator will only pass the value of each item. +// If the result is not a JSON array or object, the iterator will pass back one value equal to the result. +func (t Result) ForEach(iterator func(key, value Result) bool) { + if !t.Exists() { + return + } + if t.Type != JSON { + iterator(Result{}, t) + return + } + json := t.Raw + var keys bool + var i int + var key, value Result + for ; i < len(json); i++ { + if json[i] == '{' { + i++ + key.Type = String + keys = true + break + } else if json[i] == '[' { + i++ + break + } + if json[i] > ' ' { + return + } + } + var str string + var vesc bool + var ok bool + for ; i < len(json); i++ { + if keys { + if json[i] != '"' { + continue + } + s := i + i, str, vesc, ok = parseString(json, i+1) + if !ok { + return + } + if vesc { + key.Str = unescape(str[1 : len(str)-1]) + } else { + key.Str = str[1 : len(str)-1] + } + key.Raw = str + key.Index = s + } + for ; i < len(json); i++ { + if json[i] <= ' ' || json[i] == ',' || json[i] == ':' { + continue + } + break + } + s := i + i, value, ok = parseAny(json, i, true) + if !ok { + return + } + value.Index = s + if !iterator(key, value) { + return + } + } +} + +// Map returns back an map of values. The result should be a JSON array. func (t Result) Map() map[string]Result { if t.Type != JSON { return map[string]Result{} @@ -232,7 +348,7 @@ end: return } -// Parse parses the json and returns a result +// Parse parses the json and returns a result. func Parse(json string) Result { var value Result for i := 0; i < len(json); i++ { @@ -270,6 +386,12 @@ func Parse(json string) Result { return value } +// ParseBytes parses the json and returns a result. +// If working with bytes, this method preferred over Parse(string(data)) +func ParseBytes(json []byte) Result { + return Parse(string(json)) +} + func squash(json string) string { // expects that the lead character is a '[' or '{' // squash the value, ignoring all nested arrays and objects. @@ -387,7 +509,13 @@ func tostr(json string) (raw string, str string) { break } } - return json[:i+1], unescape(json[1:i]) + var ret string + if i+1 < len(json) { + ret = json[:i+1] + } else { + ret = json[:i] + } + return ret, unescape(json[1:i]) } } return json, json[1:] @@ -506,6 +634,7 @@ type arrayPathResult struct { path string op string value string + all bool } } @@ -536,8 +665,12 @@ func parseArrayPath(path string) (r arrayPathResult) { } s := i for ; i < len(path); i++ { - if path[i] <= ' ' || path[i] == '=' || - path[i] == '<' || path[i] == '>' || + if path[i] <= ' ' || + path[i] == '!' || + path[i] == '=' || + path[i] == '<' || + path[i] == '>' || + path[i] == '%' || path[i] == ']' { break } @@ -551,7 +684,11 @@ func parseArrayPath(path string) (r arrayPathResult) { } if i < len(path) { s = i - if path[i] == '<' || path[i] == '>' { + if path[i] == '!' 
{ + if i < len(path)-1 && path[i+1] == '=' { + i++ + } + } else if path[i] == '<' || path[i] == '>' { if i < len(path)-1 && path[i+1] == '=' { i++ } @@ -596,6 +733,9 @@ func parseArrayPath(path string) (r arrayPathResult) { } } } else if path[i] == ']' { + if i+1 < len(path) && path[i+1] == '#' { + r.query.all = true + } break } } @@ -877,6 +1017,8 @@ func queryMatches(rp *arrayPathResult, value Result) bool { switch rp.query.op { case "=": return value.Str == rpv + case "!=": + return value.Str != rpv case "<": return value.Str < rpv case "<=": @@ -885,12 +1027,16 @@ func queryMatches(rp *arrayPathResult, value Result) bool { return value.Str > rpv case ">=": return value.Str >= rpv + case "%": + return match.Match(value.Str, rpv) } case Number: rpvn, _ := strconv.ParseFloat(rpv, 64) switch rp.query.op { case "=": return value.Num == rpvn + case "!=": + return value.Num == rpvn case "<": return value.Num < rpvn case "<=": @@ -904,6 +1050,8 @@ func queryMatches(rp *arrayPathResult, value Result) bool { switch rp.query.op { case "=": return rpv == "true" + case "!=": + return rpv != "true" case ">": return rpv == "false" case ">=": @@ -913,6 +1061,8 @@ func queryMatches(rp *arrayPathResult, value Result) bool { switch rp.query.op { case "=": return rpv == "false" + case "!=": + return rpv != "false" case "<": return rpv == "true" case "<=": @@ -927,6 +1077,7 @@ func parseArray(c *parseContext, i int, path string) (int, bool) { var h int var alog []int var partidx int + var multires []byte rp := parseArrayPath(path) if !rp.arrch { n, err := strconv.ParseUint(rp.part, 10, 64) @@ -983,12 +1134,21 @@ func parseArray(c *parseContext, i int, path string) (int, bool) { res := Get(val, rp.query.path) if queryMatches(&rp, res) { if rp.more { - c.value = Get(val, rp.path) + res = Get(val, rp.path) } else { - c.value.Raw = val - c.value.Type = JSON + res = Result{Raw: val, Type: JSON} + } + if rp.query.all { + if len(multires) == 0 { + multires = append(multires, '[') + } else { + multires = append(multires, ',') + } + multires = append(multires, res.Raw...) + } else { + c.value = res + return i, true } - return i, true } } else if hit { if rp.alogok { @@ -1051,13 +1211,14 @@ func parseArray(c *parseContext, i int, path string) (int, bool) { if rp.alogok { var jsons = make([]byte, 0, 64) jsons = append(jsons, '[') - for j := 0; j < len(alog); j++ { + for j, k := 0, 0; j < len(alog); j++ { res := Get(c.json[alog[j]:], rp.alogkey) if res.Exists() { - if j > 0 { + if k > 0 { jsons = append(jsons, ',') } jsons = append(jsons, []byte(res.Raw)...) + k++ } } jsons = append(jsons, ']') @@ -1071,9 +1232,16 @@ func parseArray(c *parseContext, i int, path string) (int, bool) { c.value.Raw = val c.value.Type = Number c.value.Num = float64(h - 1) + c.calcd = true return i + 1, true } } + if len(multires) > 0 && !c.value.Exists() { + c.value = Result{ + Raw: string(append(multires, ']')), + Type: JSON, + } + } return i + 1, false } break @@ -1085,6 +1253,7 @@ func parseArray(c *parseContext, i int, path string) (int, bool) { type parseContext struct { json string value Result + calcd bool } // Get searches json for the specified path. @@ -1093,7 +1262,7 @@ type parseContext struct { // Invalid json will not panic, but it may return back unexpected results. // When the value is found it's returned immediately. // -// A path is a series of keys seperated by a dot. +// A path is a series of keys searated by a dot. // A key may contain special wildcard characters '*' and '?'. 
// To access an array value use the index as the key. // To get the number of elements in an array or to access a child path, use the '#' character. @@ -1110,11 +1279,12 @@ type parseContext struct { // } // "name.last" >> "Anderson" // "age" >> 37 +// "children" >> ["Sara","Alex","Jack"] // "children.#" >> 3 // "children.1" >> "Alex" // "child*.2" >> "Jack" // "c?ildren.0" >> "Sara" -// "friends.#.first" >> [ "James", "Roger" ] +// "friends.#.first" >> ["James","Roger"] // func Get(json, path string) Result { var i int @@ -1131,8 +1301,53 @@ func Get(json, path string) Result { break } } + if len(c.value.Raw) > 0 && !c.calcd { + jhdr := *(*reflect.StringHeader)(unsafe.Pointer(&json)) + rhdr := *(*reflect.StringHeader)(unsafe.Pointer(&(c.value.Raw))) + c.value.Index = int(rhdr.Data - jhdr.Data) + if c.value.Index < 0 || c.value.Index >= len(json) { + c.value.Index = 0 + } + } return c.value } +func fromBytesGet(result Result) Result { + // safely get the string headers + rawhi := *(*reflect.StringHeader)(unsafe.Pointer(&result.Raw)) + strhi := *(*reflect.StringHeader)(unsafe.Pointer(&result.Str)) + // create byte slice headers + rawh := reflect.SliceHeader{Data: rawhi.Data, Len: rawhi.Len} + strh := reflect.SliceHeader{Data: strhi.Data, Len: strhi.Len} + if strh.Data == 0 { + // str is nil + if rawh.Data == 0 { + // raw is nil + result.Raw = "" + } else { + // raw has data, safely copy the slice header to a string + result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh))) + } + result.Str = "" + } else if rawh.Data == 0 { + // raw is nil + result.Raw = "" + // str has data, safely copy the slice header to a string + result.Str = string(*(*[]byte)(unsafe.Pointer(&strh))) + } else if strh.Data >= rawh.Data && + int(strh.Data)+strh.Len <= int(rawh.Data)+rawh.Len { + // Str is a substring of Raw. + start := int(strh.Data - rawh.Data) + // safely copy the raw slice header + result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh))) + // substring the raw + result.Str = result.Raw[start : start+strh.Len] + } else { + // safely copy both the raw and str slice headers to strings + result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh))) + result.Str = string(*(*[]byte)(unsafe.Pointer(&strh))) + } + return result +} // GetBytes searches json for the specified path. // If working with bytes, this method preferred over Get(string(data), path) @@ -1141,29 +1356,7 @@ func GetBytes(json []byte, path string) Result { if json != nil { // unsafe cast to string result = Get(*(*string)(unsafe.Pointer(&json)), path) - // copy of string data for safety. - rawh := *(*reflect.SliceHeader)(unsafe.Pointer(&result.Raw)) - strh := *(*reflect.SliceHeader)(unsafe.Pointer(&result.Str)) - if strh.Data == 0 { - if rawh.Data == 0 { - result.Raw = "" - } else { - result.Raw = string(*(*[]byte)(unsafe.Pointer(&result.Raw))) - } - result.Str = "" - } else if rawh.Data == 0 { - result.Raw = "" - result.Str = string(*(*[]byte)(unsafe.Pointer(&result.Str))) - } else if strh.Data >= rawh.Data && - int(strh.Data)+strh.Len <= int(rawh.Data)+rawh.Len { - // Str is a substring of Raw. 
- start := int(strh.Data - rawh.Data) - result.Raw = string(*(*[]byte)(unsafe.Pointer(&result.Raw))) - result.Str = result.Raw[start : start+strh.Len] - } else { - result.Raw = string(*(*[]byte)(unsafe.Pointer(&result.Raw))) - result.Str = string(*(*[]byte)(unsafe.Pointer(&result.Str))) - } + result = fromBytesGet(result) } return result } @@ -1289,3 +1482,465 @@ func stringLessInsensitive(a, b string) bool { } return len(a) < len(b) } + +// parseAny parses the next value from a json string. +// A Result is returned when the hit param is set. +// The return values are (i int, res Result, ok bool) +func parseAny(json string, i int, hit bool) (int, Result, bool) { + var res Result + var val string + for ; i < len(json); i++ { + if json[i] == '{' || json[i] == '[' { + i, val = parseSquash(json, i) + if hit { + res.Raw = val + res.Type = JSON + } + return i, res, true + } + if json[i] <= ' ' { + continue + } + switch json[i] { + case '"': + i++ + var vesc bool + var ok bool + i, val, vesc, ok = parseString(json, i) + if !ok { + return i, res, false + } + if hit { + res.Type = String + res.Raw = val + if vesc { + res.Str = unescape(val[1 : len(val)-1]) + } else { + res.Str = val[1 : len(val)-1] + } + } + return i, res, true + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + i, val = parseNumber(json, i) + if hit { + res.Raw = val + res.Type = Number + res.Num, _ = strconv.ParseFloat(val, 64) + } + return i, res, true + case 't', 'f', 'n': + vc := json[i] + i, val = parseLiteral(json, i) + if hit { + res.Raw = val + switch vc { + case 't': + res.Type = True + case 'f': + res.Type = False + } + return i, res, true + } + } + } + return i, res, false +} + +var ( // used for testing + testWatchForFallback bool + testLastWasFallback bool +) + +// areSimplePaths returns true if all the paths are simple enough +// to parse quickly for GetMany(). Allows alpha-numeric, dots, +// underscores, and the dollar sign. It does not allow non-alnum, +// escape characters, or keys which start with a numbers. +// For example: +// "name.last" == OK +// "user.id0" == OK +// "user.ID" == OK +// "user.first_name" == OK +// "user.firstName" == OK +// "user.0item" == BAD +// "user.#id" == BAD +// "user\.name" == BAD +func areSimplePaths(paths []string) bool { + for _, path := range paths { + var fi int // first key index, for keys with numeric prefix + for i := 0; i < len(path); i++ { + if path[i] >= 'a' && path[i] <= 'z' { + // a-z is likely to be the highest frequency charater. + continue + } + if path[i] == '.' { + fi = i + 1 + continue + } + if path[i] >= 'A' && path[i] <= 'Z' { + continue + } + if path[i] == '_' || path[i] == '$' { + continue + } + if i > fi && path[i] >= '0' && path[i] <= '9' { + continue + } + return false + } + } + return true +} + +// GetMany searches json for the multiple paths. +// The return value is a Result array where the number of items +// will be equal to the number of input paths. +func GetMany(json string, paths ...string) []Result { + if len(paths) < 4 { + if testWatchForFallback { + testLastWasFallback = false + } + switch len(paths) { + case 0: + // return nil when no paths are specified. + return nil + case 1: + return []Result{Get(json, paths[0])} + case 2: + return []Result{Get(json, paths[0]), Get(json, paths[1])} + case 3: + return []Result{Get(json, paths[0]), Get(json, paths[1]), Get(json, paths[2])} + } + } + var results []Result + var ok bool + var i int + if len(paths) > 512 { + // we can only support up to 512 paths. Is that too many? 
+ goto fallback + } + if !areSimplePaths(paths) { + // If there is even one path that is not considered "simple" then + // we need to use the fallback method. + goto fallback + } + // locate the object token. + for ; i < len(json); i++ { + if json[i] == '{' { + i++ + break + } + if json[i] <= ' ' { + continue + } + goto fallback + } + // use the call function table. + if len(paths) <= 8 { + results, ok = getMany8(json, i, paths) + } else if len(paths) <= 16 { + results, ok = getMany16(json, i, paths) + } else if len(paths) <= 32 { + results, ok = getMany32(json, i, paths) + } else if len(paths) <= 64 { + results, ok = getMany64(json, i, paths) + } else if len(paths) <= 128 { + results, ok = getMany128(json, i, paths) + } else if len(paths) <= 256 { + results, ok = getMany256(json, i, paths) + } else if len(paths) <= 512 { + results, ok = getMany512(json, i, paths) + } + if !ok { + // there was some fault while parsing. we should try the + // fallback method. This could result in performance + // degregation in some cases. + goto fallback + } + if testWatchForFallback { + testLastWasFallback = false + } + return results +fallback: + results = results[:0] + for i := 0; i < len(paths); i++ { + results = append(results, Get(json, paths[i])) + } + if testWatchForFallback { + testLastWasFallback = true + } + return results +} + +// GetManyBytes searches json for the specified path. +// If working with bytes, this method preferred over +// GetMany(string(data), paths...) +func GetManyBytes(json []byte, paths ...string) []Result { + if json == nil { + return GetMany("", paths...) + } + results := GetMany(*(*string)(unsafe.Pointer(&json)), paths...) + for i := range results { + results[i] = fromBytesGet(results[i]) + } + return results +} + +// parseGetMany parses a json object for keys that match against the callers +// paths. It's a best-effort attempt and quickly locating and assigning the +// values to the []Result array. If there are failures such as bad json, or +// invalid input paths, or too much recursion, the function will exit with a +// return value of 'false'. +func parseGetMany( + json string, i int, + level uint, kplen int, + paths []string, completed []bool, matches []uint64, results []Result, +) (int, bool) { + if level > 62 { + // The recursion level is limited because the matches []uint64 + // array cannot handle more the 64-bits. + return i, false + } + // At this point the last character read was a '{'. + // Read all object keys and try to match against the paths. + var key string + var val string + var vesc, ok bool +next_key: + for ; i < len(json); i++ { + if json[i] == '"' { + // read the key + i, val, vesc, ok = parseString(json, i+1) + if !ok { + return i, false + } + if vesc { + // the value is escaped + key = unescape(val[1 : len(val)-1]) + } else { + // just a plain old ascii key + key = val[1 : len(val)-1] + } + var hasMatch bool + var parsedVal bool + var valOrgIndex int + var valPathIndex int + for j := 0; j < len(key); j++ { + if key[j] == '.' { + // we need to look for keys with dot and ignore them. 
+ if i, _, ok = parseAny(json, i, false); !ok { + return i, false + } + continue next_key + } + } + var usedPaths int + // loop through paths and look for matches + for j := 0; j < len(paths); j++ { + if completed[j] { + usedPaths++ + // ignore completed paths + continue + } + if level > 0 && (matches[j]>>(level-1))&1 == 0 { + // ignore unmatched paths + usedPaths++ + continue + } + + // try to match the key to the path + // this is spaghetti code but the idea is to minimize + // calls and variable assignments when comparing the + // key to paths + if len(paths[j])-kplen >= len(key) { + i, k := kplen, 0 + for ; k < len(key); k, i = k+1, i+1 { + if key[k] != paths[j][i] { + // no match + goto nomatch + } + } + if i < len(paths[j]) { + if paths[j][i] == '.' { + // matched, but there still more keys in the path + goto match_not_atend + } + } + // matched and at the end of the path + goto match_atend + } + // no match, jump to the nomatch label + goto nomatch + match_atend: + // found a match + // at the end of the path. we must take the value. + usedPaths++ + if !parsedVal { + // the value has not been parsed yet. let's do so. + valOrgIndex = i // keep track of the current position. + i, results[j], ok = parseAny(json, i, true) + if !ok { + return i, false + } + parsedVal = true + valPathIndex = j + } else { + results[j] = results[valPathIndex] + } + // mark as complete + completed[j] = true + // jump over the match_not_atend label + goto nomatch + match_not_atend: + // found a match + // still in the middle of the path. + usedPaths++ + // mark the path as matched + matches[j] |= 1 << level + if !hasMatch { + hasMatch = true + } + nomatch: // noop label + } + + if !parsedVal { + if hasMatch { + // we found a match and the value has not been parsed yet. + // let's find out if the next value type is an object. + for ; i < len(json); i++ { + if json[i] <= ' ' || json[i] == ':' { + continue + } + break + } + if i < len(json) { + if json[i] == '{' { + // it's an object. let's go deeper + i, ok = parseGetMany(json, i+1, level+1, kplen+len(key)+1, paths, completed, matches, results) + if !ok { + return i, false + } + } else { + // not an object. just parse and ignore. + if i, _, ok = parseAny(json, i, false); !ok { + return i, false + } + } + } + } else { + // Since there was no matches we can just parse the value and + // ignore the result. + if i, _, ok = parseAny(json, i, false); !ok { + return i, false + } + } + } else if hasMatch && len(results[valPathIndex].Raw) > 0 && results[valPathIndex].Raw[0] == '{' { + // The value was already parsed and the value type is an object. + // Rewind the json index and let's parse deeper. + i = valOrgIndex + for ; i < len(json); i++ { + if json[i] == '{' { + break + } + } + i, ok = parseGetMany(json, i+1, level+1, kplen+len(key)+1, paths, completed, matches, results) + if !ok { + return i, false + } + } + if usedPaths == len(paths) { + // all paths have been used, either completed or matched. + // we should stop parsing this object to save CPU cycles. + if level > 0 && i < len(json) { + i, _ = parseSquash(json, i) + } + return i, true + } + } else if json[i] == '}' { + // reached the end of the object. end it here. + return i + 1, true + } + } + return i, true +} + +// Call table for GetMany. Using an isolated function allows for allocating +// arrays with know capacities on the stack, as opposed to dynamically +// allocating on the heap. This can provide a tremendous performance boost +// by avoiding the GC. 
+func getMany8(json string, i int, paths []string) ([]Result, bool) { + const max = 8 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany16(json string, i int, paths []string) ([]Result, bool) { + const max = 16 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany32(json string, i int, paths []string) ([]Result, bool) { + const max = 32 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany64(json string, i int, paths []string) ([]Result, bool) { + const max = 64 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany128(json string, i int, paths []string) ([]Result, bool) { + const max = 128 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany256(json string, i int, paths []string) ([]Result, bool) { + const max = 256 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} +func getMany512(json string, i int, paths []string) ([]Result, bool) { + const max = 512 + var completed = make([]bool, 0, max) + var matches = make([]uint64, 0, max) + var results = make([]Result, 0, max) + completed = completed[0:len(paths):max] + matches = matches[0:len(paths):max] + results = results[0:len(paths):max] + _, ok := parseGetMany(json, i, 0, 0, paths, completed, matches, results) + return results, ok +} diff --git a/vendor/vendor.json b/vendor/vendor.json index 48432de87..645342f9c 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -296,10 +296,10 @@ "revisionTime": "2016-03-11T21:55:03Z" }, { - "checksumSHA1": "+Pcohsuq0Mi/y8bgaDFjb/CGzkk=", + "checksumSHA1": "k/Xh0p5L7+tBCXAL2dOCwUf9J3Y=", "path": "github.com/tidwall/gjson", - "revision": "7c631e98686a791e5fc60ff099512968122afb52", - "revisionTime": "2016-09-08T16:02:40Z" + "revision": "09d1c5c5bc64e094394dfe2150220d906c55ac37", + "revisionTime": "2017-02-05T16:10:42Z" }, { "checksumSHA1": "qmePMXEDYGwkAfT9QvtMC58JN/E=",
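The gjson vendor bump above is what provides the `Result.ForEach` iterator used by `doCheckDupJSONKeys`, along with `GetMany`, `ParseBytes` and the extended query operators documented in the README changes. A short self-contained sketch exercising those additions (assuming the vendored revision above is in the GOPATH; the sample document below is made up for illustration):

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

// Hypothetical sample document, loosely based on the README examples.
const doc = `{
  "name": {"first": "Tom", "last": "Anderson"},
  "age": 37,
  "friends": [
    {"first": "Dale", "last": "Murphy", "age": 44},
    {"first": "Roger", "last": "Craig", "age": 68},
    {"first": "Jane", "last": "Murphy", "age": 47}
  ]
}`

func main() {
	// ForEach: iterate the keys of the top-level object.
	gjson.Parse(doc).ForEach(func(key, value gjson.Result) bool {
		fmt.Println("top-level key:", key.String())
		return true // keep iterating
	})

	// GetMany: fetch several paths in a single scan over the document.
	for _, r := range gjson.GetMany(doc, "name.last", "age") {
		fmt.Println("value:", r.String())
	}

	// Multi-match query from the README: first names of all friends
	// whose last name is "Murphy".
	fmt.Println(gjson.Get(doc, `friends.#[last=="Murphy"]#.first`))
}
```

Only `Parse` and `ForEach` are used by the validation code in this patch; the other calls are shown purely to illustrate what the updated vendored revision brings in.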