diff --git a/core/poller/.gitignore b/core/poller/.gitignore
index 4c49bd78..3d8b3da5 100644
--- a/core/poller/.gitignore
+++ b/core/poller/.gitignore
@@ -1 +1,5 @@
 .env
+/unifi-poller
+/*.1.gz
+/*.1
+/vendor
diff --git a/core/poller/Godeps/Godeps.json b/core/poller/Godeps/Godeps.json
index 426a375a..2e53a112 100644
--- a/core/poller/Godeps/Godeps.json
+++ b/core/poller/Godeps/Godeps.json
@@ -1,24 +1,30 @@
 {
-	"ImportPath": "github.com/dewski/unifi",
-	"GoVersion": "go1.6",
+	"ImportPath": "github.com/davidnewhall/unifi",
+	"GoVersion": "go1.10",
+	"GodepVersion": "v79",
 	"Packages": [
 		"./..."
 	],
 	"Deps": [
 		{
 			"ImportPath": "github.com/influxdata/influxdb/client/v2",
-			"Comment": "v0.10.0-565-ga4b00ae",
-			"Rev": "a4b00aeeba630bda96db70bd9fc5eb2d4c0b7580"
+			"Comment": "v1.5.0-149-g14dcc5d",
+			"Rev": "14dcc5d6e7a6b15e17aba7b104b8ad0ca6c91ad2"
 		},
 		{
 			"ImportPath": "github.com/influxdata/influxdb/models",
-			"Comment": "v0.10.0-565-ga4b00ae",
-			"Rev": "a4b00aeeba630bda96db70bd9fc5eb2d4c0b7580"
+			"Comment": "v1.5.0-149-g14dcc5d",
+			"Rev": "14dcc5d6e7a6b15e17aba7b104b8ad0ca6c91ad2"
 		},
 		{
 			"ImportPath": "github.com/influxdata/influxdb/pkg/escape",
-			"Comment": "v0.10.0-565-ga4b00ae",
-			"Rev": "a4b00aeeba630bda96db70bd9fc5eb2d4c0b7580"
+			"Comment": "v1.5.0-149-g14dcc5d",
+			"Rev": "14dcc5d6e7a6b15e17aba7b104b8ad0ca6c91ad2"
+		},
+		{
+			"ImportPath": "github.com/pkg/errors",
+			"Comment": "v0.8.0-6-g2b3a18b",
+			"Rev": "2b3a18b5f0fb6b4f9190549597d3f962c02bc5eb"
 		}
 	]
 }
diff --git a/core/poller/Godeps/_workspace/.gitignore b/core/poller/Godeps/_workspace/.gitignore
deleted file mode 100644
index f037d684..00000000
--- a/core/poller/Godeps/_workspace/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-/pkg
-/bin
diff --git a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/LICENSE b/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/LICENSE
deleted file mode 100644
index 63cef79b..00000000
--- a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2013-2016 Errplane Inc.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/LICENSE_OF_DEPENDENCIES.md b/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/LICENSE_OF_DEPENDENCIES.md deleted file mode 100644 index f0794abc..00000000 --- a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/LICENSE_OF_DEPENDENCIES.md +++ /dev/null @@ -1,27 +0,0 @@ -# List -- bootstrap 3.3.5 [MIT LICENSE](https://github.com/twbs/bootstrap/blob/master/LICENSE) -- collectd.org [ISC LICENSE](https://github.com/collectd/go-collectd/blob/master/LICENSE) -- github.com/armon/go-metrics [MIT LICENSE](https://github.com/armon/go-metrics/blob/master/LICENSE) -- github.com/BurntSushi/toml [WTFPL LICENSE](https://github.com/BurntSushi/toml/blob/master/COPYING) -- github.com/bmizerany/pat [MIT LICENSE](https://github.com/bmizerany/pat#license) -- github.com/boltdb/bolt [MIT LICENSE](https://github.com/boltdb/bolt/blob/master/LICENSE) -- github.com/dgryski/go-bits [MIT LICENSE](https://github.com/dgryski/go-bits/blob/master/LICENSE) -- github.com/dgryski/go-bitstream [MIT LICENSE](https://github.com/dgryski/go-bitstream/blob/master/LICENSE) -- github.com/gogo/protobuf/proto [BSD LICENSE](https://github.com/gogo/protobuf/blob/master/LICENSE) -- github.com/davecgh/go-spew/spew [ISC LICENSE](https://github.com/davecgh/go-spew/blob/master/LICENSE) -- github.com/golang/snappy [BSD LICENSE](https://github.com/golang/snappy/blob/master/LICENSE) -- github.com/hashicorp/go-msgpack [BSD LICENSE](https://github.com/hashicorp/go-msgpack/blob/master/LICENSE) -- github.com/hashicorp/raft [MPL LICENSE](https://github.com/hashicorp/raft/blob/master/LICENSE) -- github.com/hashicorp/raft-boltdb [MOZILLA PUBLIC LICENSE](https://github.com/hashicorp/raft-boltdb/blob/master/LICENSE) -- github.com/influxdata/usage-client [MIT LICENSE](https://github.com/influxdata/usage-client/blob/master/LICENSE.txt) -- github.com/jwilder/encoding [MIT LICENSE](https://github.com/jwilder/encoding/blob/master/LICENSE) -- github.com/kimor79/gollectd [BSD LICENSE](https://github.com/kimor79/gollectd/blob/master/LICENSE) -- github.com/paulbellamy/ratecounter [MIT LICENSE](https://github.com/paulbellamy/ratecounter/blob/master/LICENSE) -- github.com/peterh/liner [MIT LICENSE](https://github.com/peterh/liner/blob/master/COPYING) -- github.com/rakyll/statik [APACHE LICENSE](https://github.com/rakyll/statik/blob/master/LICENSE) -- glyphicons [LICENSE](http://glyphicons.com/license/) -- golang.org/x/crypto [BSD LICENSE](https://github.com/golang/crypto/blob/master/LICENSE) -- golang.org/x/tools [BSD LICENSE](https://github.com/golang/tools/blob/master/LICENSE) -- gopkg.in/fatih/pool.v2 [MIT LICENSE](https://github.com/fatih/pool/blob/v2.0.0/LICENSE) -- jquery 2.1.4 [MIT LICENSE](https://github.com/jquery/jquery/blob/master/LICENSE.txt) -- react 0.13.3 [BSD LICENSE](https://github.com/facebook/react/blob/master/LICENSE) diff --git a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/client/v2/client.go b/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/client/v2/client.go deleted file mode 100644 index 19b950fb..00000000 --- a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/client/v2/client.go +++ /dev/null @@ -1,573 +0,0 @@ -package client - -import ( - "bytes" - "crypto/tls" - "encoding/json" - "errors" - "fmt" - "io/ioutil" - "net" - "net/http" - "net/url" - "time" - - "github.com/influxdata/influxdb/models" -) - -// UDPPayloadSize is a reasonable default payload size for UDP packets that 
-// could be travelling over the internet. -const ( - UDPPayloadSize = 512 -) - -// HTTPConfig is the config data needed to create an HTTP Client -type HTTPConfig struct { - // Addr should be of the form "http://host:port" - // or "http://[ipv6-host%zone]:port". - Addr string - - // Username is the influxdb username, optional - Username string - - // Password is the influxdb password, optional - Password string - - // UserAgent is the http User Agent, defaults to "InfluxDBClient" - UserAgent string - - // Timeout for influxdb writes, defaults to no timeout - Timeout time.Duration - - // InsecureSkipVerify gets passed to the http client, if true, it will - // skip https certificate verification. Defaults to false - InsecureSkipVerify bool - - // TLSConfig allows the user to set their own TLS config for the HTTP - // Client. If set, this option overrides InsecureSkipVerify. - TLSConfig *tls.Config -} - -// UDPConfig is the config data needed to create a UDP Client -type UDPConfig struct { - // Addr should be of the form "host:port" - // or "[ipv6-host%zone]:port". - Addr string - - // PayloadSize is the maximum size of a UDP client message, optional - // Tune this based on your network. Defaults to UDPBufferSize. - PayloadSize int -} - -// BatchPointsConfig is the config data needed to create an instance of the BatchPoints struct -type BatchPointsConfig struct { - // Precision is the write precision of the points, defaults to "ns" - Precision string - - // Database is the database to write points to - Database string - - // RetentionPolicy is the retention policy of the points - RetentionPolicy string - - // Write consistency is the number of servers required to confirm write - WriteConsistency string -} - -// Client is a client interface for writing & querying the database -type Client interface { - // Ping checks that status of cluster - Ping(timeout time.Duration) (time.Duration, string, error) - - // Write takes a BatchPoints object and writes all Points to InfluxDB. - Write(bp BatchPoints) error - - // Query makes an InfluxDB Query on the database. This will fail if using - // the UDP client. - Query(q Query) (*Response, error) - - // Close releases any resources a Client may be using. - Close() error -} - -// NewHTTPClient creates a client interface from the given config. -func NewHTTPClient(conf HTTPConfig) (Client, error) { - if conf.UserAgent == "" { - conf.UserAgent = "InfluxDBClient" - } - - u, err := url.Parse(conf.Addr) - if err != nil { - return nil, err - } else if u.Scheme != "http" && u.Scheme != "https" { - m := fmt.Sprintf("Unsupported protocol scheme: %s, your address"+ - " must start with http:// or https://", u.Scheme) - return nil, errors.New(m) - } - - tr := &http.Transport{ - TLSClientConfig: &tls.Config{ - InsecureSkipVerify: conf.InsecureSkipVerify, - }, - } - if conf.TLSConfig != nil { - tr.TLSClientConfig = conf.TLSConfig - } - return &client{ - url: u, - username: conf.Username, - password: conf.Password, - useragent: conf.UserAgent, - httpClient: &http.Client{ - Timeout: conf.Timeout, - Transport: tr, - }, - }, nil -} - -// Ping will check to see if the server is up with an optional timeout on waiting for leader. -// Ping returns how long the request took, the version of the server it connected to, and an error if one occurred. 
-func (c *client) Ping(timeout time.Duration) (time.Duration, string, error) { - now := time.Now() - u := c.url - u.Path = "ping" - - req, err := http.NewRequest("GET", u.String(), nil) - if err != nil { - return 0, "", err - } - - req.Header.Set("User-Agent", c.useragent) - - if c.username != "" { - req.SetBasicAuth(c.username, c.password) - } - - if timeout > 0 { - params := req.URL.Query() - params.Set("wait_for_leader", fmt.Sprintf("%.0fs", timeout.Seconds())) - req.URL.RawQuery = params.Encode() - } - - resp, err := c.httpClient.Do(req) - if err != nil { - return 0, "", err - } - defer resp.Body.Close() - - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return 0, "", err - } - - if resp.StatusCode != http.StatusNoContent { - var err = fmt.Errorf(string(body)) - return 0, "", err - } - - version := resp.Header.Get("X-Influxdb-Version") - return time.Since(now), version, nil -} - -// Close releases the client's resources. -func (c *client) Close() error { - return nil -} - -// NewUDPClient returns a client interface for writing to an InfluxDB UDP -// service from the given config. -func NewUDPClient(conf UDPConfig) (Client, error) { - var udpAddr *net.UDPAddr - udpAddr, err := net.ResolveUDPAddr("udp", conf.Addr) - if err != nil { - return nil, err - } - - conn, err := net.DialUDP("udp", nil, udpAddr) - if err != nil { - return nil, err - } - - payloadSize := conf.PayloadSize - if payloadSize == 0 { - payloadSize = UDPPayloadSize - } - - return &udpclient{ - conn: conn, - payloadSize: payloadSize, - }, nil -} - -// Ping will check to see if the server is up with an optional timeout on waiting for leader. -// Ping returns how long the request took, the version of the server it connected to, and an error if one occurred. -func (uc *udpclient) Ping(timeout time.Duration) (time.Duration, string, error) { - return 0, "", nil -} - -// Close releases the udpclient's resources. -func (uc *udpclient) Close() error { - return uc.conn.Close() -} - -type client struct { - url *url.URL - username string - password string - useragent string - httpClient *http.Client -} - -type udpclient struct { - conn *net.UDPConn - payloadSize int -} - -// BatchPoints is an interface into a batched grouping of points to write into -// InfluxDB together. BatchPoints is NOT thread-safe, you must create a separate -// batch for each goroutine. -type BatchPoints interface { - // AddPoint adds the given point to the Batch of points - AddPoint(p *Point) - // AddPoints adds the given points to the Batch of points - AddPoints(ps []*Point) - // Points lists the points in the Batch - Points() []*Point - - // Precision returns the currently set precision of this Batch - Precision() string - // SetPrecision sets the precision of this batch. - SetPrecision(s string) error - - // Database returns the currently set database of this Batch - Database() string - // SetDatabase sets the database of this Batch - SetDatabase(s string) - - // WriteConsistency returns the currently set write consistency of this Batch - WriteConsistency() string - // SetWriteConsistency sets the write consistency of this Batch - SetWriteConsistency(s string) - - // RetentionPolicy returns the currently set retention policy of this Batch - RetentionPolicy() string - // SetRetentionPolicy sets the retention policy of this Batch - SetRetentionPolicy(s string) -} - -// NewBatchPoints returns a BatchPoints interface based on the given config. 
-func NewBatchPoints(conf BatchPointsConfig) (BatchPoints, error) { - if conf.Precision == "" { - conf.Precision = "ns" - } - if _, err := time.ParseDuration("1" + conf.Precision); err != nil { - return nil, err - } - bp := &batchpoints{ - database: conf.Database, - precision: conf.Precision, - retentionPolicy: conf.RetentionPolicy, - writeConsistency: conf.WriteConsistency, - } - return bp, nil -} - -type batchpoints struct { - points []*Point - database string - precision string - retentionPolicy string - writeConsistency string -} - -func (bp *batchpoints) AddPoint(p *Point) { - bp.points = append(bp.points, p) -} - -func (bp *batchpoints) AddPoints(ps []*Point) { - bp.points = append(bp.points, ps...) -} - -func (bp *batchpoints) Points() []*Point { - return bp.points -} - -func (bp *batchpoints) Precision() string { - return bp.precision -} - -func (bp *batchpoints) Database() string { - return bp.database -} - -func (bp *batchpoints) WriteConsistency() string { - return bp.writeConsistency -} - -func (bp *batchpoints) RetentionPolicy() string { - return bp.retentionPolicy -} - -func (bp *batchpoints) SetPrecision(p string) error { - if _, err := time.ParseDuration("1" + p); err != nil { - return err - } - bp.precision = p - return nil -} - -func (bp *batchpoints) SetDatabase(db string) { - bp.database = db -} - -func (bp *batchpoints) SetWriteConsistency(wc string) { - bp.writeConsistency = wc -} - -func (bp *batchpoints) SetRetentionPolicy(rp string) { - bp.retentionPolicy = rp -} - -// Point represents a single data point -type Point struct { - pt models.Point -} - -// NewPoint returns a point with the given timestamp. If a timestamp is not -// given, then data is sent to the database without a timestamp, in which case -// the server will assign local time upon reception. NOTE: it is recommended to -// send data with a timestamp. -func NewPoint( - name string, - tags map[string]string, - fields map[string]interface{}, - t ...time.Time, -) (*Point, error) { - var T time.Time - if len(t) > 0 { - T = t[0] - } - - pt, err := models.NewPoint(name, tags, fields, T) - if err != nil { - return nil, err - } - return &Point{ - pt: pt, - }, nil -} - -// String returns a line-protocol string of the Point -func (p *Point) String() string { - return p.pt.String() -} - -// PrecisionString returns a line-protocol string of the Point, at precision -func (p *Point) PrecisionString(precison string) string { - return p.pt.PrecisionString(precison) -} - -// Name returns the measurement name of the point -func (p *Point) Name() string { - return p.pt.Name() -} - -// Tags returns the tags associated with the point -func (p *Point) Tags() map[string]string { - return p.pt.Tags() -} - -// Time return the timestamp for the point -func (p *Point) Time() time.Time { - return p.pt.Time() -} - -// UnixNano returns the unix nano time of the point -func (p *Point) UnixNano() int64 { - return p.pt.UnixNano() -} - -// Fields returns the fields for the point -func (p *Point) Fields() map[string]interface{} { - return p.pt.Fields() -} - -// NewPointFrom returns a point from the provided models.Point. 
-func NewPointFrom(pt models.Point) *Point { - return &Point{pt: pt} -} - -func (uc *udpclient) Write(bp BatchPoints) error { - var b bytes.Buffer - var d time.Duration - d, _ = time.ParseDuration("1" + bp.Precision()) - - for _, p := range bp.Points() { - pointstring := p.pt.RoundedString(d) + "\n" - - // Write and reset the buffer if we reach the max size - if b.Len()+len(pointstring) >= uc.payloadSize { - if _, err := uc.conn.Write(b.Bytes()); err != nil { - return err - } - b.Reset() - } - - if _, err := b.WriteString(pointstring); err != nil { - return err - } - } - - _, err := uc.conn.Write(b.Bytes()) - return err -} - -func (c *client) Write(bp BatchPoints) error { - var b bytes.Buffer - - for _, p := range bp.Points() { - if _, err := b.WriteString(p.pt.PrecisionString(bp.Precision())); err != nil { - return err - } - - if err := b.WriteByte('\n'); err != nil { - return err - } - } - - u := c.url - u.Path = "write" - req, err := http.NewRequest("POST", u.String(), &b) - if err != nil { - return err - } - req.Header.Set("Content-Type", "") - req.Header.Set("User-Agent", c.useragent) - if c.username != "" { - req.SetBasicAuth(c.username, c.password) - } - - params := req.URL.Query() - params.Set("db", bp.Database()) - params.Set("rp", bp.RetentionPolicy()) - params.Set("precision", bp.Precision()) - params.Set("consistency", bp.WriteConsistency()) - req.URL.RawQuery = params.Encode() - - resp, err := c.httpClient.Do(req) - if err != nil { - return err - } - defer resp.Body.Close() - - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return err - } - - if resp.StatusCode != http.StatusNoContent && resp.StatusCode != http.StatusOK { - var err = fmt.Errorf(string(body)) - return err - } - - return nil -} - -// Query defines a query to send to the server -type Query struct { - Command string - Database string - Precision string -} - -// NewQuery returns a query object -// database and precision strings can be empty strings if they are not needed -// for the query. -func NewQuery(command, database, precision string) Query { - return Query{ - Command: command, - Database: database, - Precision: precision, - } -} - -// Response represents a list of statement results. -type Response struct { - Results []Result - Err string `json:"error,omitempty"` -} - -// Error returns the first error from any statement. -// Returns nil if no errors occurred on any statements. -func (r *Response) Error() error { - if r.Err != "" { - return fmt.Errorf(r.Err) - } - for _, result := range r.Results { - if result.Err != "" { - return fmt.Errorf(result.Err) - } - } - return nil -} - -// Result represents a resultset returned from a single statement. 
-type Result struct { - Series []models.Row - Err string `json:"error,omitempty"` -} - -func (uc *udpclient) Query(q Query) (*Response, error) { - return nil, fmt.Errorf("Querying via UDP is not supported") -} - -// Query sends a command to the server and returns the Response -func (c *client) Query(q Query) (*Response, error) { - u := c.url - u.Path = "query" - - req, err := http.NewRequest("GET", u.String(), nil) - if err != nil { - return nil, err - } - req.Header.Set("Content-Type", "") - req.Header.Set("User-Agent", c.useragent) - if c.username != "" { - req.SetBasicAuth(c.username, c.password) - } - - params := req.URL.Query() - params.Set("q", q.Command) - params.Set("db", q.Database) - if q.Precision != "" { - params.Set("epoch", q.Precision) - } - req.URL.RawQuery = params.Encode() - - resp, err := c.httpClient.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - var response Response - dec := json.NewDecoder(resp.Body) - dec.UseNumber() - decErr := dec.Decode(&response) - - // ignore this error if we got an invalid status code - if decErr != nil && decErr.Error() == "EOF" && resp.StatusCode != http.StatusOK { - decErr = nil - } - // If we got a valid decode error, send that back - if decErr != nil { - return nil, decErr - } - // If we don't have an error in our json response, and didn't get statusOK - // then send back an error - if resp.StatusCode != http.StatusOK && response.Error() == nil { - return &response, fmt.Errorf("received status code %d from server", - resp.StatusCode) - } - return &response, nil -} diff --git a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/consistency.go b/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/consistency.go deleted file mode 100644 index 97cdc51a..00000000 --- a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/consistency.go +++ /dev/null @@ -1,46 +0,0 @@ -package models - -import ( - "errors" - "strings" -) - -// ConsistencyLevel represent a required replication criteria before a write can -// be returned as successful -type ConsistencyLevel int - -const ( - // ConsistencyLevelAny allows for hinted hand off, potentially no write happened yet - ConsistencyLevelAny ConsistencyLevel = iota - - // ConsistencyLevelOne requires at least one data node acknowledged a write - ConsistencyLevelOne - - // ConsistencyLevelQuorum requires a quorum of data nodes to acknowledge a write - ConsistencyLevelQuorum - - // ConsistencyLevelAll requires all data nodes to acknowledge a write - ConsistencyLevelAll -) - -var ( - // ErrInvalidConsistencyLevel is returned when parsing the string version - // of a consistency level. 
- ErrInvalidConsistencyLevel = errors.New("invalid consistency level") -) - -// ParseConsistencyLevel converts a consistency level string to the corresponding ConsistencyLevel const -func ParseConsistencyLevel(level string) (ConsistencyLevel, error) { - switch strings.ToLower(level) { - case "any": - return ConsistencyLevelAny, nil - case "one": - return ConsistencyLevelOne, nil - case "quorum": - return ConsistencyLevelQuorum, nil - case "all": - return ConsistencyLevelAll, nil - default: - return 0, ErrInvalidConsistencyLevel - } -} diff --git a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/points.go b/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/points.go deleted file mode 100644 index f9ff203f..00000000 --- a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/points.go +++ /dev/null @@ -1,1553 +0,0 @@ -package models - -import ( - "bytes" - "encoding/binary" - "errors" - "fmt" - "hash/fnv" - "math" - "sort" - "strconv" - "strings" - "time" - - "github.com/influxdata/influxdb/pkg/escape" -) - -var ( - measurementEscapeCodes = map[byte][]byte{ - ',': []byte(`\,`), - ' ': []byte(`\ `), - } - - tagEscapeCodes = map[byte][]byte{ - ',': []byte(`\,`), - ' ': []byte(`\ `), - '=': []byte(`\=`), - } - - ErrPointMustHaveAField = errors.New("point without fields is unsupported") - ErrInvalidNumber = errors.New("invalid number") -) - -// Point defines the values that will be written to the database -type Point interface { - Name() string - SetName(string) - - Tags() Tags - AddTag(key, value string) - SetTags(tags Tags) - - Fields() Fields - - Time() time.Time - SetTime(t time.Time) - UnixNano() int64 - - HashID() uint64 - Key() []byte - - Data() []byte - SetData(buf []byte) - - // String returns a string representation of the point, if there is a - // timestamp associated with the point then it will be specified with the default - // precision of nanoseconds - String() string - - // Bytes returns a []byte representation of the point similar to string. - MarshalBinary() ([]byte, error) - - // PrecisionString returns a string representation of the point, if there - // is a timestamp associated with the point then it will be specified in the - // given unit - PrecisionString(precision string) string - - // RoundedString returns a string representation of the point, if there - // is a timestamp associated with the point, then it will be rounded to the - // given duration - RoundedString(d time.Duration) string -} - -// Points represents a sortable list of points by timestamp. -type Points []Point - -func (a Points) Len() int { return len(a) } -func (a Points) Less(i, j int) bool { return a[i].Time().Before(a[j].Time()) } -func (a Points) Swap(i, j int) { a[i], a[j] = a[j], a[i] } - -// point is the default implementation of Point. 
-type point struct { - time time.Time - - // text encoding of measurement and tags - // key must always be stored sorted by tags, if the original line was not sorted, - // we need to resort it - key []byte - - // text encoding of field data - fields []byte - - // text encoding of timestamp - ts []byte - - // binary encoded field data - data []byte - - // cached version of parsed fields from data - cachedFields map[string]interface{} - - // cached version of parsed name from key - cachedName string -} - -const ( - // the number of characters for the largest possible int64 (9223372036854775807) - maxInt64Digits = 19 - - // the number of characters for the smallest possible int64 (-9223372036854775808) - minInt64Digits = 20 - - // the number of characters required for the largest float64 before a range check - // would occur during parsing - maxFloat64Digits = 25 - - // the number of characters required for smallest float64 before a range check occur - // would occur during parsing - minFloat64Digits = 27 -) - -// ParsePoints returns a slice of Points from a text representation of a point -// with each point separated by newlines. If any points fail to parse, a non-nil error -// will be returned in addition to the points that parsed successfully. -func ParsePoints(buf []byte) ([]Point, error) { - return ParsePointsWithPrecision(buf, time.Now().UTC(), "n") -} - -// ParsePointsString is identical to ParsePoints but accepts a string -// buffer. -func ParsePointsString(buf string) ([]Point, error) { - return ParsePoints([]byte(buf)) -} - -// ParseKey returns the measurement name and tags from a point. -func ParseKey(buf string) (string, Tags, error) { - _, keyBuf, err := scanKey([]byte(buf), 0) - tags := parseTags([]byte(buf)) - return string(keyBuf), tags, err -} - -// ParsePointsWithPrecision is similar to ParsePoints, but allows the -// caller to provide a precision for time. -func ParsePointsWithPrecision(buf []byte, defaultTime time.Time, precision string) ([]Point, error) { - points := []Point{} - var ( - pos int - block []byte - failed []string - ) - for { - pos, block = scanLine(buf, pos) - pos++ - - if len(block) == 0 { - break - } - - // lines which start with '#' are comments - start := skipWhitespace(block, 0) - - // If line is all whitespace, just skip it - if start >= len(block) { - continue - } - - if block[start] == '#' { - continue - } - - // strip the newline if one is present - if block[len(block)-1] == '\n' { - block = block[:len(block)-1] - } - - pt, err := parsePoint(block[start:len(block)], defaultTime, precision) - if err != nil { - failed = append(failed, fmt.Sprintf("unable to parse '%s': %v", string(block[start:len(block)]), err)) - } else { - points = append(points, pt) - } - - if pos >= len(buf) { - break - } - - } - if len(failed) > 0 { - return points, fmt.Errorf("%s", strings.Join(failed, "\n")) - } - return points, nil - -} - -func parsePoint(buf []byte, defaultTime time.Time, precision string) (Point, error) { - // scan the first block which is measurement[,tag1=value1,tag2=value=2...] - pos, key, err := scanKey(buf, 0) - if err != nil { - return nil, err - } - - // measurement name is required - if len(key) == 0 { - return nil, fmt.Errorf("missing measurement") - } - - // scan the second block is which is field1=value1[,field2=value2,...] 
- pos, fields, err := scanFields(buf, pos) - if err != nil { - return nil, err - } - - // at least one field is required - if len(fields) == 0 { - return nil, fmt.Errorf("missing fields") - } - - // scan the last block which is an optional integer timestamp - pos, ts, err := scanTime(buf, pos) - - if err != nil { - return nil, err - } - - pt := &point{ - key: key, - fields: fields, - ts: ts, - } - - if len(ts) == 0 { - pt.time = defaultTime - pt.SetPrecision(precision) - } else { - ts, err := strconv.ParseInt(string(ts), 10, 64) - if err != nil { - return nil, err - } - pt.time, err = SafeCalcTime(ts, precision) - if err != nil { - return nil, err - } - } - return pt, nil -} - -// GetPrecisionMultiplier will return a multiplier for the precision specified -func GetPrecisionMultiplier(precision string) int64 { - d := time.Nanosecond - switch precision { - case "u": - d = time.Microsecond - case "ms": - d = time.Millisecond - case "s": - d = time.Second - case "m": - d = time.Minute - case "h": - d = time.Hour - } - return int64(d) -} - -// scanKey scans buf starting at i for the measurement and tag portion of the point. -// It returns the ending position and the byte slice of key within buf. If there -// are tags, they will be sorted if they are not already. -func scanKey(buf []byte, i int) (int, []byte, error) { - start := skipWhitespace(buf, i) - - i = start - - // Determines whether the tags are sort, assume they are - sorted := true - - // indices holds the indexes within buf of the start of each tag. For example, - // a buf of 'cpu,host=a,region=b,zone=c' would have indices slice of [4,11,20] - // which indicates that the first tag starts at buf[4], seconds at buf[11], and - // last at buf[20] - indices := make([]int, 100) - - // tracks how many commas we've seen so we know how many values are indices. - // Since indices is an arbitrarily large slice, - // we need to know how many values in the buffer are in use. - commas := 0 - - // First scan the Point's measurement. - state, i, err := scanMeasurement(buf, i) - if err != nil { - return i, buf[start:i], err - } - - // Optionally scan tags if needed. - if state == tagKeyState { - i, commas, indices, err = scanTags(buf, i, indices) - if err != nil { - return i, buf[start:i], err - } - } - - // Now we know where the key region is within buf, and the locations of tags, we - // need to determine if duplicate tags exist and if the tags are sorted. This iterates - // 1/2 of the list comparing each end with each other, walking towards the center from - // both sides. - for j := 0; j < commas/2; j++ { - // get the left and right tags - _, left := scanTo(buf[indices[j]:indices[j+1]-1], 0, '=') - _, right := scanTo(buf[indices[commas-j-1]:indices[commas-j]-1], 0, '=') - - // If the tags are equal, then there are duplicate tags, and we should abort - if bytes.Equal(left, right) { - return i, buf[start:i], fmt.Errorf("duplicate tags") - } - - // If left is greater than right, the tags are not sorted. We must continue - // since their could be duplicate tags still. - if bytes.Compare(left, right) > 0 { - sorted = false - } - } - - // If the tags are not sorted, then sort them. This sort is inline and - // uses the tag indices we created earlier. The actual buffer is not sorted, the - // indices are using the buffer for value comparison. After the indices are sorted, - // the buffer is reconstructed from the sorted indices. 
- if !sorted && commas > 0 { - // Get the measurement name for later - measurement := buf[start : indices[0]-1] - - // Sort the indices - indices := indices[:commas] - insertionSort(0, commas, buf, indices) - - // Create a new key using the measurement and sorted indices - b := make([]byte, len(buf[start:i])) - pos := copy(b, measurement) - for _, i := range indices { - b[pos] = ',' - pos++ - _, v := scanToSpaceOr(buf, i, ',') - pos += copy(b[pos:], v) - } - - return i, b, nil - } - - return i, buf[start:i], nil -} - -// The following constants allow us to specify which state to move to -// next, when scanning sections of a Point. -const ( - tagKeyState = iota - tagValueState - fieldsState -) - -// scanMeasurement examines the measurement part of a Point, returning -// the next state to move to, and the current location in the buffer. -func scanMeasurement(buf []byte, i int) (int, int, error) { - // Check first byte of measurement, anything except a comma is fine. - // It can't be a space, since whitespace is stripped prior to this - // function call. - if buf[i] == ',' { - return -1, i, fmt.Errorf("missing measurement") - } - - for { - i++ - if i >= len(buf) { - // cpu - return -1, i, fmt.Errorf("missing fields") - } - - if buf[i-1] == '\\' { - // Skip character (it's escaped). - continue - } - - // Unescaped comma; move onto scanning the tags. - if buf[i] == ',' { - return tagKeyState, i + 1, nil - } - - // Unescaped space; move onto scanning the fields. - if buf[i] == ' ' { - // cpu value=1.0 - return fieldsState, i, nil - } - } -} - -// scanTags examines all the tags in a Point, keeping track of and -// returning the updated indices slice, number of commas and location -// in buf where to start examining the Point fields. -func scanTags(buf []byte, i int, indices []int) (int, int, []int, error) { - var ( - err error - commas int - state = tagKeyState - ) - - for { - switch state { - case tagKeyState: - // Grow our indices slice if we have too many tags. - if commas >= len(indices) { - newIndics := make([]int, cap(indices)*2) - copy(newIndics, indices) - indices = newIndics - } - indices[commas] = i - commas++ - - i, err = scanTagsKey(buf, i) - state = tagValueState // tag value always follows a tag key - case tagValueState: - state, i, err = scanTagsValue(buf, i) - case fieldsState: - indices[commas] = i + 1 - return i, commas, indices, nil - } - - if err != nil { - return i, commas, indices, err - } - } -} - -// scanTagsKey scans each character in a tag key. -func scanTagsKey(buf []byte, i int) (int, error) { - // First character of the key. - if i >= len(buf) || buf[i] == ' ' || buf[i] == ',' || buf[i] == '=' { - // cpu,{'', ' ', ',', '='} - return i, fmt.Errorf("missing tag key") - } - - // Examine each character in the tag key until we hit an unescaped - // equals (the tag value), or we hit an error (i.e., unescaped - // space or comma). - for { - i++ - - // Either we reached the end of the buffer or we hit an - // unescaped comma or space. - if i >= len(buf) || - ((buf[i] == ' ' || buf[i] == ',') && buf[i-1] != '\\') { - // cpu,tag{'', ' ', ','} - return i, fmt.Errorf("missing tag value") - } - - if buf[i] == '=' && buf[i-1] != '\\' { - // cpu,tag= - return i + 1, nil - } - } -} - -// scanTagsValue scans each character in a tag value. -func scanTagsValue(buf []byte, i int) (int, int, error) { - // Tag value cannot be empty. 
- if i >= len(buf) || buf[i] == ',' || buf[i] == ' ' { - // cpu,tag={',', ' '} - return -1, i, fmt.Errorf("missing tag value") - } - - // Examine each character in the tag value until we hit an unescaped - // comma (move onto next tag key), an unescaped space (move onto - // fields), or we error out. - for { - i++ - if i >= len(buf) { - // cpu,tag=value - return -1, i, fmt.Errorf("missing fields") - } - - // An unescaped equals sign is an invalid tag value. - if buf[i] == '=' && buf[i-1] != '\\' { - // cpu,tag={'=', 'fo=o'} - return -1, i, fmt.Errorf("invalid tag format") - } - - if buf[i] == ',' && buf[i-1] != '\\' { - // cpu,tag=foo, - return tagKeyState, i + 1, nil - } - - // cpu,tag=foo value=1.0 - // cpu, tag=foo\= value=1.0 - if buf[i] == ' ' && buf[i-1] != '\\' { - return fieldsState, i, nil - } - } -} - -func insertionSort(l, r int, buf []byte, indices []int) { - for i := l + 1; i < r; i++ { - for j := i; j > l && less(buf, indices, j, j-1); j-- { - indices[j], indices[j-1] = indices[j-1], indices[j] - } - } -} - -func less(buf []byte, indices []int, i, j int) bool { - // This grabs the tag names for i & j, it ignores the values - _, a := scanTo(buf, indices[i], '=') - _, b := scanTo(buf, indices[j], '=') - return bytes.Compare(a, b) < 0 -} - -func isFieldEscapeChar(b byte) bool { - for c := range escape.Codes { - if c == b { - return true - } - } - return false -} - -// scanFields scans buf, starting at i for the fields section of a point. It returns -// the ending position and the byte slice of the fields within buf -func scanFields(buf []byte, i int) (int, []byte, error) { - start := skipWhitespace(buf, i) - i = start - quoted := false - - // tracks how many '=' we've seen - equals := 0 - - // tracks how many commas we've seen - commas := 0 - - for { - // reached the end of buf? - if i >= len(buf) { - break - } - - // escaped characters? - if buf[i] == '\\' && i+1 < len(buf) { - i += 2 - continue - } - - // If the value is quoted, scan until we get to the end quote - // Only quote values in the field value since quotes are not significant - // in the field key - if buf[i] == '"' && equals > commas { - quoted = !quoted - i++ - continue - } - - // If we see an =, ensure that there is at least on char before and after it - if buf[i] == '=' && !quoted { - equals++ - - // check for "... =123" but allow "a\ =123" - if buf[i-1] == ' ' && buf[i-2] != '\\' { - return i, buf[start:i], fmt.Errorf("missing field key") - } - - // check for "...a=123,=456" but allow "a=123,a\,=456" - if buf[i-1] == ',' && buf[i-2] != '\\' { - return i, buf[start:i], fmt.Errorf("missing field key") - } - - // check for "... value=" - if i+1 >= len(buf) { - return i, buf[start:i], fmt.Errorf("missing field value") - } - - // check for "... value=,value2=..." - if buf[i+1] == ',' || buf[i+1] == ' ' { - return i, buf[start:i], fmt.Errorf("missing field value") - } - - if isNumeric(buf[i+1]) || buf[i+1] == '-' || buf[i+1] == 'N' || buf[i+1] == 'n' { - var err error - i, err = scanNumber(buf, i+1) - if err != nil { - return i, buf[start:i], err - } - continue - } - // If next byte is not a double-quote, the value must be a boolean - if buf[i+1] != '"' { - var err error - i, _, err = scanBoolean(buf, i+1) - if err != nil { - return i, buf[start:i], err - } - continue - } - } - - if buf[i] == ',' && !quoted { - commas++ - } - - // reached end of block? 
- if buf[i] == ' ' && !quoted { - break - } - i++ - } - - if quoted { - return i, buf[start:i], fmt.Errorf("unbalanced quotes") - } - - // check that all field sections had key and values (e.g. prevent "a=1,b" - if equals == 0 || commas != equals-1 { - return i, buf[start:i], fmt.Errorf("invalid field format") - } - - return i, buf[start:i], nil -} - -// scanTime scans buf, starting at i for the time section of a point. It returns -// the ending position and the byte slice of the fields within buf and error if the -// timestamp is not in the correct numeric format -func scanTime(buf []byte, i int) (int, []byte, error) { - start := skipWhitespace(buf, i) - i = start - for { - // reached the end of buf? - if i >= len(buf) { - break - } - - // Timestamps should be integers, make sure they are so we don't need to actually - // parse the timestamp until needed - if buf[i] < '0' || buf[i] > '9' { - // Handle negative timestamps - if i == start && buf[i] == '-' { - i++ - continue - } - return i, buf[start:i], fmt.Errorf("bad timestamp") - } - - // reached end of block? - if buf[i] == '\n' { - break - } - i++ - } - return i, buf[start:i], nil -} - -func isNumeric(b byte) bool { - return (b >= '0' && b <= '9') || b == '.' -} - -// scanNumber returns the end position within buf, start at i after -// scanning over buf for an integer, or float. It returns an -// error if a invalid number is scanned. -func scanNumber(buf []byte, i int) (int, error) { - start := i - var isInt bool - - // Is negative number? - if i < len(buf) && buf[i] == '-' { - i++ - // There must be more characters now, as just '-' is illegal. - if i == len(buf) { - return i, ErrInvalidNumber - } - } - - // how many decimal points we've see - decimal := false - - // indicates the number is float in scientific notation - scientific := false - - for { - if i >= len(buf) { - break - } - - if buf[i] == ',' || buf[i] == ' ' { - break - } - - if buf[i] == 'i' && i > start && !isInt { - isInt = true - i++ - continue - } - - if buf[i] == '.' { - // Can't have more than 1 decimal (e.g. 1.1.1 should fail) - if decimal { - return i, ErrInvalidNumber - } - decimal = true - } - - // `e` is valid for floats but not as the first char - if i > start && (buf[i] == 'e' || buf[i] == 'E') { - scientific = true - i++ - continue - } - - // + and - are only valid at this point if they follow an e (scientific notation) - if (buf[i] == '+' || buf[i] == '-') && (buf[i-1] == 'e' || buf[i-1] == 'E') { - i++ - continue - } - - // NaN is an unsupported value - if i+2 < len(buf) && (buf[i] == 'N' || buf[i] == 'n') { - return i, ErrInvalidNumber - } - - if !isNumeric(buf[i]) { - return i, ErrInvalidNumber - } - i++ - } - - if isInt && (decimal || scientific) { - return i, ErrInvalidNumber - } - - numericDigits := i - start - if isInt { - numericDigits-- - } - if decimal { - numericDigits-- - } - if buf[start] == '-' { - numericDigits-- - } - - if numericDigits == 0 { - return i, ErrInvalidNumber - } - - // It's more common that numbers will be within min/max range for their type but we need to prevent - // out or range numbers from being parsed successfully. This uses some simple heuristics to decide - // if we should parse the number to the actual type. It does not do it all the time because it incurs - // extra allocations and we end up converting the type again when writing points to disk. - if isInt { - // Make sure the last char is an 'i' for integers (e.g. 
9i10 is not valid) - if buf[i-1] != 'i' { - return i, ErrInvalidNumber - } - // Parse the int to check bounds the number of digits could be larger than the max range - // We subtract 1 from the index to remove the `i` from our tests - if len(buf[start:i-1]) >= maxInt64Digits || len(buf[start:i-1]) >= minInt64Digits { - if _, err := strconv.ParseInt(string(buf[start:i-1]), 10, 64); err != nil { - return i, fmt.Errorf("unable to parse integer %s: %s", buf[start:i-1], err) - } - } - } else { - // Parse the float to check bounds if it's scientific or the number of digits could be larger than the max range - if scientific || len(buf[start:i]) >= maxFloat64Digits || len(buf[start:i]) >= minFloat64Digits { - if _, err := strconv.ParseFloat(string(buf[start:i]), 10); err != nil { - return i, fmt.Errorf("invalid float") - } - } - } - - return i, nil -} - -// scanBoolean returns the end position within buf, start at i after -// scanning over buf for boolean. Valid values for a boolean are -// t, T, true, TRUE, f, F, false, FALSE. It returns an error if a invalid boolean -// is scanned. -func scanBoolean(buf []byte, i int) (int, []byte, error) { - start := i - - if i < len(buf) && (buf[i] != 't' && buf[i] != 'f' && buf[i] != 'T' && buf[i] != 'F') { - return i, buf[start:i], fmt.Errorf("invalid boolean") - } - - i++ - for { - if i >= len(buf) { - break - } - - if buf[i] == ',' || buf[i] == ' ' { - break - } - i++ - } - - // Single char bool (t, T, f, F) is ok - if i-start == 1 { - return i, buf[start:i], nil - } - - // length must be 4 for true or TRUE - if (buf[start] == 't' || buf[start] == 'T') && i-start != 4 { - return i, buf[start:i], fmt.Errorf("invalid boolean") - } - - // length must be 5 for false or FALSE - if (buf[start] == 'f' || buf[start] == 'F') && i-start != 5 { - return i, buf[start:i], fmt.Errorf("invalid boolean") - } - - // Otherwise - valid := false - switch buf[start] { - case 't': - valid = bytes.Equal(buf[start:i], []byte("true")) - case 'f': - valid = bytes.Equal(buf[start:i], []byte("false")) - case 'T': - valid = bytes.Equal(buf[start:i], []byte("TRUE")) || bytes.Equal(buf[start:i], []byte("True")) - case 'F': - valid = bytes.Equal(buf[start:i], []byte("FALSE")) || bytes.Equal(buf[start:i], []byte("False")) - } - - if !valid { - return i, buf[start:i], fmt.Errorf("invalid boolean") - } - - return i, buf[start:i], nil - -} - -// skipWhitespace returns the end position within buf, starting at i after -// scanning over spaces in tags -func skipWhitespace(buf []byte, i int) int { - for i < len(buf) { - if buf[i] != ' ' && buf[i] != '\t' && buf[i] != 0 { - break - } - i++ - } - return i -} - -// scanLine returns the end position in buf and the next line found within -// buf. -func scanLine(buf []byte, i int) (int, []byte) { - start := i - quoted := false - fields := false - - // tracks how many '=' and commas we've seen - // this duplicates some of the functionality in scanFields - equals := 0 - commas := 0 - for { - // reached the end of buf? 
- if i >= len(buf) { - break - } - - // skip past escaped characters - if buf[i] == '\\' { - i += 2 - continue - } - - if buf[i] == ' ' { - fields = true - } - - // If we see a double quote, makes sure it is not escaped - if fields { - if !quoted && buf[i] == '=' { - i++ - equals++ - continue - } else if !quoted && buf[i] == ',' { - i++ - commas++ - continue - } else if buf[i] == '"' && equals > commas { - i++ - quoted = !quoted - continue - } - } - - if buf[i] == '\n' && !quoted { - break - } - - i++ - } - - return i, buf[start:i] -} - -// scanTo returns the end position in buf and the next consecutive block -// of bytes, starting from i and ending with stop byte, where stop byte -// has not been escaped. -// -// If there are leading spaces, they are skipped. -func scanTo(buf []byte, i int, stop byte) (int, []byte) { - start := i - for { - // reached the end of buf? - if i >= len(buf) { - break - } - - // Reached unescaped stop value? - if buf[i] == stop && (i == 0 || buf[i-1] != '\\') { - break - } - i++ - } - - return i, buf[start:i] -} - -// scanTo returns the end position in buf and the next consecutive block -// of bytes, starting from i and ending with stop byte. If there are leading -// spaces, they are skipped. -func scanToSpaceOr(buf []byte, i int, stop byte) (int, []byte) { - start := i - if buf[i] == stop || buf[i] == ' ' { - return i, buf[start:i] - } - - for { - i++ - if buf[i-1] == '\\' { - continue - } - - // reached the end of buf? - if i >= len(buf) { - return i, buf[start:i] - } - - // reached end of block? - if buf[i] == stop || buf[i] == ' ' { - return i, buf[start:i] - } - } -} - -func scanTagValue(buf []byte, i int) (int, []byte) { - start := i - for { - if i >= len(buf) { - break - } - - if buf[i] == ',' && buf[i-1] != '\\' { - break - } - i++ - } - return i, buf[start:i] -} - -func scanFieldValue(buf []byte, i int) (int, []byte) { - start := i - quoted := false - for { - if i >= len(buf) { - break - } - - // Only escape char for a field value is a double-quote - if buf[i] == '\\' && i+1 < len(buf) && buf[i+1] == '"' { - i += 2 - continue - } - - // Quoted value? (e.g. 
string) - if buf[i] == '"' { - i++ - quoted = !quoted - continue - } - - if buf[i] == ',' && !quoted { - break - } - i++ - } - return i, buf[start:i] -} - -func escapeMeasurement(in []byte) []byte { - for b, esc := range measurementEscapeCodes { - in = bytes.Replace(in, []byte{b}, esc, -1) - } - return in -} - -func unescapeMeasurement(in []byte) []byte { - for b, esc := range measurementEscapeCodes { - in = bytes.Replace(in, esc, []byte{b}, -1) - } - return in -} - -func escapeTag(in []byte) []byte { - for b, esc := range tagEscapeCodes { - if bytes.Contains(in, []byte{b}) { - in = bytes.Replace(in, []byte{b}, esc, -1) - } - } - return in -} - -func unescapeTag(in []byte) []byte { - for b, esc := range tagEscapeCodes { - if bytes.Contains(in, []byte{b}) { - in = bytes.Replace(in, esc, []byte{b}, -1) - } - } - return in -} - -// escapeStringField returns a copy of in with any double quotes or -// backslashes with escaped values -func escapeStringField(in string) string { - var out []byte - i := 0 - for { - if i >= len(in) { - break - } - // escape double-quotes - if in[i] == '\\' { - out = append(out, '\\') - out = append(out, '\\') - i++ - continue - } - // escape double-quotes - if in[i] == '"' { - out = append(out, '\\') - out = append(out, '"') - i++ - continue - } - out = append(out, in[i]) - i++ - - } - return string(out) -} - -// unescapeStringField returns a copy of in with any escaped double-quotes -// or backslashes unescaped -func unescapeStringField(in string) string { - var out []byte - i := 0 - for { - if i >= len(in) { - break - } - // unescape backslashes - if in[i] == '\\' && i+1 < len(in) && in[i+1] == '\\' { - out = append(out, '\\') - i += 2 - continue - } - // unescape double-quotes - if in[i] == '\\' && i+1 < len(in) && in[i+1] == '"' { - out = append(out, '"') - i += 2 - continue - } - out = append(out, in[i]) - i++ - - } - return string(out) -} - -// NewPoint returns a new point with the given measurement name, tags, fields and timestamp. If -// an unsupported field value (NaN) or out of range time is passed, this function returns an error. -func NewPoint(name string, tags Tags, fields Fields, time time.Time) (Point, error) { - if len(fields) == 0 { - return nil, ErrPointMustHaveAField - } - if !time.IsZero() { - if err := CheckTime(time); err != nil { - return nil, err - } - } - - for key, value := range fields { - if fv, ok := value.(float64); ok { - // Ensure the caller validates and handles invalid field values - if math.IsNaN(fv) { - return nil, fmt.Errorf("NaN is an unsupported value for field %s", key) - } - } - if len(key) == 0 { - return nil, fmt.Errorf("all fields must have non-empty names") - } - } - - return &point{ - key: MakeKey([]byte(name), tags), - time: time, - fields: fields.MarshalBinary(), - }, nil -} - -// NewPointFromBytes returns a new Point from a marshalled Point. -func NewPointFromBytes(b []byte) (Point, error) { - p := &point{} - if err := p.UnmarshalBinary(b); err != nil { - return nil, err - } - if len(p.Fields()) == 0 { - return nil, ErrPointMustHaveAField - } - return p, nil -} - -// MustNewPoint returns a new point with the given measurement name, tags, fields and timestamp. If -// an unsupported field value (NaN) is passed, this function panics. 
-func MustNewPoint(name string, tags Tags, fields Fields, time time.Time) Point { - pt, err := NewPoint(name, tags, fields, time) - if err != nil { - panic(err.Error()) - } - return pt -} - -func (p *point) Data() []byte { - return p.data -} - -func (p *point) SetData(b []byte) { - p.data = b -} - -func (p *point) Key() []byte { - return p.key -} - -func (p *point) name() []byte { - _, name := scanTo(p.key, 0, ',') - return name -} - -// Name return the measurement name for the point -func (p *point) Name() string { - if p.cachedName != "" { - return p.cachedName - } - p.cachedName = string(escape.Unescape(p.name())) - return p.cachedName -} - -// SetName updates the measurement name for the point -func (p *point) SetName(name string) { - p.cachedName = "" - p.key = MakeKey([]byte(name), p.Tags()) -} - -// Time return the timestamp for the point -func (p *point) Time() time.Time { - return p.time -} - -// SetTime updates the timestamp for the point -func (p *point) SetTime(t time.Time) { - p.time = t -} - -// Tags returns the tag set for the point -func (p *point) Tags() Tags { - return parseTags(p.key) -} - -func parseTags(buf []byte) Tags { - tags := map[string]string{} - - if len(buf) != 0 { - pos, name := scanTo(buf, 0, ',') - - // it's an empyt key, so there are no tags - if len(name) == 0 { - return tags - } - - i := pos + 1 - var key, value []byte - for { - if i >= len(buf) { - break - } - i, key = scanTo(buf, i, '=') - i, value = scanTagValue(buf, i+1) - - if len(value) == 0 { - continue - } - - tags[string(unescapeTag(key))] = string(unescapeTag(value)) - - i++ - } - } - return tags -} - -// MakeKey creates a key for a set of tags. -func MakeKey(name []byte, tags Tags) []byte { - // unescape the name and then re-escape it to avoid double escaping. - // The key should always be stored in escaped form. - return append(escapeMeasurement(unescapeMeasurement(name)), tags.HashKey()...) 
-} - -// SetTags replaces the tags for the point -func (p *point) SetTags(tags Tags) { - p.key = MakeKey([]byte(p.Name()), tags) -} - -// AddTag adds or replaces a tag value for a point -func (p *point) AddTag(key, value string) { - tags := p.Tags() - tags[key] = value - p.key = MakeKey([]byte(p.Name()), tags) -} - -// Fields returns the fields for the point -func (p *point) Fields() Fields { - if p.cachedFields != nil { - return p.cachedFields - } - p.cachedFields = p.unmarshalBinary() - return p.cachedFields -} - -// SetPrecision will round a time to the specified precision -func (p *point) SetPrecision(precision string) { - switch precision { - case "n": - case "u": - p.SetTime(p.Time().Truncate(time.Microsecond)) - case "ms": - p.SetTime(p.Time().Truncate(time.Millisecond)) - case "s": - p.SetTime(p.Time().Truncate(time.Second)) - case "m": - p.SetTime(p.Time().Truncate(time.Minute)) - case "h": - p.SetTime(p.Time().Truncate(time.Hour)) - } -} - -func (p *point) String() string { - if p.Time().IsZero() { - return string(p.Key()) + " " + string(p.fields) - } - return string(p.Key()) + " " + string(p.fields) + " " + strconv.FormatInt(p.UnixNano(), 10) -} - -func (p *point) MarshalBinary() ([]byte, error) { - tb, err := p.time.MarshalBinary() - if err != nil { - return nil, err - } - - b := make([]byte, 8+len(p.key)+len(p.fields)+len(tb)) - i := 0 - - binary.BigEndian.PutUint32(b[i:], uint32(len(p.key))) - i += 4 - - i += copy(b[i:], p.key) - - binary.BigEndian.PutUint32(b[i:i+4], uint32(len(p.fields))) - i += 4 - - i += copy(b[i:], p.fields) - - copy(b[i:], tb) - return b, nil -} - -func (p *point) UnmarshalBinary(b []byte) error { - var i int - keyLen := int(binary.BigEndian.Uint32(b[:4])) - i += int(4) - - p.key = b[i : i+keyLen] - i += keyLen - - fieldLen := int(binary.BigEndian.Uint32(b[i : i+4])) - i += int(4) - - p.fields = b[i : i+fieldLen] - i += fieldLen - - p.time = time.Now() - p.time.UnmarshalBinary(b[i:]) - return nil -} - -func (p *point) PrecisionString(precision string) string { - if p.Time().IsZero() { - return fmt.Sprintf("%s %s", p.Key(), string(p.fields)) - } - return fmt.Sprintf("%s %s %d", p.Key(), string(p.fields), - p.UnixNano()/GetPrecisionMultiplier(precision)) -} - -func (p *point) RoundedString(d time.Duration) string { - if p.Time().IsZero() { - return fmt.Sprintf("%s %s", p.Key(), string(p.fields)) - } - return fmt.Sprintf("%s %s %d", p.Key(), string(p.fields), - p.time.Round(d).UnixNano()) -} - -func (p *point) unmarshalBinary() Fields { - return newFieldsFromBinary(p.fields) -} - -func (p *point) HashID() uint64 { - h := fnv.New64a() - h.Write(p.key) - sum := h.Sum64() - return sum -} - -func (p *point) UnixNano() int64 { - return p.Time().UnixNano() -} - -// Tags represents a mapping between a Point's tag names and their -// values. -type Tags map[string]string - -// HashKey hashes all of a tag's keys. -func (t Tags) HashKey() []byte { - // Empty maps marshal to empty bytes. - if len(t) == 0 { - return nil - } - - escaped := Tags{} - for k, v := range t { - ek := escapeTag([]byte(k)) - ev := escapeTag([]byte(v)) - - if len(ev) > 0 { - escaped[string(ek)] = string(ev) - } - } - - // Extract keys and determine final size. - sz := len(escaped) + (len(escaped) * 2) // separators - keys := make([]string, len(escaped)+1) - i := 0 - for k, v := range escaped { - keys[i] = k - i++ - sz += len(k) + len(v) - } - keys = keys[:i] - sort.Strings(keys) - // Generate marshaled bytes. 
- b := make([]byte, sz) - buf := b - idx := 0 - for _, k := range keys { - buf[idx] = ',' - idx++ - copy(buf[idx:idx+len(k)], k) - idx += len(k) - buf[idx] = '=' - idx++ - v := escaped[k] - copy(buf[idx:idx+len(v)], v) - idx += len(v) - } - return b[:idx] -} - -// Fields represents a mapping between a Point's field names and their -// values. -type Fields map[string]interface{} - -func parseNumber(val []byte) (interface{}, error) { - if val[len(val)-1] == 'i' { - val = val[:len(val)-1] - return strconv.ParseInt(string(val), 10, 64) - } - for i := 0; i < len(val); i++ { - // If there is a decimal or an N (NaN), I (Inf), parse as float - if val[i] == '.' || val[i] == 'N' || val[i] == 'n' || val[i] == 'I' || val[i] == 'i' || val[i] == 'e' { - return strconv.ParseFloat(string(val), 64) - } - if val[i] < '0' && val[i] > '9' { - return string(val), nil - } - } - return strconv.ParseFloat(string(val), 64) -} - -func newFieldsFromBinary(buf []byte) Fields { - fields := Fields{} - var ( - i int - name, valueBuf []byte - value interface{} - err error - ) - for { - if i >= len(buf) { - break - } - - i, name = scanTo(buf, i, '=') - name = escape.Unescape(name) - - i, valueBuf = scanFieldValue(buf, i+1) - if len(name) > 0 { - if len(valueBuf) == 0 { - fields[string(name)] = nil - continue - } - - // If the first char is a double-quote, then unmarshal as string - if valueBuf[0] == '"' { - value = unescapeStringField(string(valueBuf[1 : len(valueBuf)-1])) - // Check for numeric characters and special NaN or Inf - } else if (valueBuf[0] >= '0' && valueBuf[0] <= '9') || valueBuf[0] == '-' || valueBuf[0] == '.' || - valueBuf[0] == 'N' || valueBuf[0] == 'n' || // NaN - valueBuf[0] == 'I' || valueBuf[0] == 'i' { // Inf - - value, err = parseNumber(valueBuf) - if err != nil { - panic(fmt.Sprintf("unable to parse number value '%v': %v", string(valueBuf), err)) - } - - // Otherwise parse it as bool - } else { - value, err = strconv.ParseBool(string(valueBuf)) - if err != nil { - panic(fmt.Sprintf("unable to parse bool value '%v': %v\n", string(valueBuf), err)) - } - } - fields[string(name)] = value - } - i++ - } - return fields -} - -// MarshalBinary encodes all the fields to their proper type and returns the binary -// represenation -// NOTE: uint64 is specifically not supported due to potential overflow when we decode -// again later to an int64 -func (p Fields) MarshalBinary() []byte { - b := []byte{} - keys := make([]string, len(p)) - i := 0 - for k := range p { - keys[i] = k - i++ - } - sort.Strings(keys) - - for _, k := range keys { - v := p[k] - b = append(b, []byte(escape.String(k))...) - b = append(b, '=') - switch t := v.(type) { - case int: - b = append(b, []byte(strconv.FormatInt(int64(t), 10))...) - b = append(b, 'i') - case int8: - b = append(b, []byte(strconv.FormatInt(int64(t), 10))...) - b = append(b, 'i') - case int16: - b = append(b, []byte(strconv.FormatInt(int64(t), 10))...) - b = append(b, 'i') - case int32: - b = append(b, []byte(strconv.FormatInt(int64(t), 10))...) - b = append(b, 'i') - case int64: - b = append(b, []byte(strconv.FormatInt(t, 10))...) - b = append(b, 'i') - case uint: - b = append(b, []byte(strconv.FormatInt(int64(t), 10))...) - b = append(b, 'i') - case uint8: - b = append(b, []byte(strconv.FormatInt(int64(t), 10))...) - b = append(b, 'i') - case uint16: - b = append(b, []byte(strconv.FormatInt(int64(t), 10))...) - b = append(b, 'i') - case uint32: - b = append(b, []byte(strconv.FormatInt(int64(t), 10))...) 
- b = append(b, 'i') - case float32: - val := []byte(strconv.FormatFloat(float64(t), 'f', -1, 32)) - b = append(b, val...) - case float64: - val := []byte(strconv.FormatFloat(t, 'f', -1, 64)) - b = append(b, val...) - case bool: - b = append(b, []byte(strconv.FormatBool(t))...) - case []byte: - b = append(b, t...) - case string: - b = append(b, '"') - b = append(b, []byte(escapeStringField(t))...) - b = append(b, '"') - case nil: - // skip - default: - // Can't determine the type, so convert to string - b = append(b, '"') - b = append(b, []byte(escapeStringField(fmt.Sprintf("%v", v)))...) - b = append(b, '"') - - } - b = append(b, ',') - } - if len(b) > 0 { - return b[0 : len(b)-1] - } - return b -} - -type indexedSlice struct { - indices []int - b []byte -} - -func (s *indexedSlice) Less(i, j int) bool { - _, a := scanTo(s.b, s.indices[i], '=') - _, b := scanTo(s.b, s.indices[j], '=') - return bytes.Compare(a, b) < 0 -} - -func (s *indexedSlice) Swap(i, j int) { - s.indices[i], s.indices[j] = s.indices[j], s.indices[i] -} - -func (s *indexedSlice) Len() int { - return len(s.indices) -} diff --git a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/rows.go b/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/rows.go deleted file mode 100644 index 72435f5c..00000000 --- a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/rows.go +++ /dev/null @@ -1,60 +0,0 @@ -package models - -import ( - "hash/fnv" - "sort" -) - -// Row represents a single row returned from the execution of a statement. -type Row struct { - Name string `json:"name,omitempty"` - Tags map[string]string `json:"tags,omitempty"` - Columns []string `json:"columns,omitempty"` - Values [][]interface{} `json:"values,omitempty"` - Err error `json:"err,omitempty"` -} - -// SameSeries returns true if r contains values for the same series as o. -func (r *Row) SameSeries(o *Row) bool { - return r.tagsHash() == o.tagsHash() && r.Name == o.Name -} - -// tagsHash returns a hash of tag key/value pairs. -func (r *Row) tagsHash() uint64 { - h := fnv.New64a() - keys := r.tagsKeys() - for _, k := range keys { - h.Write([]byte(k)) - h.Write([]byte(r.Tags[k])) - } - return h.Sum64() -} - -// tagKeys returns a sorted list of tag keys. -func (r *Row) tagsKeys() []string { - a := make([]string, 0, len(r.Tags)) - for k := range r.Tags { - a = append(a, k) - } - sort.Strings(a) - return a -} - -// Rows represents a collection of rows. Rows implements sort.Interface. -type Rows []*Row - -func (p Rows) Len() int { return len(p) } - -func (p Rows) Less(i, j int) bool { - // Sort by name first. - if p[i].Name != p[j].Name { - return p[i].Name < p[j].Name - } - - // Sort by tag set hash. Tags don't have a meaningful sort order so we - // just compute a hash and sort by that instead. This allows the tests - // to receive rows in a predictable order every time. - return p[i].tagsHash() < p[j].tagsHash() -} - -func (p Rows) Swap(i, j int) { p[i], p[j] = p[j], p[i] } diff --git a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/time.go b/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/time.go deleted file mode 100644 index 9e415777..00000000 --- a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/models/time.go +++ /dev/null @@ -1,51 +0,0 @@ -package models - -// Helper time methods since parsing time can easily overflow and we only support a -// specific time range. 
- -import ( - "fmt" - "math" - "time" -) - -var ( - // MaxNanoTime is the maximum time that can be represented via int64 nanoseconds since the epoch. - MaxNanoTime = time.Unix(0, math.MaxInt64).UTC() - // MinNanoTime is the minumum time that can be represented via int64 nanoseconds since the epoch. - MinNanoTime = time.Unix(0, math.MinInt64).UTC() - - // ErrTimeOutOfRange gets returned when time is out of the representable range using int64 nanoseconds since the epoch. - ErrTimeOutOfRange = fmt.Errorf("time outside range %s - %s", MinNanoTime, MaxNanoTime) -) - -// SafeCalcTime safely calculates the time given. Will return error if the time is outside the -// supported range. -func SafeCalcTime(timestamp int64, precision string) (time.Time, error) { - mult := GetPrecisionMultiplier(precision) - if t, ok := safeSignedMult(timestamp, mult); ok { - return time.Unix(0, t).UTC(), nil - } - - return time.Time{}, ErrTimeOutOfRange -} - -// CheckTime checks that a time is within the safe range. -func CheckTime(t time.Time) error { - if t.Before(MinNanoTime) || t.After(MaxNanoTime) { - return ErrTimeOutOfRange - } - return nil -} - -// Perform the multiplication and check to make sure it didn't overflow. -func safeSignedMult(a, b int64) (int64, bool) { - if a == 0 || b == 0 || a == 1 || b == 1 { - return a * b, true - } - if a == math.MinInt64 || b == math.MaxInt64 { - return 0, false - } - c := a * b - return c, c/b == a -} diff --git a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/pkg/escape/bytes.go b/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/pkg/escape/bytes.go deleted file mode 100644 index 15e9cf29..00000000 --- a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/pkg/escape/bytes.go +++ /dev/null @@ -1,45 +0,0 @@ -package escape - -import "bytes" - -func Bytes(in []byte) []byte { - for b, esc := range Codes { - in = bytes.Replace(in, []byte{b}, esc, -1) - } - return in -} - -func Unescape(in []byte) []byte { - i := 0 - inLen := len(in) - var out []byte - - for { - if i >= inLen { - break - } - if in[i] == '\\' && i+1 < inLen { - switch in[i+1] { - case ',': - out = append(out, ',') - i += 2 - continue - case '"': - out = append(out, '"') - i += 2 - continue - case ' ': - out = append(out, ' ') - i += 2 - continue - case '=': - out = append(out, '=') - i += 2 - continue - } - } - out = append(out, in[i]) - i += 1 - } - return out -} diff --git a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/pkg/escape/strings.go b/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/pkg/escape/strings.go deleted file mode 100644 index 330fbf42..00000000 --- a/core/poller/Godeps/_workspace/src/github.com/influxdata/influxdb/pkg/escape/strings.go +++ /dev/null @@ -1,34 +0,0 @@ -package escape - -import "strings" - -var ( - Codes = map[byte][]byte{ - ',': []byte(`\,`), - '"': []byte(`\"`), - ' ': []byte(`\ `), - '=': []byte(`\=`), - } - - codesStr = map[string]string{} -) - -func init() { - for k, v := range Codes { - codesStr[string(k)] = string(v) - } -} - -func UnescapeString(in string) string { - for b, esc := range codesStr { - in = strings.Replace(in, esc, b, -1) - } - return in -} - -func String(in string) string { - for b, esc := range codesStr { - in = strings.Replace(in, b, esc, -1) - } - return in -} diff --git a/core/poller/Makefile b/core/poller/Makefile index 8d9bd8eb..c71e85e8 100644 --- a/core/poller/Makefile +++ b/core/poller/Makefile @@ -1,2 +1,34 @@ -all: - go build ./... 
+PACKAGES=`find ./cmd -mindepth 1 -maxdepth 1 -type d` +LIBRARYS= + +all: clean man build + +clean: + for p in $(PACKAGES); do rm -f `echo $${p}|cut -d/ -f3`{,.1,.1.gz}; done + +build: + for p in $(PACKAGES); do go build -ldflags "-w -s" $${p}; done + +linux: + for p in $(PACKAGES); do GOOS=linux go build -ldflags "-w -s" $${p}; done + +install: + go install -ldflags "-w -s" ./... + +test: lint + for p in $(PACKAGES) $(LIBRARYS); do go test -race -covermode=atomic $${p}; done + +lint: + goimports -l $(PACKAGES) $(LIBRARYS) + gofmt -l $(PACKAGES) $(LIBRARYS) + errcheck $(PACKAGES) $(LIBRARYS) + golint $(PACKAGES) $(LIBRARYS) + go vet $(PACKAGES) $(LIBRARYS) + +man: + script/build_manpages.sh ./ + +deps: + rm -rf Godeps vendor + godep save ./... + godep update ./... diff --git a/core/poller/Procfile b/core/poller/Procfile deleted file mode 100644 index a4fca2b1..00000000 --- a/core/poller/Procfile +++ /dev/null @@ -1 +0,0 @@ -worker: unifi diff --git a/core/poller/README.md b/core/poller/README.md index 1c46343d..13c95a62 100644 --- a/core/poller/README.md +++ b/core/poller/README.md @@ -1,32 +1,20 @@ # Unifi -Collect your Unifi client data every 15 seconds and send it to an InfluxDB instance. +Collect your Unifi Controller Client data and send it to an InfluxDB instance. -![image](https://cloud.githubusercontent.com/assets/79995/19002122/6b81f928-86ff-11e6-8ab4-d67f943588f4.png) +![image](https://raw.githubusercontent.com/davidnewhall/unifi/master/grafana-unifi-dashboard.png) ## Deploying -The repository is ready for deployment on Heroku. Steps to deploy: Clone the repository and using `.env.example` create your own `.env` file with your Unifi GUI and InfluxDB credentials. -Create your heroku application: + +Set your environment variables before running: ``` -heroku create [name] +source .env ; ./unifi-poller ``` -Set your environment variables before deploying: - -``` -heroku config:set $(cat .env | grep -v ^# | xargs) -``` - -Push to heroku: - -``` -git push heroku master -``` - -## Copyright +## Copyright & License Copyright © 2016 Garrett Bjerkhoel. See [MIT-LICENSE](http://github.com/dewski/unifi/blob/master/MIT-LICENSE) for details. diff --git a/core/poller/app.json b/core/poller/app.json deleted file mode 100644 index 243a9b6c..00000000 --- a/core/poller/app.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "name": "unifi", - "description": "Data logger for Unifi controllers", - "keywords": [ - "go", - ], - "image": "heroku/go:1.6", - "mount_dir": "src/github.com/dewski/unifi", - "website": "https://github.com/dewski/unifi", - "repository": "https://github.com/dewski/unifi" -} diff --git a/core/poller/cmd/unifi-poller/README.md b/core/poller/cmd/unifi-poller/README.md new file mode 100644 index 00000000..9a2e066f --- /dev/null +++ b/core/poller/cmd/unifi-poller/README.md @@ -0,0 +1,51 @@ +unifi-poller(1) -- Utility to poll Unifi Metrics and drop them into InfluxDB +=== + +## SYNOPSIS + +`unifi-poller -c /usr/local/etc/unifi-poller.conf` + +## DESCRIPTION + +* This application polls a Unifi Controller API for Client and Device Metrics. +* The metrics are then stored in an InfluxDB instance. + +## OPTIONS + +`unifi-poller [-c ] [-h] [-v]` + + -c, --config + Provide a configuration file (instead of the default). + + -v, --version + Display version and exit. + + -h, --help + Display usage and exit. + + +## GO DURATION + +This application uses the Go Time Durations for a polling interval. +The format is an integer followed by a time unit. You may append multiple time units +to add them together. 
Some valid time units are: + + `us` (microsecond) + `ns` (nanosecond) + `ms` (millisecond) + `s` (second) + `m` (minute) + `h` (hour) + +Example Use: `1m`, `5h`, `100ms`, `17s`, `1s45ms`, `1m3s` + +## AUTHOR + +* Garrett Bjerkhoel (original code) ~ 2016 +* David Newhall II (rewritten) ~ 4/20/2018 + +## LOCATION + +* https://github.com/davidnewhall/unifi-poller +* /usr/local/bin/unifi-poller +* previously: https://github.com/dewski/unifi diff --git a/core/poller/clients.go b/core/poller/cmd/unifi-poller/clients.go similarity index 82% rename from core/poller/clients.go rename to core/poller/cmd/unifi-poller/clients.go index 44ff1237..ca6dd04d 100644 --- a/core/poller/clients.go +++ b/core/poller/cmd/unifi-poller/clients.go @@ -1,7 +1,6 @@ package main import ( - "log" "strconv" "time" @@ -46,6 +45,9 @@ type Client struct { BytesR int64 `json:"bytes-r"` Ccq int64 `json:"ccq"` Channel int `json:"channel"` + DevCat int `json:"dev_cat"` + DevFamily int `json:"dev_family"` + DevID int `json:"dev_id"` DpiStats []DpiStat `json:"dpi_stats"` DpiStatsLastUpdated int64 `json:"dpi_stats_last_updated"` Essid string `json:"essid"` @@ -67,6 +69,8 @@ type Client struct { Noise int64 `json:"noise"` Note string `json:"note"` Noted bool `json:"noted"` + OsClass int `json:"os_class"` + OsName int `json:"os_name"` Oui string `json:"oui"` PowersaveEnabled bool `json:"powersave_enabled"` QosPolicyApplied bool `json:"qos_policy_applied"` @@ -94,10 +98,16 @@ type Client struct { UserGroupID string `json:"usergroup_id"` UseFixedIP bool `json:"use_fixedip"` Vlan int `json:"vlan"` + WiredRxBytes int64 `json:"wired-rx_bytes"` + WiredRxBytesR int64 `json:"wired-rx_bytes-r"` + WiredRxPackets int64 `json:"wired-rx_packets"` + WiredTxBytes int64 `json:"wired-tx_bytes"` + WiredTxBytesR int64 `json:"wired-tx_bytes-r"` + WiredTxPackets int64 `json:"wired-tx_packets"` } // Point generates a client's datapoint for InfluxDB. 
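> Editor's note on the GO DURATION section of the new unifi-poller README above: the poller hands the `INTERVAL` environment variable to Go's standard `time.ParseDuration` (see `GetConfig` later in this diff), so any of the unit strings listed there can be combined. A minimal, self-contained sketch:

```go
package main

import (
	"fmt"
	"log"
	"time"
)

func main() {
	// The same example strings the README lists; ParseDuration is what
	// GetConfig uses to turn INTERVAL into the polling interval.
	for _, s := range []string{"1m", "5h", "100ms", "17s", "1s45ms", "1m3s"} {
		d, err := time.ParseDuration(s)
		if err != nil {
			log.Fatalf("bad duration %q: %v", s, err)
		}
		fmt.Printf("%-7s -> %v\n", s, d)
	}
}
```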
-func (c Client) Point() *influx.Point { +func (c Client) Point() (*influx.Point, error) { if c.Name == "" && c.Hostname != "" { c.Name = c.Hostname } else if c.Hostname == "" && c.Name != "" { @@ -111,23 +121,23 @@ func (c Client) Point() *influx.Point { "mac": c.Mac, "user_id": c.UserID, "site_id": c.SiteID, - "ip": c.IP, - "fixed_ip": c.FixedIP, - "essid": c.Essid, - "bssid": c.Bssid, - "network": c.Network, "network_id": c.NetworkID, "usergroup_id": c.UserGroupID, "ap_mac": c.ApMac, "gw_mac": c.GwMac, "sw_mac": c.SwMac, - "sw_port": strconv.Itoa(c.SwPort), "oui": c.Oui, - "name": c.Name, - "hostname": c.Hostname, "radio_name": c.RadioName, "radio": c.Radio, "radio_proto": c.RadioProto, + "name": c.Name, + "fixed_ip": c.FixedIP, + "sw_port": strconv.Itoa(c.SwPort), + "os_class": strconv.Itoa(c.OsClass), + "os_name": strconv.Itoa(c.OsName), + "dev_cat": strconv.Itoa(c.DevCat), + "dev_id": strconv.Itoa(c.DevID), + "dev_family": strconv.Itoa(c.DevFamily), "authorized": strconv.FormatBool(c.Authorized), "is_11r": strconv.FormatBool(c.Is11R), "is_wired": strconv.FormatBool(c.IsWired), @@ -143,6 +153,10 @@ func (c Client) Point() *influx.Point { "vlan": strconv.Itoa(c.Vlan), } fields := map[string]interface{}{ + "ip": c.IP, + "essid": c.Essid, + "bssid": c.Bssid, + "hostname": c.Hostname, "dpi_stats_last_updated": c.DpiStatsLastUpdated, "last_seen_by_uap": c.LastSeenByUAP, "last_seen_by_ugw": c.LastSeenByUGW, @@ -157,6 +171,7 @@ func (c Client) Point() *influx.Point { "idle_time": c.IdleTime, "last_seen": c.LastSeen, "latest_assoc_time": c.LatestAssocTime, + "network": c.Network, "noise": c.Noise, "note": c.Note, "roam_count": c.RoamCount, @@ -172,12 +187,13 @@ func (c Client) Point() *influx.Point { "tx_power": c.TxPower, "tx_rate": c.TxRate, "uptime": c.Uptime, + "wired-rx_bytes": c.WiredRxBytes, + "wired-rx_bytes-r": c.WiredRxBytesR, + "wired-rx_packets": c.WiredRxPackets, + "wired-tx_bytes": c.WiredTxBytes, + "wired-tx_bytes-r": c.WiredTxBytesR, + "wired-tx_packets": c.WiredTxPackets, } - pt, err := influx.NewPoint("clients", tags, fields, time.Now()) - if err != nil { - log.Println("Error creating point:", err) - return nil - } - return pt + return influx.NewPoint("clients", tags, fields, time.Now()) } diff --git a/core/poller/config.go b/core/poller/cmd/unifi-poller/config.go similarity index 76% rename from core/poller/config.go rename to core/poller/cmd/unifi-poller/config.go index fd595e20..1a17949a 100644 --- a/core/poller/config.go +++ b/core/poller/cmd/unifi-poller/config.go @@ -16,12 +16,20 @@ const ( NetworkPath = "/api/s/default/rest/networkconf" // UserGroupPath contains usergroup configurations. UserGroupPath = "/api/s/default/rest/usergroup" + // App defaults in case they're missing from the config. + defaultInterval = 30 * time.Second + defaultInfxDb = "unifi" + defaultInfxUser = "unifi" + defaultInfxPass = "unifi" + defaultInfxURL = "http://127.0.0.1:8086" + defaultUnifUser = "influx" + defaultUnifURL = "https://127.0.0.1:8443" ) // Config represents the data needed to poll a controller and report to influxdb. 
type Config struct { Interval time.Duration `json:"interval",toml:"interval",yaml:"interval"` - InfluxAddr string `json:"influx_addr",toml:"influx_addr",yaml:"influx_addr"` + InfluxURL string `json:"influx_url",toml:"influx_addr",yaml:"influx_addr"` InfluxUser string `json:"influx_user",toml:"influx_user",yaml:"influx_user"` InfluxPass string `json:"influx_pass",toml:"influx_pass",yaml:"influx_pass"` InfluxDB string `json:"influx_db",toml:"influx_db",yaml:"influx_db"` diff --git a/core/poller/cmd/unifi-poller/devices.go b/core/poller/cmd/unifi-poller/devices.go new file mode 100644 index 00000000..06ab7d0f --- /dev/null +++ b/core/poller/cmd/unifi-poller/devices.go @@ -0,0 +1 @@ +package main diff --git a/core/poller/main.go b/core/poller/cmd/unifi-poller/main.go similarity index 77% rename from core/poller/main.go rename to core/poller/cmd/unifi-poller/main.go index b9cb9727..505873ca 100644 --- a/core/poller/main.go +++ b/core/poller/cmd/unifi-poller/main.go @@ -4,7 +4,6 @@ import ( "bytes" "crypto/tls" "encoding/json" - "errors" "io/ioutil" "log" "net/http" @@ -13,6 +12,7 @@ import ( "time" influx "github.com/influxdata/influxdb/client/v2" + "github.com/pkg/errors" ) func main() { @@ -20,16 +20,17 @@ func main() { if err := config.AuthController(); err != nil { log.Fatal(err) } - log.Println("Successfully authenticated to Unifi Controller!") + log.Println("Authenticated to Unifi Controller", config.UnifiBase, "as user", config.UnifiUser) infdb, err := influx.NewHTTPClient(influx.HTTPConfig{ - Addr: config.InfluxAddr, + Addr: config.InfluxURL, Username: config.InfluxUser, Password: config.InfluxPass, }) if err != nil { log.Fatal(err) } + log.Println("Logging Unifi Metrics to InfluXDB @", config.InfluxURL, "as user", config.InfluxUser) log.Println("Polling Unifi Controller, interval:", config.Interval) config.PollUnifiController(infdb) } @@ -37,21 +38,21 @@ func main() { // GetConfig parses and returns our configuration data. func GetConfig() Config { // TODO: A real config file. - var err error - config := Config{ - InfluxAddr: os.Getenv("INFLUXDB_ADDR"), + interval, err := time.ParseDuration(os.Getenv("INTERVAL")) + if err != nil { + log.Println("Invalid Interval, defaulting to", defaultInterval) + interval = time.Duration(defaultInterval) + } + return Config{ + InfluxURL: os.Getenv("INFLUXDB_URL"), InfluxUser: os.Getenv("INFLUXDB_USERNAME"), InfluxPass: os.Getenv("INFLUXDB_PASSWORD"), InfluxDB: os.Getenv("INFLUXDB_DATABASE"), UnifiUser: os.Getenv("UNIFI_USERNAME"), UnifiPass: os.Getenv("UNIFI_PASSWORD"), UnifiBase: "https://" + os.Getenv("UNIFI_ADDR") + ":" + os.Getenv("UNIFI_PORT"), + Interval: interval, } - if config.Interval, err = time.ParseDuration(os.Getenv("INTERVAL")); err != nil { - log.Println("Invalid Interval, defaulting to 15 seconds.") - config.Interval = time.Duration(time.Second * 15) - } - return config } // AuthController creates a http.Client with authenticated cookies. 
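> Editor's note: `GetConfig` above now reads everything from the environment, and the top-level README says to `source .env ; ./unifi-poller`. The repository's actual `.env.example` is not part of this diff, so the block below is only a hypothetical sketch assembled from the variable names `GetConfig` reads and the `default*` constants added to config.go; the Unifi password is a placeholder.

```
# Hypothetical .env sketch: names from GetConfig, values mirroring the config.go defaults.
# "export" keeps `source .env ; ./unifi-poller` working, since the poller reads these
# with os.Getenv in the child process.
export INFLUXDB_URL=http://127.0.0.1:8086
export INFLUXDB_USERNAME=unifi
export INFLUXDB_PASSWORD=unifi
export INFLUXDB_DATABASE=unifi
export UNIFI_USERNAME=influx
export UNIFI_PASSWORD=changeme   # placeholder; no default exists in config.go
export UNIFI_ADDR=127.0.0.1
export UNIFI_PORT=8443
export INTERVAL=30s
```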
@@ -60,18 +61,19 @@ func (c *Config) AuthController() error { json := `{"username": "` + c.UnifiUser + `","password": "` + c.UnifiPass + `"}` jar, err := cookiejar.New(nil) if err != nil { - return err + return errors.Wrap(err, "cookiejar.New(nil)") } c.uniClient = &http.Client{ Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}}, Jar: jar, } if req, err := c.uniRequest(LoginPath, json); err != nil { - return err + return errors.Wrap(err, "c.uniRequest(LoginPath, json)") } else if resp, err := c.uniClient.Do(req); err != nil { - return err + return errors.Wrap(err, "c.uniClient.Do(req)") } else if resp.StatusCode != http.StatusOK { - return errors.New("Error Authenticating with Unifi Controller") + return errors.Errorf("authentication failed (%v): %v (status: %v/%v)", + c.UnifiUser, c.UnifiBase+LoginPath, resp.StatusCode, resp.Status) } return nil } @@ -94,7 +96,11 @@ func (c *Config) PollUnifiController(infdb influx.Client) { } for _, client := range clients { - bp.AddPoint(client.Point()) + if pt, errr := client.Point(); errr != nil { + log.Println("client.Point():", errr) + } else { + bp.AddPoint(pt) + } } if err = infdb.Write(bp); err != nil { log.Println("infdb.Write(bp):", err) diff --git a/core/poller/Screen Shot 2018-04-22 at 12.28.14 PM.png b/core/poller/grafana-unifi-dashboard.png similarity index 100% rename from core/poller/Screen Shot 2018-04-22 at 12.28.14 PM.png rename to core/poller/grafana-unifi-dashboard.png diff --git a/core/poller/script/build_manpages.sh b/core/poller/script/build_manpages.sh new file mode 100755 index 00000000..a843e5ef --- /dev/null +++ b/core/poller/script/build_manpages.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +OUTPUT=$1 + +# This requires the installation of `ronn`: sudo gem install ronn +for f in cmd/*/README.md;do + # Strtip off cmd/ then strip off README to get the man-file name. 
+ PKGNOCMD="${f#cmd/}" + PKG="${PKGNOCMD%/README.md}" + echo "Creating Man Page: ${f} -> ${OUTPUT}${PKG}.1.gz" + ronn < "$f" | gzip -9 > "${OUTPUT}${PKG}.1.gz" +done diff --git a/core/poller/script/server b/core/poller/script/server deleted file mode 100644 index f7ddd9f3..00000000 --- a/core/poller/script/server +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -set -e - -# Load the environment variables needed for testing -export $(cat .env | grep -v ^# | xargs) - -go clean -go build -o unifi -./unifi diff --git a/core/poller/unifi-poller-grafana-dashboard.json b/core/poller/unifi-poller-grafana-dashboard.json new file mode 100644 index 00000000..af41b0bb --- /dev/null +++ b/core/poller/unifi-poller-grafana-dashboard.json @@ -0,0 +1,2217 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 1, + "id": 6, + "links": [], + "panels": [ + { + "columns": [], + "datasource": "Unifi", + "description": "", + "fontSize": "80%", + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 21, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 1, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "link": true, + "linkTargetBlank": true, + "linkUrl": "http://sleepers.pro", + "pattern": "Time", + "type": "hidden" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "groupBy": [ + { + "params": [ + "mac" + ], + "type": "tag" + }, + { + "params": [ + "oui" + ], + "type": "tag" + }, + { + "params": [ + "channel" + ], + "type": "tag" + }, + { + "params": [ + "radio" + ], + "type": "tag" + }, + { + "params": [ + "name" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "refId": "A", + "resultFormat": "table", + "select": [ + [ + { + "params": [ + "ip" + ], + "type": "field" + }, + { + "params": [], + "type": "last" + }, + { + "params": [ + "Address" + ], + "type": "alias" + } + ], + [ + { + "params": [ + "note" + ], + "type": "field" + }, + { + "params": [], + "type": "last" + }, + { + "params": [ + "Note" + ], + "type": "alias" + } + ], + [ + { + "params": [ + "network" + ], + "type": "field" + }, + { + "params": [], + "type": "last" + }, + { + "params": [ + "Network" + ], + "type": "alias" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + } + ], + "timeFrom": "1h", + "title": "Wireless Devices", + "transform": "table", + "type": "table" + }, + { + "columns": [], + "datasource": "Unifi", + "description": "", + "fontSize": "80%", + "gridPos": { + "h": 10, + "w": 12, + "x": 12, + "y": 0 + }, + "hideTimeOverride": false, + "id": 22, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "link": false, + "linkUrl": "http://$__cell", + "pattern": "Time", + "type": "hidden" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + 
"decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "groupBy": [ + { + "params": [ + "sw_port" + ], + "type": "tag" + }, + { + "params": [ + "mac" + ], + "type": "tag" + }, + { + "params": [ + "oui" + ], + "type": "tag" + }, + { + "params": [ + "use_fixedip" + ], + "type": "tag" + }, + { + "params": [ + "name" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "refId": "A", + "resultFormat": "table", + "select": [ + [ + { + "params": [ + "ip" + ], + "type": "field" + }, + { + "params": [], + "type": "last" + }, + { + "params": [ + "Address" + ], + "type": "alias" + } + ], + [ + { + "params": [ + "note" + ], + "type": "field" + }, + { + "params": [], + "type": "last" + }, + { + "params": [ + "Note" + ], + "type": "alias" + } + ], + [ + { + "params": [ + "network" + ], + "type": "field" + }, + { + "params": [], + "type": "last" + }, + { + "params": [ + "Network" + ], + "type": "alias" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "true" + } + ] + } + ], + "timeFrom": "1h", + "title": "Wired Devices", + "transform": "table", + "type": "table" + }, + { + "aliasColors": {}, + "breakPoint": "50%", + "cacheTimeout": null, + "combine": { + "label": "Others", + "threshold": 0 + }, + "datasource": "Unifi", + "decimals": 0, + "description": "", + "fontSize": "70%", + "format": "short", + "gridPos": { + "h": 8, + "w": 6, + "x": 0, + "y": 10 + }, + "hideTimeOverride": true, + "id": 9, + "interval": null, + "legend": { + "header": "", + "show": false, + "values": true + }, + "legendType": "Under graph", + "links": [], + "maxDataPoints": 3, + "nullPointMode": "connected", + "pieType": "pie", + "strokeWidth": 1, + "targets": [ + { + "alias": "Wired", + "groupBy": [ + { + "params": [ + "channel" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "select count(distinct(hostname)) FROM \"clients\" WHERE time > now() - 10m AND is_wired = 'true' group by channel", + "rawQuery": true, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "hostname" + ], + "type": "field" + }, + { + "params": [], + "type": "distinct" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + }, + { + "alias": "Channel $tag_channel", + "groupBy": [ + { + "params": [ + "$__interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": "fill" + } + ], + "orderByTime": "ASC", + "policy": "default", + "query": "select count(distinct(hostname)) FROM \"clients\" WHERE time > now() - 10m AND is_wired = 'false' group by channel", + "rawQuery": true, + "refId": "B", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ] + ], + "tags": [] + } + ], + "timeFrom": "1h", + "title": "Wifi Channels", + "transparent": true, + "type": "grafana-piechart-panel", + "valueName": "current" + }, + { + "aliasColors": {}, + "breakPoint": "25%", + "cacheTimeout": null, + "combine": { + "label": "Others", + "threshold": 0 + }, + "datasource": "Unifi", + "decimals": 0, + "description": "", + "fontSize": "70%", + "format": "short", + "gridPos": { + "h": 8, + "w": 6, + "x": 6, + "y": 10 + }, + "hideTimeOverride": true, + "id": 11, + "interval": null, + "legend": { + "header": "", + "percentage": true, + "show": false, + "values": false + }, + "legendType": 
"Under graph", + "links": [], + "maxDataPoints": 3, + "nullPointMode": "connected", + "pieType": "pie", + "strokeWidth": "3", + "targets": [ + { + "alias": "Upstairs / $tag_radio_proto / $tag_radio / $tag_radio_name", + "groupBy": [ + { + "params": [ + "channel" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "select count(distinct(hostname)) FROM \"clients\" WHERE (time > now() - 10m AND ap_mac = '80:2a:a8:10:ae:87') group by radio_proto, radio, radio_name, ap_mac", + "rawQuery": true, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "hostname" + ], + "type": "field" + }, + { + "params": [], + "type": "distinct" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + }, + { + "alias": "Downstairs / $tag_radio_proto / $tag_radio / $tag_radio_name", + "groupBy": [ + { + "params": [ + "$__interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": "fill" + } + ], + "orderByTime": "ASC", + "policy": "default", + "query": "select count(distinct(hostname)) FROM \"clients\" WHERE (time > now() - 10m AND ap_mac = '80:2a:a8:10:ae:0c') group by radio_proto, radio, radio_name, ap_mac", + "rawQuery": true, + "refId": "B", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ] + ], + "tags": [] + } + ], + "timeFrom": "1h", + "title": "AP Radio / Clients", + "transparent": true, + "type": "grafana-piechart-panel", + "valueName": "current" + }, + { + "aliasColors": {}, + "breakPoint": "50%", + "cacheTimeout": null, + "combine": { + "label": "Others", + "threshold": 0 + }, + "datasource": "Unifi", + "decimals": 0, + "description": "", + "fontSize": "70%", + "format": "short", + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 10 + }, + "hideTimeOverride": true, + "id": 12, + "interval": null, + "legend": { + "header": "", + "show": true, + "sort": "current", + "sortDesc": true, + "values": true + }, + "legendType": "Right side", + "links": [], + "maxDataPoints": 3, + "nullPointMode": "connected", + "pieType": "pie", + "strokeWidth": 1, + "targets": [ + { + "alias": "$tag_oui", + "groupBy": [ + { + "params": [ + "channel" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "select count(distinct(hostname)) FROM \"clients\" WHERE (time > now() - 60m) group by oui", + "rawQuery": true, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "hostname" + ], + "type": "field" + }, + { + "params": [], + "type": "distinct" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + } + ], + "timeFrom": "1h", + "title": "Client MAC OUI Breakdown", + "transparent": true, + "type": "grafana-piechart-panel", + "valueName": "current" + }, + { + "aliasColors": {}, + "breakPoint": "50%", + "cacheTimeout": null, + "combine": { + "label": "Others", + "threshold": 0 + }, + "datasource": "Unifi", + "decimals": 0, + "description": "", + "fontSize": "70%", + "format": "short", + "gridPos": { + "h": 8, + "w": 6, + "x": 18, + "y": 10 + }, + "hideTimeOverride": true, + "id": 14, + "interval": null, + "legend": { + "header": "", + "show": true, + "values": true + }, + "legendType": "Right side", + "links": [], + "maxDataPoints": 3, + "nullPointMode": "connected", + "pieType": "pie", + "strokeWidth": 1, + "targets": [ + { + "alias": 
"$tag_os_class, $tag_os_name, $tag_dev_cat, $tag_dev_family, $tag_dev_id,", + "groupBy": [ + { + "params": [ + "channel" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "select count(distinct(hostname)) FROM \"clients\" WHERE (time > now() - 10m) group by os_class, os_name, dev_cat, dev_family, dev_id ", + "rawQuery": true, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "hostname" + ], + "type": "field" + }, + { + "params": [], + "type": "distinct" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + } + ], + "timeFrom": "1h", + "title": "OS/Dev Class/ID Breakdown", + "transparent": true, + "type": "grafana-piechart-panel", + "valueName": "current" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Unifi", + "description": "Bandwidth usage per wireless devices as reported by the UAPs. Does not include amazon devices.", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 3, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": false, + "rightSide": true, + "show": true, + "sort": "avg", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "$tag_name $col", + "groupBy": [ + { + "params": [ + "30s" + ], + "type": "time" + }, + { + "params": [ + "name" + ], + "type": "tag" + }, + { + "params": [ + "none" + ], + "type": "fill" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT derivative(first(\"wired-rx_bytes\"), 1s) FROM \"clients\" WHERE (\"hostname\" = 'dns-rtn') AND $timeFilter GROUP BY time(10s) fill(none)", + "rawQuery": false, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "rx_bytes" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Rx" + ], + "type": "alias" + } + ], + [ + { + "params": [ + "tx_bytes" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Tx" + ], + "type": "alias" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + }, + { + "condition": "AND", + "key": "name", + "operator": "!~", + "value": "/^(amazon-)/" + } + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "UAP Bandwidth / Wireless Devices", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "Bps", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Unifi", + "description": "Cameras TX / RX bytes per second (calculated in 30 second buckets).", + "fill": 0, + "gridPos": { + "h": 7, + "w": 12, 
+ "x": 12, + "y": 18 + }, + "id": 15, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": false, + "rightSide": true, + "show": true, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "$tag_name $col", + "groupBy": [ + { + "params": [ + "30s" + ], + "type": "time" + }, + { + "params": [ + "name" + ], + "type": "tag" + }, + { + "params": [ + "none" + ], + "type": "fill" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT derivative(first(\"wired-rx_bytes\"), 1s) FROM \"clients\" WHERE (\"hostname\" = 'dns-rtn') AND $timeFilter GROUP BY time(10s) fill(none)", + "rawQuery": false, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "wired-rx_bytes" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Rx" + ], + "type": "alias" + } + ], + [ + { + "params": [ + "wired-tx_bytes" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Tx" + ], + "type": "alias" + } + ] + ], + "tags": [ + { + "key": "name", + "operator": "=~", + "value": "/^camera-(.*)/" + } + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Switch Bandwidth / Cameras", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "Bps", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Unifi", + "description": "Amazon Devices TX / RX bytes per second (calculated in 30 second buckets).", + "fill": 0, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 25 + }, + "id": 23, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": false, + "rightSide": true, + "show": true, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "w $tag_name $col", + "groupBy": [ + { + "params": [ + "30s" + ], + "type": "time" + }, + { + "params": [ + "name" + ], + "type": "tag" + }, + { + "params": [ + "none" + ], + "type": "fill" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT derivative(first(\"wired-rx_bytes\"), 1s) FROM \"clients\" WHERE (\"hostname\" = 'dns-rtn') AND $timeFilter GROUP BY time(10s) fill(none)", + "rawQuery": false, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "wired-rx_bytes" + ], + "type": "field" + }, + 
{ + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Rx" + ], + "type": "alias" + } + ], + [ + { + "params": [ + "wired-tx_bytes" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Tx" + ], + "type": "alias" + } + ] + ], + "tags": [ + { + "key": "name", + "operator": "=~", + "value": "/^amazon-(.*)/" + }, + { + "condition": "AND", + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + }, + { + "alias": "e $tag_name $col", + "groupBy": [ + { + "params": [ + "30s" + ], + "type": "time" + }, + { + "params": [ + "name" + ], + "type": "tag" + }, + { + "params": [ + "none" + ], + "type": "fill" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT derivative(first(\"wired-rx_bytes\"), 1s) FROM \"clients\" WHERE (\"hostname\" = 'dns-rtn') AND $timeFilter GROUP BY time(10s) fill(none)", + "rawQuery": false, + "refId": "B", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "rx_bytes" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Rx" + ], + "type": "alias" + } + ], + [ + { + "params": [ + "tx_bytes" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Tx" + ], + "type": "alias" + } + ] + ], + "tags": [ + { + "key": "name", + "operator": "=~", + "value": "/^amazon-(.*)/" + }, + { + "condition": "AND", + "key": "is_wired", + "operator": "=", + "value": "true" + } + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Switch Bandwidth / Echos & FireTVs", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "Bps", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Unifi", + "description": "Wired TX / RX bytes per second (calculated in 30 second buckets). 
Does not include amazon and camera devices.", + "fill": 0, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 25 + }, + "id": 2, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": false, + "rightSide": true, + "show": true, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "$tag_name $col", + "groupBy": [ + { + "params": [ + "30s" + ], + "type": "time" + }, + { + "params": [ + "name" + ], + "type": "tag" + }, + { + "params": [ + "none" + ], + "type": "fill" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT derivative(first(\"wired-rx_bytes\"), 1s) FROM \"clients\" WHERE (\"hostname\" = 'dns-rtn') AND $timeFilter GROUP BY time(10s) fill(none)", + "rawQuery": false, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "wired-rx_bytes" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Rx" + ], + "type": "alias" + } + ], + [ + { + "params": [ + "wired-tx_bytes" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + }, + { + "params": [ + "1s" + ], + "type": "derivative" + }, + { + "params": [ + "Tx" + ], + "type": "alias" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "true" + }, + { + "condition": "AND", + "key": "name", + "operator": "!~", + "value": "/^(supermic-bmc|amazon-|camera-)/" + } + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Switch Bandwidth / Wired Devices", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "Bps", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ] + }, + { + "backgroundColor": "rgba(128,128,128,0.1)", + "colorMaps": [ + { + "color": "#70dbed", + "text": "Upper 1" + }, + { + "color": "#806eb7", + "text": "Lower 2" + }, + { + "color": "#806eb7", + "text": "Lower 1" + }, + { + "color": "#64b0c8", + "text": "Upper 2" + } + ], + "crosshairColor": "#8F070C", + "datasource": "Unifi", + "description": "Shows which wireless radio a client is connected to.", + "display": "timeline", + "expandFromQueryS": 0, + "extendLastValue": true, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 34 + }, + "highlightOnMouseover": false, + "id": 17, + "legendSortBy": "-ms", + "lineColor": "rgba(0,0,0,0.1)", + "links": [], + "metricNameColor": "#000000", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "rowHeight": 25, + "showLegend": false, + "showLegendNames": true, + "showLegendPercent": true, + "showLegendValues": true, + "showTimeAxis": false, + "targets": [ + { + "alias": "$tag_name", + "groupBy": [ + { + "params": [ + "name" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "refId": "A", + "resultFormat": "time_series", + 
"select": [ + [ + { + "params": [ + "bssid" + ], + "type": "field" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + } + ], + "textSize": 12, + "textSizeTime": 9, + "timeTextColor": "#d8d9da", + "title": "Wifi Client / AP", + "transparent": false, + "type": "natel-discrete-panel", + "units": "short", + "valueMaps": [ + { + "op": "=", + "text": "Upper 1", + "value": "80:2a:a8:12:ae:0c" + }, + { + "op": "=", + "text": "Lower 2", + "value": "80:2a:a8:11:ae:87" + }, + { + "op": "=", + "text": "Lower 1", + "value": "80:2a:a8:12:ae:87" + }, + { + "op": "=", + "text": "Upper 2", + "value": "80:2a:a8:11:ae:0c" + } + ], + "valueTextColor": "#000000", + "writeAllValues": true, + "writeLastValue": false, + "writeMetricNames": true + }, + { + "backgroundColor": "rgba(128,128,128,0.1)", + "colorMaps": [ + { + "color": "#CCC", + "text": "N/A" + } + ], + "crosshairColor": "#8F070C", + "datasource": "Unifi", + "description": "Shows IPs assigned to non-static clients.", + "display": "timeline", + "expandFromQueryS": 0, + "extendLastValue": true, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 34 + }, + "highlightOnMouseover": false, + "id": 18, + "legendSortBy": "-ms", + "lineColor": "rgba(0,0,0,0.1)", + "links": [], + "metricNameColor": "#000000", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "rowHeight": 24, + "showDistinctCount": false, + "showLegend": false, + "showLegendNames": true, + "showLegendPercent": true, + "showLegendValues": true, + "showTimeAxis": false, + "showTransitionCount": false, + "targets": [ + { + "alias": "$tag_name", + "groupBy": [ + { + "params": [ + "name" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "ip" + ], + "type": "field" + } + ] + ], + "tags": [ + { + "key": "use_fixedip", + "operator": "=", + "value": "false" + } + ] + } + ], + "textSize": 12, + "textSizeTime": 9, + "timeTextColor": "#d8d9da", + "title": "Client / IP", + "type": "natel-discrete-panel", + "units": "short", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueTextColor": "#000000", + "writeAllValues": false, + "writeLastValue": false, + "writeMetricNames": true + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Unifi", + "fill": 0, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 43 + }, + "id": 7, + "legend": { + "alignAsTable": true, + "avg": false, + "current": true, + "hideEmpty": false, + "hideZero": false, + "max": true, + "min": true, + "rightSide": true, + "show": true, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "$tag_name", + "groupBy": [ + { + "params": [ + "name" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT derivative(first(\"wired-rx_bytes\"), 1s) FROM \"clients\" WHERE (\"hostname\" = 'dns-rtn') AND $timeFilter GROUP BY time(10s) fill(none)", + "rawQuery": false, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "rssi" + ], + "type": "field" + 
} + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Received Signal Strength Indicator", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "RSSI Quality", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Unifi", + "fill": 0, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 43 + }, + "id": 10, + "legend": { + "alignAsTable": true, + "avg": false, + "current": true, + "hideEmpty": false, + "hideZero": false, + "max": true, + "min": true, + "rightSide": true, + "show": true, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "$tag_name", + "groupBy": [ + { + "params": [ + "name" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT derivative(first(\"wired-rx_bytes\"), 1s) FROM \"clients\" WHERE (\"hostname\" = 'dns-rtn') AND $timeFilter GROUP BY time(10s) fill(none)", + "rawQuery": false, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "signal" + ], + "type": "field" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Signal", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "dBm", + "label": "Signal Level", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Unifi", + "fill": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 52 + }, + "id": 6, + "legend": { + "alignAsTable": true, + "avg": false, + "current": true, + "hideEmpty": false, + "hideZero": false, + "max": true, + "min": true, + "rightSide": true, + "show": true, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "$tag_name", + "groupBy": [ + { + "params": [ + "name" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT derivative(first(\"wired-rx_bytes\"), 1s) FROM \"clients\" WHERE (\"hostname\" = 'dns-rtn') 
AND $timeFilter GROUP BY time(10s) fill(none)", + "rawQuery": false, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "noise" + ], + "type": "field" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "UAP Client Stats / Noise", + "tooltip": { + "shared": true, + "sort": 1, + "value_type": "individual" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "decimals": 0, + "format": "short", + "label": "Noise", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Unifi", + "description": "", + "fill": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 52 + }, + "id": 19, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": false, + "rightSide": true, + "show": true, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "$tag_name", + "groupBy": [ + { + "params": [ + "name" + ], + "type": "tag" + } + ], + "measurement": "clients", + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT derivative(first(\"wired-rx_bytes\"), 1s) FROM \"clients\" WHERE (\"hostname\" = 'dns-rtn') AND $timeFilter GROUP BY time(10s) fill(none)", + "rawQuery": false, + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "tx_power" + ], + "type": "field" + } + ] + ], + "tags": [ + { + "key": "is_wired", + "operator": "=", + "value": "false" + } + ] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "TX Power", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "none", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ] + } + ], + "refresh": "30s", + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-3h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Unifi Clients", + "uid": "YVR23BZiz", + "version": 25 +}
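> Editor's note on the dashboard queries above: the bandwidth panels turn the cumulative byte counters written by clients.go into per-second rates by taking `derivative(mean(...), 1s)` over 30-second buckets, grouped by the `name` tag. Spelled out as raw InfluxQL, this is a hedged reconstruction of what the Grafana query builder emits for the wired panel, not a query stored verbatim in the JSON:

```
SELECT derivative(mean("wired-rx_bytes"), 1s) AS "Rx",
       derivative(mean("wired-tx_bytes"), 1s) AS "Tx"
FROM "clients"
WHERE "is_wired" = 'true' AND $timeFilter
GROUP BY time(30s), "name" fill(none)
```

`fill(none)` simply omits intervals with no samples, so a client that drops off between polls leaves a gap instead of an interpolated zero.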