Merge pull request #900 from svennergr/svennergr/add-syslog
Commit 703dff9b1e
@@ -174,13 +174,19 @@
 # Enable this only if using InfluxDB or Loki. This will leak PII data!
 save_ids = false
 
-# Enable collection of UniFi Events (InfluxDB/Loki only).
-# This may store a lot of information. Only recommended for testing and debugging.
-# There are no dashboards to display this data. It can be used for annotations.
-# This is a new (June, 2020) feature. Please provide feedback if you try it out!
-# Enable this only if using InfluxDB or Loki. This will leak PII data!
+# Enable collection of UniFi Events using the v1 API (InfluxDB/Loki only).
+# This uses the legacy /api/s/{site}/stat/event endpoint.
+# For UDM devices, use save_syslog instead (v2 API).
+# Enable this only if using InfluxDB or Loki. This may leak PII data!
 save_events = false
 
+# Enable collection of UniFi System Log using the v2 API (Loki only).
+# This uses the /v2/api/site/{site}/system-log/all endpoint.
+# Recommended for UDM/UDM-Pro/UCG devices running modern firmware.
+# Provides richer event data including client roaming, admin access, etc.
+# Enable this only if using Loki. This may leak PII data!
+save_syslog = false
+
 # Enable collection of UniFi Alarms (InfluxDB/Loki only).
 # There are no dashboards to display this data. It can be used for annotations.
 # This is a new (June, 2020) feature. Please provide feedback if you try it out!
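For readers wiring this up in their own config, a minimal per-controller override might look like the sketch below. Only `save_syslog` and `save_events` come from this change; the `[[unifi.controller]]` table and the `url` key follow UnPoller's usual example config and are assumptions here, not part of the diff.

```toml
# Hypothetical sketch: enable the v2 system log for one UDM-class controller
# and leave the legacy v1 event collector off. Table/key names other than
# save_syslog and save_events are assumptions, not taken from this PR.
[[unifi.controller]]
  url         = "https://192.168.1.1"   # placeholder address
  save_syslog = true    # v2 /system-log/all endpoint (Loki only)
  save_events = false   # legacy v1 /stat/event endpoint
```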
@@ -232,6 +238,7 @@
 # hash_pii = false
 # save_ids = false
 # save_events = false
+# save_syslog = false
 # save_alarms = false
 # save_anomalies = false
 # save_dpi = false
go.mod
@@ -11,7 +11,7 @@ require (
     github.com/prometheus/common v0.67.4
     github.com/spf13/pflag v1.0.10
     github.com/stretchr/testify v1.11.1
-    github.com/unpoller/unifi/v5 v5.3.0
+    github.com/unpoller/unifi/v5 v5.4.0
     golang.org/x/crypto v0.46.0
     golang.org/x/term v0.38.0
     golift.io/cnfg v0.2.3
go.sum
@@ -75,8 +75,8 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO
 github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
 github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
 github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
-github.com/unpoller/unifi/v5 v5.3.0 h1:6ykCP4wL5nk/icMu8Qc24ApWD0A5JdCSYxQJIg2FQyg=
-github.com/unpoller/unifi/v5 v5.3.0/go.mod h1:pa6zv4Oyb1nFEm4qu/8CUv8Q25hQof04Wh2D0RXcTYc=
+github.com/unpoller/unifi/v5 v5.4.0 h1:bXNjL0lQi9ldrapXI/gLKmepyYEvOlop9zxkG6GPn/s=
+github.com/unpoller/unifi/v5 v5.4.0/go.mod h1:pa6zv4Oyb1nFEm4qu/8CUv8Q25hQof04Wh2D0RXcTYc=
 github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
 go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
@@ -34,7 +34,7 @@ func (u *InputUnifi) collectControllerEvents(c *Controller) ([]any, error) {
     type caller func([]any, []*unifi.Site, *Controller) ([]any, error)
 
-    for _, call := range []caller{u.collectIDs, u.collectAnomalies, u.collectAlarms, u.collectEvents} {
+    for _, call := range []caller{u.collectIDs, u.collectAnomalies, u.collectAlarms, u.collectEvents, u.collectSyslog} {
         if newLogs, err = call(logs, sites, c); err != nil {
             return logs, err
         }
@@ -98,7 +98,7 @@ func (u *InputUnifi) collectAnomalies(logs []any, sites []*unifi.Site, c *Contro
 func (u *InputUnifi) collectEvents(logs []any, sites []*unifi.Site, c *Controller) ([]any, error) {
     if *c.SaveEvents {
-        u.LogDebugf("Collecting controller site events: %s (%s)", c.URL, c.ID)
+        u.LogDebugf("Collecting controller site events (v1): %s (%s)", c.URL, c.ID)
 
         for _, s := range sites {
             events, err := c.Unifi.GetSiteEvents(s, time.Hour)
@@ -123,6 +123,35 @@ func (u *InputUnifi) collectEvents(logs []any, sites []*unifi.Site, c *Controlle
     return logs, nil
 }
 
+func (u *InputUnifi) collectSyslog(logs []any, sites []*unifi.Site, c *Controller) ([]any, error) {
+    if *c.SaveSyslog {
+        u.LogDebugf("Collecting controller syslog (v2): %s (%s)", c.URL, c.ID)
+
+        // Use v2 system-log API
+        req := unifi.DefaultSystemLogRequest(time.Hour)
+        entries, err := c.Unifi.GetSystemLog(sites, req)
+        if err != nil {
+            return logs, fmt.Errorf("unifi.GetSystemLog(): %w", err)
+        }
+
+        for _, e := range entries {
+            e := redactSystemLogEntry(e, c.HashPII, c.DropPII)
+            logs = append(logs, e)
+
+            webserver.NewInputEvent(PluginName, e.SiteName+"_syslog", &webserver.Event{
+                Msg: e.Msg(), Ts: e.Datetime(), Tags: map[string]string{
+                    "type": "syslog", "key": e.Key, "event": e.Event,
+                    "site_name": e.SiteName, "source": e.SourceName,
+                    "category": e.Category, "subcategory": e.Subcategory,
+                    "severity": e.Severity,
+                },
+            })
+        }
+    }
+
+    return logs, nil
+}
+
 func (u *InputUnifi) collectIDs(logs []any, sites []*unifi.Site, c *Controller) ([]any, error) {
     if *c.SaveIDs {
         u.LogDebugf("Collecting controller IDs data: %s (%s)", c.URL, c.ID)
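Stripped of the poller plumbing, the new collector boils down to two calls into the `unifi/v5` library, `DefaultSystemLogRequest` and `GetSystemLog`, both visible in the diff above. A rough standalone sketch (assuming an already-authenticated `*unifi.Unifi` client and a site list; the helper name is hypothetical, and redaction/webserver events are omitted) might look like:

```go
package sketch

import (
    "fmt"
    "log"
    "time"

    "github.com/unpoller/unifi/v5"
)

// logRecentSyslog mirrors what collectSyslog does above, minus redaction and
// the webserver events: fetch the last hour of v2 system-log entries and print them.
func logRecentSyslog(client *unifi.Unifi, sites []*unifi.Site) error {
    req := unifi.DefaultSystemLogRequest(time.Hour) // same one-hour window as the diff
    entries, err := client.GetSystemLog(sites, req)
    if err != nil {
        return fmt.Errorf("unifi.GetSystemLog(): %w", err)
    }

    for _, e := range entries {
        // Severity, Category, Key, and Msg() are the same fields the collector tags with.
        log.Printf("[%s] %s/%s: %s", e.Severity, e.Category, e.Key, e.Msg())
    }

    return nil
}
```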
@@ -175,3 +204,50 @@ func redactEvent(e *unifi.Event, hash *bool, dropPII *bool) *unifi.Event {
 
     return e
 }
+
+// redactSystemLogEntry attempts to mask personally identifying information from v2 system log entries.
+func redactSystemLogEntry(e *unifi.SystemLogEntry, hash *bool, dropPII *bool) *unifi.SystemLogEntry {
+    if !*hash && !*dropPII {
+        return e
+    }
+
+    // Redact CLIENT parameter if present
+    if client, ok := e.Parameters["CLIENT"]; ok {
+        if *dropPII {
+            client.Hostname = ""
+            client.Name = ""
+            client.ID = ""
+            client.IP = ""
+        } else {
+            client.Hostname = RedactNamePII(client.Hostname, hash, dropPII)
+            client.Name = RedactNamePII(client.Name, hash, dropPII)
+            client.ID = RedactMacPII(client.ID, hash, dropPII)
+            client.IP = RedactIPPII(client.IP, hash, dropPII)
+        }
+        e.Parameters["CLIENT"] = client
+    }
+
+    // Redact IP parameter if present
+    if ip, ok := e.Parameters["IP"]; ok {
+        if *dropPII {
+            ip.ID = ""
+            ip.Name = ""
+        } else {
+            ip.ID = RedactIPPII(ip.ID, hash, dropPII)
+            ip.Name = RedactIPPII(ip.Name, hash, dropPII)
+        }
+        e.Parameters["IP"] = ip
+    }
+
+    // Redact ADMIN parameter if present
+    if admin, ok := e.Parameters["ADMIN"]; ok {
+        if *dropPII {
+            admin.Name = ""
+        } else {
+            admin.Name = RedactNamePII(admin.Name, hash, dropPII)
+        }
+        e.Parameters["ADMIN"] = admin
+    }
+
+    return e
+}
@@ -279,6 +279,22 @@ func RedactMacPII(pii string, hash *bool, dropPII *bool) (output string) {
     return fmt.Sprintf("%s:%s:%s:%s:%s:%s:%s", s[:2], s[2:4], s[4:6], s[6:8], s[8:10], s[10:12], s[12:14])
 }
 
+// RedactIPPII converts an IP address to an md5 hashed version (first 12 chars only).
+// Useful for masking out personally identifying information.
+func RedactIPPII(pii string, hash *bool, dropPII *bool) string {
+    if dropPII != nil && *dropPII {
+        return ""
+    }
+
+    if hash == nil || !*hash || pii == "" {
+        return pii
+    }
+
+    s := fmt.Sprintf("%x", md5.Sum([]byte(pii))) // nolint: gosec
+    // Format as a "fake" IP-like string.
+    return fmt.Sprintf("%s.%s.%s", s[:4], s[4:8], s[8:12])
+}
+
 // getFilteredSites returns a list of sites to fetch data for.
 // Omits requested but unconfigured sites. Grabs the full list from the
 // controller and returns the sites provided in the config file.
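To make the masked format concrete, here is a small self-contained sketch (not from the PR) that mirrors the hashing `RedactIPPII` applies when `hash_pii` is enabled: the first 12 hex characters of the md5 digest are split into three dot-separated groups, so the value still reads roughly like an address while staying stable per client and practically irreversible.

```go
package main

import (
    "crypto/md5" // nolint: gosec -- non-cryptographic masking, matching the plugin
    "fmt"
)

// fakeIP mirrors RedactIPPII's formatting for a hashed address.
func fakeIP(ip string) string {
    s := fmt.Sprintf("%x", md5.Sum([]byte(ip)))
    return fmt.Sprintf("%s.%s.%s", s[:4], s[4:8], s[8:12])
}

func main() {
    // Deterministic per input, so the same client keeps the same masked value across polls.
    fmt.Println(fakeIP("192.168.1.50"))
}
```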
|
||||
|
|
|
|||
|
|
@ -38,6 +38,7 @@ type Controller struct {
|
|||
SaveAnomal *bool `json:"save_anomalies" toml:"save_anomalies" xml:"save_anomalies" yaml:"save_anomalies"`
|
||||
SaveAlarms *bool `json:"save_alarms" toml:"save_alarms" xml:"save_alarms" yaml:"save_alarms"`
|
||||
SaveEvents *bool `json:"save_events" toml:"save_events" xml:"save_events" yaml:"save_events"`
|
||||
SaveSyslog *bool `json:"save_syslog" toml:"save_syslog" xml:"save_syslog" yaml:"save_syslog"`
|
||||
SaveIDs *bool `json:"save_ids" toml:"save_ids" xml:"save_ids" yaml:"save_ids"`
|
||||
SaveDPI *bool `json:"save_dpi" toml:"save_dpi" xml:"save_dpi" yaml:"save_dpi"`
|
||||
SaveRogue *bool `json:"save_rogue" toml:"save_rogue" xml:"save_rogue" yaml:"save_rogue"`
|
||||
|
|
@@ -244,6 +245,10 @@ func (u *InputUnifi) setDefaults(c *Controller) { //nolint:cyclop
         c.SaveEvents = &f
     }
 
+    if c.SaveSyslog == nil {
+        c.SaveSyslog = &f
+    }
+
     if c.SaveAlarms == nil {
         c.SaveAlarms = &f
     }
@@ -323,6 +328,10 @@ func (u *InputUnifi) setControllerDefaults(c *Controller) *Controller { //nolint
         c.SaveEvents = u.Default.SaveEvents
     }
 
+    if c.SaveSyslog == nil {
+        c.SaveSyslog = u.Default.SaveSyslog
+    }
+
     if c.SaveAlarms == nil {
         c.SaveAlarms = u.Default.SaveAlarms
     }
@@ -127,7 +127,7 @@ func (u *InputUnifi) logController(c *Controller) {
     u.Logf(" => Username: %s (has password: %v) (has api-key: %v)", c.User, c.Pass != "", c.APIKey != "")
     u.Logf(" => Hash PII %v / Drop PII %v / Poll Sites: %s", *c.HashPII, *c.DropPII, strings.Join(c.Sites, ", "))
     u.Logf(" => Save Sites %v / Save DPI %v (metrics)", *c.SaveSites, *c.SaveDPI)
-    u.Logf(" => Save Events %v / Save IDs %v (logs)", *c.SaveEvents, *c.SaveIDs)
+    u.Logf(" => Save Events %v / Save Syslog %v / Save IDs %v (logs)", *c.SaveEvents, *c.SaveSyslog, *c.SaveIDs)
     u.Logf(" => Save Alarms %v / Anomalies %v (logs)", *c.SaveAlarms, *c.SaveAnomal)
     u.Logf(" => Save Rogue APs: %v", *c.SaveRogue)
 }
@@ -2,9 +2,37 @@
 
 Loki Output Plugin for UnPoller
 
-This plugin writes UniFi Events and IDS data to Loki. Maybe Alarms too.
+This plugin writes UniFi Events, System Logs, IDS, Alarms, and Anomalies to Loki as JSON.
 
-Example Config:
+## Log Types
+
+| Application Label | Config Option | API | Description |
+|-------------------|---------------|-----|-------------|
+| `unifi_system_log` | `save_syslog` | v2 | System log events (UDM recommended) |
+| `unifi_event` | `save_events` | v1 | Legacy events (older controllers) |
+| `unifi_ids` | `save_ids` | v1 | Intrusion Detection System events |
+| `unifi_alarm` | `save_alarms` | v1 | Alarm events |
+| `unifi_anomaly` | `save_anomalies` | v1 | Anomaly events |
+
+## Querying in Loki
+
+All logs are stored as JSON. Use Loki's `| json` parser to extract fields:
+
+```logql
+{application="unifi_system_log"} | json
+```
+
+Filter by severity:
+```logql
+{application="unifi_system_log", severity="HIGH"} | json
+```
+
+Extract specific fields:
+```logql
+{application="unifi_system_log"} | json | line_format "{{.message}}"
+```
+
+## Example Config
 
 ```toml
 [loki]
@@ -23,4 +51,25 @@ Example Config:
 
 # Used for auth-less multi-tenant.
 #tenant_id = ""
+
+[unifi.defaults]
+# For UDM/UDM-Pro/UCG devices, use save_syslog (v2 API)
+save_syslog = true
+
+# For older controllers, use save_events (v1 API)
+save_events = false
+
+# Other log types
+save_ids = false
+save_alarms = false
+save_anomalies = false
 ```
+
+## Environment Variables
+
+```bash
+UP_LOKI_URL=http://localhost:3100
+UP_LOKI_INTERVAL=2m
+UP_UNIFI_DEFAULT_SAVE_SYSLOG=true
+UP_UNIFI_DEFAULT_SAVE_EVENTS=false
+```
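As one way to pass these variables to a containerized deployment, a hypothetical invocation is sketched below; the image path and the Loki hostname are assumptions, not taken from this PR.

```bash
# Hedged sketch: adjust the image path and Loki URL to your environment.
docker run -d --name unpoller \
  -e UP_LOKI_URL=http://loki:3100 \
  -e UP_LOKI_INTERVAL=2m \
  -e UP_UNIFI_DEFAULT_SAVE_SYSLOG=true \
  -e UP_UNIFI_DEFAULT_SAVE_EVENTS=false \
  ghcr.io/unpoller/unpoller:latest
```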
@@ -55,6 +55,8 @@ func (r *Report) ProcessEventLogs(events *poller.Events) *Logs {
             r.Alarm(event, logs)
         case *unifi.Anomaly:
             r.Anomaly(event, logs)
+        case *unifi.SystemLogEntry:
+            r.SystemLogEvent(event, logs)
         default: // unlikely.
             r.LogErrorf("unknown event type: %T", e)
         }
@@ -64,9 +66,10 @@
 }
 
 func (r *Report) String() string {
-    return fmt.Sprintf("%s: %d, %s: %d, %s: %d, %s: %d, Dur: %v",
+    return fmt.Sprintf("%s: %d, %s: %d, %s: %d, %s: %d, %s: %d, Dur: %v",
         typeEvent, r.Counts[typeEvent], typeIDs, r.Counts[typeIDs],
         typeAlarm, r.Counts[typeAlarm], typeAnomaly, r.Counts[typeAnomaly],
+        typeSystemLog, r.Counts[typeSystemLog],
         time.Since(r.Start).Round(time.Millisecond))
 }
 
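With the extra counter wired in, the summary line now reads something like `Event: 3, IDs: 0, Alarm: 1, Anomaly: 0, SystemLog: 12, Dur: 120ms` (values illustrative), following the format string and type constants above.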
@@ -1,6 +1,7 @@
 package lokiunifi
 
 import (
+    "encoding/json"
     "strconv"
 
     "github.com/unpoller/unifi/v5"
@@ -9,6 +10,7 @@
 const typeAlarm = "Alarm"
 
 // Alarm stores a structured Alarm for batch sending to Loki.
+// Logs the raw JSON for parsing with Loki's `| json` pipeline.
 func (r *Report) Alarm(event *unifi.Alarm, logs *Logs) {
     if event.Datetime.Before(r.Oldest) {
         return
@@ -16,23 +18,18 @@
 
     r.Counts[typeAlarm]++ // increase counter and append new log line.
 
+    // Marshal event to JSON for the log line
+    msg, err := json.Marshal(event)
+    if err != nil {
+        msg = []byte(event.Msg)
+    }
+
     logs.Streams = append(logs.Streams, LogStream{
-        Entries: [][]string{{strconv.FormatInt(event.Datetime.UnixNano(), 10), event.Msg}},
+        Entries: [][]string{{strconv.FormatInt(event.Datetime.UnixNano(), 10), string(msg)}},
         Labels: CleanLabels(map[string]string{
-            "application": "unifi_alarm",
-            "host": event.Host,
-            "source": event.SourceName,
-            "site_name": event.SiteName,
-            "subsystem": event.Subsystem,
-            "category": event.Catname.String(),
-            "event_type": event.EventType,
-            "key": event.Key,
-            "app_protocol": event.AppProto,
-            "protocol": event.Proto,
-            "interface": event.InIface,
-            "src_country": event.SrcIPCountry,
-            "usgip": event.USGIP,
-            "action": event.InnerAlertAction,
+            "application": "unifi_alarm",
+            "source": event.SourceName,
+            "site_name": event.SiteName,
         }),
     })
 }
@@ -1,6 +1,7 @@
 package lokiunifi
 
 import (
+    "encoding/json"
     "strconv"
 
     "github.com/unpoller/unifi/v5"
@@ -9,6 +10,7 @@
 const typeAnomaly = "Anomaly"
 
 // Anomaly stores a structured Anomaly for batch sending to Loki.
+// Logs the raw JSON for parsing with Loki's `| json` pipeline.
 func (r *Report) Anomaly(event *unifi.Anomaly, logs *Logs) {
     if event.Datetime.Before(r.Oldest) {
         return
@@ -16,13 +18,18 @@
 
     r.Counts[typeAnomaly]++ // increase counter and append new log line.
 
+    // Marshal event to JSON for the log line
+    msg, err := json.Marshal(event)
+    if err != nil {
+        msg = []byte(event.Anomaly)
+    }
+
     logs.Streams = append(logs.Streams, LogStream{
-        Entries: [][]string{{strconv.FormatInt(event.Datetime.UnixNano(), 10), event.Anomaly}},
+        Entries: [][]string{{strconv.FormatInt(event.Datetime.UnixNano(), 10), string(msg)}},
         Labels: CleanLabels(map[string]string{
             "application": "unifi_anomaly",
             "source": event.SourceName,
             "site_name": event.SiteName,
             "device_mac": event.DeviceMAC,
         }),
     })
 }
@@ -1,14 +1,17 @@
 package lokiunifi
 
 import (
+    "encoding/json"
     "strconv"
 
     "github.com/unpoller/unifi/v5"
 )
 
 const typeEvent = "Event"
+const typeSystemLog = "SystemLog"
 
 // Event stores a structured UniFi Event for batch sending to Loki.
+// Logs the raw JSON for parsing with Loki's `| json` pipeline.
 func (r *Report) Event(event *unifi.Event, logs *Logs) {
     if event.Datetime.Before(r.Oldest) {
         return
@@ -16,41 +19,45 @@ func (r *Report) Event(event *unifi.Event, logs *Logs) {
 
     r.Counts[typeEvent]++ // increase counter and append new log line.
 
+    // Marshal event to JSON for the log line
+    msg, err := json.Marshal(event)
+    if err != nil {
+        msg = []byte(event.Msg)
+    }
+
     logs.Streams = append(logs.Streams, LogStream{
-        Entries: [][]string{{strconv.FormatInt(event.Datetime.UnixNano(), 10), event.Msg}},
+        Entries: [][]string{{strconv.FormatInt(event.Datetime.UnixNano(), 10), string(msg)}},
         Labels: CleanLabels(map[string]string{
-            "application": "unifi_event",
-            "admin": event.Admin, // username
-            "host": event.Host,
-            "hostname": event.Hostname,
-            "site_name": event.SiteName,
-            "source": event.SourceName,
-            "subsystem": event.Subsystem,
-            "ap_from": event.ApFrom,
-            "ap_to": event.ApTo,
-            "ap": event.Ap,
-            "ap_name": event.ApName,
-            "gw": event.Gw,
-            "gw_name": event.GwName,
-            "sw": event.Sw,
-            "sw_name": event.SwName,
-            "category": event.Catname.String(),
-            "radio": event.Radio,
-            "radio_from": event.RadioFrom,
-            "radio_to": event.RadioTo,
-            "key": event.Key,
-            "interface": event.InIface,
-            "event_type": event.EventType,
-            "ssid": event.SSID,
-            "channel": event.Channel.Txt,
-            "channel_from": event.ChannelFrom.Txt,
-            "channel_to": event.ChannelTo.Txt,
-            "usgip": event.USGIP,
-            "network": event.Network,
-            "app_protocol": event.AppProto,
-            "protocol": event.Proto,
-            "action": event.InnerAlertAction,
-            "src_country": event.SrcIPCountry,
+            "application": "unifi_event",
+            "site_name": event.SiteName,
+            "source": event.SourceName,
         }),
     })
 }
+
+// SystemLogEvent stores a structured UniFi v2 System Log Entry for batch sending to Loki.
+// Logs the raw JSON for parsing with Loki's `| json` pipeline.
+func (r *Report) SystemLogEvent(event *unifi.SystemLogEntry, logs *Logs) {
+    if event.Datetime().Before(r.Oldest) {
+        return
+    }
+
+    r.Counts[typeSystemLog]++ // increase counter and append new log line.
+
+    // Marshal event to JSON for the log line
+    msg, err := json.Marshal(event)
+    if err != nil {
+        msg = []byte(event.TitleRaw)
+    }
+
+    logs.Streams = append(logs.Streams, LogStream{
+        Entries: [][]string{{strconv.FormatInt(event.Datetime().UnixNano(), 10), string(msg)}},
+        Labels: CleanLabels(map[string]string{
+            "application": "unifi_system_log",
+            "site_name": event.SiteName,
+            "source": event.SourceName,
+            "category": event.Category,
+            "severity": event.Severity,
+        }),
+    })
+}
@@ -1,6 +1,7 @@
 package lokiunifi
 
 import (
+    "encoding/json"
    "strconv"
 
     "github.com/unpoller/unifi/v5"
@@ -8,7 +9,8 @@
 
 const typeIDs = "IDs"
 
-// event stores a structured event Event for batch sending to Loki.
+// IDs stores a structured IDS Event for batch sending to Loki.
+// Logs the raw JSON for parsing with Loki's `| json` pipeline.
 func (r *Report) IDs(event *unifi.IDS, logs *Logs) {
     if event.Datetime.Before(r.Oldest) {
         return
@@ -16,26 +18,18 @@
 
     r.Counts[typeIDs]++ // increase counter and append new log line.
 
+    // Marshal event to JSON for the log line
+    msg, err := json.Marshal(event)
+    if err != nil {
+        msg = []byte(event.Msg)
+    }
+
     logs.Streams = append(logs.Streams, LogStream{
-        Entries: [][]string{{strconv.FormatInt(event.Datetime.UnixNano(), 10), event.Msg}},
+        Entries: [][]string{{strconv.FormatInt(event.Datetime.UnixNano(), 10), string(msg)}},
         Labels: CleanLabels(map[string]string{
-            "application": "unifi_ids",
-            "source": event.SourceName,
-            "host": event.Host,
-            "site_name": event.SiteName,
-            "subsystem": event.Subsystem,
-            "category": event.Catname.String(),
-            "event_type": event.EventType,
-            "key": event.Key,
-            "app_protocol": event.AppProto,
-            "protocol": event.Proto,
-            "interface": event.InIface,
-            "src_country": event.SrcIPCountry,
-            "src_city": event.SourceIPGeo.City,
-            "src_continent": event.SourceIPGeo.ContinentCode,
-            "src_country_code": event.SourceIPGeo.CountryCode,
-            "usgip": event.USGIP,
-            "action": event.InnerAlertAction,
+            "application": "unifi_ids",
+            "source": event.SourceName,
+            "site_name": event.SiteName,
         }),
     })
 }