From 397d2b2ab4ed926b196efe74f03c96b42cf566e7 Mon Sep 17 00:00:00 2001 From: Pavlo Golub Date: Tue, 13 Feb 2024 17:58:27 +0100 Subject: [PATCH] [*] move `psutil` and `logparser` to `metrics` package (#363) --- src/database.go | 24 ++-- src/log/formatter.go | 2 +- src/main.go | 52 +++++---- src/{ => metrics}/logparse.go | 129 ++++++++------------- src/{ => metrics}/psutil/psutil.go | 0 src/{ => metrics}/psutil/psutil_darwin.go | 0 src/{ => metrics}/psutil/psutil_linux.go | 0 src/{ => metrics}/psutil/psutil_windows.go | 0 src/metrics/types.go | 10 ++ 9 files changed, 104 insertions(+), 113 deletions(-) rename src/{ => metrics}/logparse.go (74%) rename src/{ => metrics}/psutil/psutil.go (100%) rename src/{ => metrics}/psutil/psutil_darwin.go (100%) rename src/{ => metrics}/psutil/psutil_linux.go (100%) rename src/{ => metrics}/psutil/psutil_windows.go (100%) diff --git a/src/database.go b/src/database.go index fea4c5396d..3c8b8ea639 100644 --- a/src/database.go +++ b/src/database.go @@ -13,7 +13,7 @@ import ( "github.com/cybertec-postgresql/pgwatch3/db" "github.com/cybertec-postgresql/pgwatch3/metrics" - "github.com/cybertec-postgresql/pgwatch3/psutil" + "github.com/cybertec-postgresql/pgwatch3/metrics/psutil" "github.com/cybertec-postgresql/pgwatch3/sources" "github.com/jackc/pgx/v5" "github.com/jackc/pgx/v5/pgxpool" @@ -81,30 +81,30 @@ func DBExecRead(ctx context.Context, conn db.PgxIface, sql string, args ...any) return nil, err } +func GetConnByUniqueName(dbUnique string) db.PgxIface { + monitoredDbConnCacheLock.RLock() + conn := monitoredDbConnCache[dbUnique] + monitoredDbConnCacheLock.RUnlock() + return conn +} + func DBExecReadByDbUniqueName(ctx context.Context, dbUnique string, sql string, args ...any) (metrics.Measurements, error) { var conn db.PgxIface var md sources.MonitoredDatabase var data metrics.Measurements var err error var tx pgx.Tx - var exists bool if strings.TrimSpace(sql) == "" { return nil, errors.New("empty SQL") } - md, err = GetMonitoredDatabaseByUniqueName(dbUnique) - if err != nil { + if md, err = GetMonitoredDatabaseByUniqueName(dbUnique); err != nil { return nil, err } - monitoredDbConnCacheLock.RLock() - // sqlx.DB itself is parallel safe - conn, exists = monitoredDbConnCache[dbUnique] - monitoredDbConnCacheLock.RUnlock() - if !exists || conn == nil { + if conn = GetConnByUniqueName(dbUnique); conn == nil { logger.Errorf("SQL connection for dbUnique %s not found or nil", dbUnique) // Should always be initialized in the main loop DB discovery code ... return nil, errors.New("SQL connection not found or nil") } - tx, err = conn.Begin(ctx) - if err != nil { + if tx, err = conn.Begin(ctx); err != nil { return nil, err } defer func() { _ = tx.Commit(ctx) }() @@ -224,7 +224,7 @@ func DBGetPGVersion(ctx context.Context, dbUnique string, srcType sources.Kind, } getVerLock.Lock() // limit to 1 concurrent version info fetch per DB defer getVerLock.Unlock() - logger.WithField("database", dbUnique). + logger.WithField("source", dbUnique). 
WithField("type", srcType).Debug("determining DB version and recovery status...") if verNew.Extensions == nil { diff --git a/src/log/formatter.go b/src/log/formatter.go index c3228cf9c8..d211c084ba 100644 --- a/src/log/formatter.go +++ b/src/log/formatter.go @@ -14,7 +14,7 @@ import ( func newFormatter(noColors bool) *Formatter { return &Formatter{ HideKeys: false, - FieldsOrder: []string{"database", "metric", "sql", "params"}, + FieldsOrder: []string{"source", "metric", "sql", "params"}, TimestampFormat: "2006-01-02 15:04:05.000", ShowFullLevel: true, NoColors: noColors, diff --git a/src/main.go b/src/main.go index da914cf3b6..38cb8e6880 100644 --- a/src/main.go +++ b/src/main.go @@ -24,7 +24,7 @@ import ( "github.com/cybertec-postgresql/pgwatch3/config" "github.com/cybertec-postgresql/pgwatch3/log" "github.com/cybertec-postgresql/pgwatch3/metrics" - "github.com/cybertec-postgresql/pgwatch3/psutil" + "github.com/cybertec-postgresql/pgwatch3/metrics/psutil" "github.com/cybertec-postgresql/pgwatch3/sinks" "github.com/cybertec-postgresql/pgwatch3/sources" "github.com/cybertec-postgresql/pgwatch3/webserver" @@ -32,11 +32,6 @@ import ( "github.com/sirupsen/logrus" ) -type ControlMessage struct { - Action string // START, STOP, PAUSE - Config map[string]float64 -} - type MetricFetchMessage struct { DBUniqueName string DBUniqueNameOrig string @@ -540,7 +535,7 @@ retry_with_superuser_sql: // if 1st fetch with normal SQL fails, try with SU SQL goto retry_with_superuser_sql } if firstErr != nil { - logger.WithField("database", msg.DBUniqueName).WithField("metric", msg.MetricName).Error(err) + logger.WithField("source", msg.DBUniqueName).WithField("metric", msg.MetricName).Error(err) return nil, firstErr // returning the initial error } logger.Infof("[%s:%s] failed to fetch metrics: %s", msg.DBUniqueName, msg.MetricName, err) @@ -553,7 +548,7 @@ retry_with_superuser_sql: // if 1st fetch with normal SQL fails, try with SU SQL return nil, err } - logger.WithFields(map[string]any{"database": msg.DBUniqueName, "metric": msg.MetricName, "rows": len(data)}).Info("measurements fetched") + logger.WithFields(map[string]any{"source": msg.DBUniqueName, "metric": msg.MetricName, "rows": len(data)}).Info("measurements fetched") if regexIsPgbouncerMetrics.MatchString(msg.MetricName) { // clean unwanted pgbouncer pool stats here as not possible in SQL data = FilterPgbouncerData(data, md.GetDatabaseName(), vme) } @@ -717,8 +712,15 @@ func deepCopyMetricDefinitionMap(mdm map[string]map[uint]metrics.MetricPropertie return newMdm } -// ControlMessage notifies of shutdown + interval change -func MetricGathererLoop(ctx context.Context, dbUniqueName, dbUniqueNameOrig string, srcType sources.Kind, metricName string, configMap map[string]float64, controlCh <-chan ControlMessage, storeCh chan<- []metrics.MeasurementMessage) { +// metrics.ControlMessage notifies of shutdown + interval change +func MetricGathererLoop(ctx context.Context, + dbUniqueName, dbUniqueNameOrig string, + srcType sources.Kind, + metricName string, + configMap map[string]float64, + controlCh <-chan metrics.ControlMessage, + storeCh chan<- []metrics.MeasurementMessage) { + config := configMap interval := config[metricName] hostState := make(map[string]map[string]string) @@ -732,9 +734,17 @@ func MetricGathererLoop(ctx context.Context, dbUniqueName, dbUniqueNameOrig stri lastDBVersionFetchTime := time.Unix(0, 0) // check DB ver. ev. 
5 min var stmtTimeoutOverride int64 - l := logger.WithField("database", dbUniqueName).WithField("metric", metricName) + l := logger.WithField("source", dbUniqueName).WithField("metric", metricName) if metricName == specialMetricServerLogEventCounts { - logparseLoop(dbUniqueName, metricName, configMap, controlCh, storeCh) // no return + mdb, err := GetMonitoredDatabaseByUniqueName(dbUniqueName) + if err != nil { + return // no such source in the monitored DB list + } + dbPgVersionMapLock.RLock() + realDbname := dbPgVersionMap[dbUniqueName].RealDbname // to manage 2 sets of event counts - monitored DB + global + dbPgVersionMapLock.RUnlock() + conn := GetConnByUniqueName(dbUniqueName) + metrics.ParseLogs(ctx, conn, mdb, realDbname, metricName, configMap, controlCh, storeCh) // returns only on shutdown return } @@ -1234,7 +1244,7 @@ func shouldDbBeMonitoredBasedOnCurrentState(md sources.MonitoredDatabase) bool { return !IsDBDormant(md.DBUniqueName) } -func ControlChannelsMapToList(controlChannels map[string]chan ControlMessage) []string { +func ControlChannelsMapToList(controlChannels map[string]chan metrics.ControlMessage) []string { controlChannelList := make([]string, len(controlChannels)) i := 0 for key := range controlChannels { @@ -1464,7 +1474,7 @@ func main() { go StatsSummarizer(mainContext) } - controlChannels := make(map[string](chan ControlMessage)) // [db1+metric1]=chan + controlChannels := make(map[string](chan metrics.ControlMessage)) // [db1+metric1]=chan measurementCh := make(chan []metrics.MeasurementMessage, 10000) var monitoredDbs sources.MonitoredDatabases @@ -1517,7 +1527,7 @@ func main() { } logger. - WithField("databases", len(monitoredDbs)). + WithField("sources", len(monitoredDbs)). WithField("metrics", len(metricDefinitionMap)). Log(func() logrus.Level { if firstLoop && len(monitoredDbs)*len(metricDefinitionMap) == 0 { @@ -1529,7 +1539,7 @@ firstLoop = false // only used for failing when 1st config reading fails for _, host := range monitoredDbs { - logger.WithField("database", host.DBUniqueName). + logger.WithField("source", host.DBUniqueName). WithField("metric", host.Metrics). WithField("tags", host.CustomTags). 
WithField("config", host.HostConfig).Debug() @@ -1674,8 +1684,8 @@ func main() { if metricDefOk && !chOk { // initialize a new per db/per metric control channel if interval > 0 { hostMetricIntervalMap[dbMetric] = interval - logger.WithField("database", dbUnique).WithField("metric", metric).WithField("interval", interval).Info("starting gatherer") - controlChannels[dbMetric] = make(chan ControlMessage, 1) + logger.WithField("source", dbUnique).WithField("metric", metric).WithField("interval", interval).Info("starting gatherer") + controlChannels[dbMetric] = make(chan metrics.ControlMessage, 1) metricNameForStorage := metricName if _, isSpecialMetric := specialMetrics[metricName]; !isSpecialMetric { @@ -1701,7 +1711,7 @@ func main() { } else if (!metricDefOk && chOk) || interval <= 0 { // metric definition files were recently removed or interval set to zero logger.Warning("shutting down metric", metric, "for", host.DBUniqueName) - controlChannels[dbMetric] <- ControlMessage{Action: gathererStatusStop} + controlChannels[dbMetric] <- metrics.ControlMessage{Action: gathererStatusStop} delete(controlChannels, dbMetric) } else if !metricDefOk { epoch, ok := lastSQLFetchError.Load(metric) @@ -1713,7 +1723,7 @@ func main() { // check if interval has changed if hostMetricIntervalMap[dbMetric] != interval { logger.Warning("sending interval update for", dbUnique, metric) - controlChannels[dbMetric] <- ControlMessage{Action: gathererStatusStart, Config: metricConfig} + controlChannels[dbMetric] <- metrics.ControlMessage{Action: gathererStatusStart, Config: metricConfig} hostMetricIntervalMap[dbMetric] = interval } } @@ -1782,7 +1792,7 @@ func main() { if mainContext.Err() != nil || wholeDbShutDownDueToRoleChange || dbRemovedFromConfig || singleMetricDisabled { logger.Infof("shutting down gatherer for [%s:%s] ...", db, metric) - controlChannels[dbMetric] <- ControlMessage{Action: gathererStatusStop} + controlChannels[dbMetric] <- metrics.ControlMessage{Action: gathererStatusStop} delete(controlChannels, dbMetric) logger.Debugf("control channel for [%s:%s] deleted", db, metric) gatherersShutDown++ diff --git a/src/logparse.go b/src/metrics/logparse.go similarity index 74% rename from src/logparse.go rename to src/metrics/logparse.go index c9fb2b4f2d..93ed5fac0b 100644 --- a/src/logparse.go +++ b/src/metrics/logparse.go @@ -1,7 +1,8 @@ -package main +package metrics import ( "bufio" + "context" "io" "os" "path" @@ -10,10 +11,13 @@ import ( "strings" "time" - "github.com/cybertec-postgresql/pgwatch3/metrics" + "github.com/cybertec-postgresql/pgwatch3/db" + "github.com/cybertec-postgresql/pgwatch3/log" "github.com/cybertec-postgresql/pgwatch3/sources" ) +const specialMetricServerLogEventCounts = "server_log_event_counts" + var PgSeverities = [...]string{"DEBUG", "INFO", "NOTICE", "WARNING", "ERROR", "LOG", "FATAL", "PANIC"} var PgSeveritiesLocale = map[string]map[string]string{ "C.": {"DEBUG": "DEBUG", "LOG": "LOG", "INFO": "INFO", "NOTICE": "NOTICE", "WARNING": "WARNING", "ERROR": "ERROR", "FATAL": "FATAL", "PANIC": "PANIC"}, @@ -31,38 +35,35 @@ var PgSeveritiesLocale = map[string]map[string]string{ const CSVLogDefaultRegEx = `^^(?P.*?),"?(?P.*?)"?,"?(?P.*?)"?,(?P\d+),"?(?P.*?)"?,(?P.*?),(?P\d+),"?(?P.*?)"?,(?P.*?),(?P.*?),(?P.*?),(?P\w+),` const CSVLogDefaultGlobSuffix = "*.csv" -func getFileWithLatestTimestamp(files []string) (string, time.Time) { +func getFileWithLatestTimestamp(files []string) (string, error) { var maxDate time.Time var latest string for _, f := range files { fi, err := os.Stat(f) if 
err != nil { - logger.Errorf("Failed to stat() file %s: %s", f, err) - continue + return "", err } if fi.ModTime().After(maxDate) { latest = f maxDate = fi.ModTime() } } - return latest, maxDate + return latest, nil } -func getFileWithNextModTimestamp(dbUniqueName, logsGlobPath, currentFile string) (string, time.Time) { +func getFileWithNextModTimestamp(logsGlobPath, currentFile string) (string, error) { var nextFile string var nextMod time.Time files, err := filepath.Glob(logsGlobPath) if err != nil { - logger.Error("[%s] Error globbing \"%s\"...", dbUniqueName, logsGlobPath) - return "", time.Now() + return "", err } fiCurrent, err := os.Stat(currentFile) if err != nil { - logger.Errorf("Failed to stat() currentFile %s: %s", currentFile, err) - return "", time.Now() + return "", err } //log.Debugf("Stat().ModTime() for %s: %v", currentFile, fiCurrent.ModTime()) @@ -72,7 +73,6 @@ func getFileWithNextModTimestamp(dbUniqueName, logsGlobPath, currentFile string) } fi, err := os.Stat(f) if err != nil { - logger.Errorf("Failed to stat() currentFile %s: %s", f, err) continue } //log.Debugf("Stat().ModTime() for %s: %v", f, fi.ModTime()) @@ -81,12 +81,12 @@ func getFileWithNextModTimestamp(dbUniqueName, logsGlobPath, currentFile string) nextFile = f } } - return nextFile, nextMod + return nextFile, nil } // 1. add zero counts for severity levels that didn't have any occurrences in the log -func eventCountsToMetricStoreMessages(eventCounts, eventCountsTotal map[string]int64, mdb sources.MonitoredDatabase) []metrics.MeasurementMessage { - allSeverityCounts := make(metrics.Measurement) +func eventCountsToMetricStoreMessages(eventCounts, eventCountsTotal map[string]int64, mdb sources.MonitoredDatabase) []MeasurementMessage { + allSeverityCounts := make(Measurement) for _, s := range PgSeverities { parsedCount, ok := eventCounts[s] if ok { @@ -102,34 +102,33 @@ func eventCountsToMetricStoreMessages(eventCounts, eventCountsTotal map[string]i } } allSeverityCounts["epoch_ns"] = time.Now().UnixNano() - return []metrics.MeasurementMessage{{ + return []MeasurementMessage{{ DBName: mdb.DBUniqueName, SourceType: string(mdb.Kind), MetricName: specialMetricServerLogEventCounts, - Data: metrics.Measurements{allSeverityCounts}, + Data: Measurements{allSeverityCounts}, CustomTags: mdb.CustomTags, }} } -func logparseLoop(dbUniqueName, metricName string, configMap map[string]float64, controlCh <-chan ControlMessage, storeCh chan<- []metrics.MeasurementMessage) { +func ParseLogs(ctx context.Context, conn db.PgxIface, mdb sources.MonitoredDatabase, realDbname, metricName string, configMap map[string]float64, controlCh <-chan ControlMessage, storeCh chan<- []MeasurementMessage) { - var latest, previous, realDbname, serverMessagesLang string + var latest, previous, serverMessagesLang string var latestHandle *os.File var reader *bufio.Reader var linesRead = 0 // to skip over already parsed lines on Postgres server restart for example var logsMatchRegex, logsMatchRegexPrev, logsGlobPath string var lastSendTime time.Time // to storage channel - var lastConfigRefreshTime time.Time //sources.MonitoredDatabase info var eventCounts = make(map[string]int64) // for the specific DB. 
[WARNING: 34, ERROR: 10, ...], zeroed on storage send var eventCountsTotal = make(map[string]int64) // for the whole instance - var mdb sources.MonitoredDatabase var hostConfig sources.HostConfigAttrs var config = configMap var interval float64 var err error var firstRun = true var csvlogRegex *regexp.Regexp - + dbUniqueName := realDbname + logger := log.GetLogger(ctx) for { // re-try loop. re-start in case of FS errors or just to refresh host config select { case msg := <-controlCh: @@ -148,21 +147,6 @@ func logparseLoop(dbUniqueName, metricName string, configMap map[string]float64, } } - if lastConfigRefreshTime.IsZero() || lastConfigRefreshTime.Add(time.Second*time.Duration(opts.Source.Refresh)).Before(time.Now()) { - mdb, err = GetMonitoredDatabaseByUniqueName(dbUniqueName) - if err != nil { - logger.Errorf("[%s] Failed to refresh monitored DBs info: %s", dbUniqueName, err) - time.Sleep(60 * time.Second) - continue - } - hostConfig = mdb.HostConfig - logger.Debugf("[%s] Refreshed hostConfig: %+v", dbUniqueName, hostConfig) - } - - dbPgVersionMapLock.RLock() - realDbname = dbPgVersionMap[dbUniqueName].RealDbname // to manage 2 sets of event counts - monitored DB + global - dbPgVersionMapLock.RUnlock() - if hostConfig.LogsMatchRegex != "" { logsMatchRegex = hostConfig.LogsMatchRegex } @@ -174,16 +158,16 @@ func logparseLoop(dbUniqueName, metricName string, configMap map[string]float64, logsGlobPath = hostConfig.LogsGlobPath } if logsGlobPath == "" { - logsGlobPath = tryDetermineLogFolder(mdb) - if logsGlobPath == "" { - logger.Warningf("[%s] Could not determine Postgres logs parsing folder. Configured logs_glob_path = %s", dbUniqueName, logsGlobPath) + logsGlobPath, err = tryDetermineLogFolder(ctx, conn) + if err != nil { + logger.WithError(err).Printf("[%s] Could not determine Postgres logs parsing folder. Configured logs_glob_path = %s", dbUniqueName, logsGlobPath) time.Sleep(60 * time.Second) continue } } - serverMessagesLang = tryDetermineLogMessagesLanguage(mdb) - if serverMessagesLang == "" { - logger.Warningf("[%s] Could not determine language (lc_collate) used for server logs, cannot parse logs...", dbUniqueName) + serverMessagesLang, err = tryDetermineLogMessagesLanguage(ctx, conn) + if err != nil { + logger.WithError(err).Warningf("[%s] Could not determine language (lc_collate) used for server logs, cannot parse logs...", dbUniqueName) time.Sleep(60 * time.Second) continue } @@ -258,12 +242,12 @@ func logparseLoop(dbUniqueName, metricName string, configMap map[string]float64, } var eofSleepMillis = 0 - readLoopStart := time.Now() + // readLoopStart := time.Now() for { - if readLoopStart.Add(time.Second * time.Duration(opts.Source.Refresh)).Before(time.Now()) { - break // refresh config - } + // if readLoopStart.Add(time.Second * time.Duration(opts.Source.Refresh)).Before(time.Now()) { + // break // refresh config + // } line, err := reader.ReadString('\n') if err != nil && err != io.EOF { logger.Warningf("[%s] Failed to read logfile %s: %s. 
Sleeping 60s...", dbUniqueName, latest, err) @@ -284,7 +268,7 @@ func logparseLoop(dbUniqueName, metricName string, configMap map[string]float64, time.Sleep(time.Millisecond * time.Duration(eofSleepMillis)) // check for newly opened logfiles - file, _ := getFileWithNextModTimestamp(dbUniqueName, logsGlobPath, latest) + file, _ := getFileWithNextModTimestamp(logsGlobPath, latest) if file != "" { previous = latest latest = file @@ -311,7 +295,7 @@ func logparseLoop(dbUniqueName, metricName string, configMap map[string]float64, goto send_to_storage_if_needed } - result := RegexMatchesToMap(csvlogRegex, matches) + result := regexMatchesToMap(csvlogRegex, matches) //log.Debugf("RegexMatchesToMap: %+v", result) errorSeverity, ok := result["error_severity"] if !ok { @@ -340,8 +324,8 @@ func logparseLoop(dbUniqueName, metricName string, configMap map[string]float64, logger.Debugf("[%s] Sending log event counts for last interval to storage channel. Local eventcounts: %+v, global eventcounts: %+v", dbUniqueName, eventCounts, eventCountsTotal) metricStoreMessages := eventCountsToMetricStoreMessages(eventCounts, eventCountsTotal, mdb) storeCh <- metricStoreMessages - ZeroEventCounts(eventCounts) - ZeroEventCounts(eventCountsTotal) + zeroEventCounts(eventCounts) + zeroEventCounts(eventCountsTotal) lastSendTime = time.Now() } @@ -358,58 +342,45 @@ func severityToEnglish(serverLang, errorSeverity string) string { severityMap := PgSeveritiesLocale[serverLang] severityEn, ok := severityMap[errorSeverity] if !ok { - logger.Warningf("Failed to map severity '%s' to english from language '%s'", errorSeverity, serverLang) return errorSeverity } return severityEn } -func ZeroEventCounts(eventCounts map[string]int64) { +func zeroEventCounts(eventCounts map[string]int64) { for _, severity := range PgSeverities { eventCounts[severity] = 0 } } -func tryDetermineLogFolder(mdb sources.MonitoredDatabase) string { +func tryDetermineLogFolder(ctx context.Context, conn db.PgxIface) (string, error) { sql := `select current_setting('data_directory') as dd, current_setting('log_directory') as ld` - - logger.Infof("[%s] Trying to determine server logs folder via SQL as host_config.logs_glob_path not specified...", mdb.DBUniqueName) - data, err := DBExecReadByDbUniqueName(mainContext, mdb.DBUniqueName, sql) + var dd, ld string + err := conn.QueryRow(ctx, sql).Scan(&dd, &ld) if err != nil { - logger.Errorf("[%s] Failed to query data_directory and log_directory settings...are you superuser or have pg_monitor grant?", mdb.DBUniqueName) - return "" + return "", err } - ld := data[0]["ld"].(string) - dd := data[0]["dd"].(string) if strings.HasPrefix(ld, "/") { // we have a full path we can use - return path.Join(ld, CSVLogDefaultGlobSuffix) + return path.Join(ld, CSVLogDefaultGlobSuffix), nil } - return path.Join(dd, ld, CSVLogDefaultGlobSuffix) + return path.Join(dd, ld, CSVLogDefaultGlobSuffix), nil } -func tryDetermineLogMessagesLanguage(mdb sources.MonitoredDatabase) string { +func tryDetermineLogMessagesLanguage(ctx context.Context, conn db.PgxIface) (string, error) { sql := `select current_setting('lc_messages')::varchar(2) as lc_messages;` - - logger.Debugf("[%s] Trying to determine server log messages language...", mdb.DBUniqueName) - data, err := DBExecReadByDbUniqueName(mainContext, mdb.DBUniqueName, sql) + var lc string + err := conn.QueryRow(ctx, sql).Scan(&lc) if err != nil { - logger.Errorf("[%s] Failed to lc_messages settings: %s", mdb.DBUniqueName, err) - return "" + return "", err } - lang := 
data[0]["lc_messages"].(string) - if lang == "en" { - return lang - } - _, ok := PgSeveritiesLocale[lang] - if !ok { - logger.Warningf("[%s] Server log language '%s' is not yet mapped, assuming severities in english: %+v", mdb.DBUniqueName, lang, PgSeverities) - return "en" + if _, ok := PgSeveritiesLocale[lc]; !ok { + return "en", nil } - return lang + return lc, nil } -func RegexMatchesToMap(csvlogRegex *regexp.Regexp, matches []string) map[string]string { +func regexMatchesToMap(csvlogRegex *regexp.Regexp, matches []string) map[string]string { result := make(map[string]string) if len(matches) == 0 || csvlogRegex == nil { return result diff --git a/src/psutil/psutil.go b/src/metrics/psutil/psutil.go similarity index 100% rename from src/psutil/psutil.go rename to src/metrics/psutil/psutil.go diff --git a/src/psutil/psutil_darwin.go b/src/metrics/psutil/psutil_darwin.go similarity index 100% rename from src/psutil/psutil_darwin.go rename to src/metrics/psutil/psutil_darwin.go diff --git a/src/psutil/psutil_linux.go b/src/metrics/psutil/psutil_linux.go similarity index 100% rename from src/psutil/psutil_linux.go rename to src/metrics/psutil/psutil_linux.go diff --git a/src/psutil/psutil_windows.go b/src/metrics/psutil/psutil_windows.go similarity index 100% rename from src/psutil/psutil_windows.go rename to src/metrics/psutil/psutil_windows.go diff --git a/src/metrics/types.go b/src/metrics/types.go index 0db21f2a4d..543087b7e2 100644 --- a/src/metrics/types.go +++ b/src/metrics/types.go @@ -52,6 +52,16 @@ type MeasurementMessage struct { SystemIdentifier string } +const ( + gathererStatusStart = "START" + gathererStatusStop = "STOP" +) + +type ControlMessage struct { + Action string // START, STOP, PAUSE + Config map[string]float64 +} + type Preset struct { Name string Description string