diff --git a/go.mod b/go.mod
index a9d9b2cc8a..04190c00b3 100644
--- a/go.mod
+++ b/go.mod
@@ -43,6 +43,7 @@ require (
 	github.com/bitfield/script v0.19.0
 	github.com/blang/semver/v4 v4.0.0
 	github.com/go-chi/chi/v5 v5.0.8-0.20220103230436-7dbe9a0bd10f
+	github.com/gocarina/gocsv v0.0.0-20220927221512-ad3251f9fa25
 	github.com/ivanpirog/coloredcobra v1.0.0
 	github.com/james-barrow/golang-ipc v0.0.0-20210227130457-95e7cc81f5e2
 	github.com/jaypipes/ghw v0.9.0
diff --git a/go.sum b/go.sum
index 3d1ec41526..b33662725d 100644
--- a/go.sum
+++ b/go.sum
@@ -182,6 +182,8 @@ github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8=
 github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
 github.com/gobwas/ws v1.0.2 h1:CoAavW/wd/kulfZmSIBt6p24n4j7tHgNVCjsfHVNUbo=
 github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM=
+github.com/gocarina/gocsv v0.0.0-20220927221512-ad3251f9fa25 h1:wxgEEZvsnOTrDO2npSSKUMDx5IykfoGmro+/Vjc1BQ8=
+github.com/gocarina/gocsv v0.0.0-20220927221512-ad3251f9fa25/go.mod h1:5YoVOkjYAQumqlV356Hj3xeYh4BdZuLE0/nRkf2NKkI=
 github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
 github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
 github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
diff --git a/pkg/servicedisc/autoconnect.go b/pkg/servicedisc/autoconnect.go
index 6f64bb64cd..3cc1ac4f0d 100644
--- a/pkg/servicedisc/autoconnect.go
+++ b/pkg/servicedisc/autoconnect.go
@@ -60,6 +60,8 @@ func (a *autoconnector) Run(ctx context.Context) (err error) {
 
 	for {
 		select {
+		case <-ctx.Done():
+			return context.Canceled
 		case <-publicServiceTicket.C:
 			// successfully established transports
 			tps := a.tm.GetTransportsByLabel(transport.LabelAutomatic)
@@ -93,8 +95,6 @@ func (a *autoconnector) Run(ctx context.Context) (err error) {
 					}
 				}
 			}
-		case <-ctx.Done():
-			return context.Canceled
 		}
 	}
 }
diff --git a/pkg/skyenv/values.go b/pkg/skyenv/values.go
index 7ee0b83a62..6b92596b2d 100644
--- a/pkg/skyenv/values.go
+++ b/pkg/skyenv/values.go
@@ -12,6 +12,7 @@ import (
 	"github.com/bitfield/script"
 	"github.com/google/uuid"
 	"github.com/jaypipes/ghw"
+	"github.com/skycoin/dmsg/pkg/dmsg"
 
 	"github.com/skycoin/skywire-utilities/pkg/buildinfo"
 	"github.com/skycoin/skywire-utilities/pkg/cipher"
@@ -29,12 +30,12 @@ const (
 // Dmsg port constants.
 // TODO(evanlinjin): Define these properly. These are currently random.
 const (
-	DmsgCtrlPort           uint16 = 7   // Listening port for dmsgctrl protocol (similar to TCP Echo Protocol).
-	DmsgSetupPort          uint16 = 36  // Listening port of a setup node.
-	DmsgHypervisorPort     uint16 = 46  // Listening port of a hypervisor for incoming RPC visor connections over dmsg.
-	DmsgTransportSetupPort uint16 = 47  // Listening port for transport setup RPC over dmsg.
-	DmsgHTTPPort           uint16 = 80  // Listening port for dmsghttp logserver.
-	DmsgAwaitSetupPort     uint16 = 136 // Listening port of a visor for setup operations.
+	DmsgCtrlPort           uint16 = 7                        // Listening port for dmsgctrl protocol (similar to TCP Echo Protocol).
+	DmsgSetupPort          uint16 = 36                       // Listening port of a setup node.
+	DmsgHypervisorPort     uint16 = 46                       // Listening port of a hypervisor for incoming RPC visor connections over dmsg.
+	DmsgTransportSetupPort uint16 = 47                       // Listening port for transport setup RPC over dmsg.
+	DmsgHTTPPort           uint16 = dmsg.DefaultDmsgHTTPPort // Listening port for dmsghttp logserver.
+	DmsgAwaitSetupPort     uint16 = 136                      // Listening port of a visor for setup operations.
 )
 
 // Transport port constants.
@@ -93,7 +94,8 @@ const (
 
 // Routing constants
 const (
-	TpLogStore = "./transport_logs"
+	TpLogStore = "transport_logs"
+	Custom     = "custom"
 )
 
 // Local constants
diff --git a/pkg/transport/log.go b/pkg/transport/log.go
index fbb5c5c73c..730d37ed98 100644
--- a/pkg/transport/log.go
+++ b/pkg/transport/log.go
@@ -3,7 +3,6 @@ package transport
 import (
 	"bytes"
 	"encoding/gob"
-	"encoding/json"
 	"errors"
 	"fmt"
 	"os"
@@ -11,18 +10,28 @@ import (
 	"strconv"
 	"sync"
 	"sync/atomic"
+	"time"
 
+	"github.com/gocarina/gocsv"
 	"github.com/google/uuid"
 
 	"github.com/skycoin/skywire-utilities/pkg/logging"
 )
 
+// CsvEntry represents a logging entry for csv for a given Transport.
+type CsvEntry struct {
+	TpID uuid.UUID `csv:"tp_id"`
+	// atomic requires 64-bit alignment for struct field access
+	LogEntry
+	TimeStamp time.Time `csv:"time_stamp"` // TimeStamp should be time.RFC3339Nano formatted
+}
+
 // LogEntry represents a logging entry for a given Transport.
 // The entry is updated every time a packet is received or sent.
 type LogEntry struct {
 	// atomic requires 64-bit alignment for struct field access
-	RecvBytes uint64 `json:"recv"` // Total received bytes.
-	SentBytes uint64 `json:"sent"` // Total sent bytes.
+	RecvBytes uint64 `csv:"recv"` // Total received bytes.
+	SentBytes uint64 `csv:"sent"` // Total sent bytes.
 }
 
 // AddRecv records read.
@@ -118,46 +127,103 @@ type fileTransportLogStore struct {
 
 // FileTransportLogStore implements file TransportLogStore.
 func FileTransportLogStore(dir string) (LogStore, error) {
-	if err := os.MkdirAll(dir, 0707); err != nil {
+	if err := os.MkdirAll(dir, 0606); err != nil {
 		return nil, err
 	}
 	log := logging.MustGetLogger("transport")
 	return &fileTransportLogStore{dir, log}, nil
 }
 
-func (tls *fileTransportLogStore) Entry(id uuid.UUID) (*LogEntry, error) {
-	f, err := os.Open(filepath.Join(tls.dir, fmt.Sprintf("%s.log", id)))
+func (tls *fileTransportLogStore) Entry(tpID uuid.UUID) (*LogEntry, error) {
+	entries, err := tls.readFromCSV(tls.today())
+	if err != nil {
+		return nil, err
+	}
+	for _, entry := range entries {
+		if entry.TpID == tpID {
+			return &entry.LogEntry, nil
+		}
+	}
+	return nil, nil
+}
+
+func (tls *fileTransportLogStore) Record(id uuid.UUID, entry *LogEntry) error {
+	cEntry := &CsvEntry{
+		TpID:      id,
+		LogEntry:  *entry,
+		TimeStamp: time.Now().UTC(),
+	}
+
+	return tls.writeToCSV(cEntry)
+}
+
+func (tls *fileTransportLogStore) writeToCSV(cEntry *CsvEntry) error {
+	f, err := os.OpenFile(filepath.Join(tls.dir, fmt.Sprintf("%s.csv", tls.today())), os.O_RDWR|os.O_CREATE, os.ModePerm)
 	if err != nil {
-		return nil, fmt.Errorf("open: %w", err)
+		return err
 	}
+
 	defer func() {
 		if err := f.Close(); err != nil {
-			tls.log.WithError(err).Warn("Failed to close file")
+			tls.log.WithError(err).Errorln("Failed to close transport log file")
 		}
 	}()
 
-	entry := &LogEntry{}
-	if err := json.NewDecoder(f).Decode(entry); err != nil {
-		return nil, fmt.Errorf("json: %w", err)
+	readClients := []*CsvEntry{}
+	writeClients := []*CsvEntry{}
+
+	if err := gocsv.UnmarshalFile(f, &readClients); err != nil && !errors.Is(err, gocsv.ErrEmptyCSVFile) { // Load clients from file
+		return err
 	}
-	return entry, nil
+	if len(readClients) == 0 {
+		writeClients = append(writeClients, cEntry)
+	}
+
+	for _, client := range readClients {
+		if client.TpID == cEntry.TpID {
+			writeClients = append(writeClients, cEntry)
+			continue
+		}
+		writeClients = append(writeClients, client)
+	}
+
+	if _, err := f.Seek(0, 0); err != nil { // Go to the start of the file
+		return err
+	}
+
+	_, err = gocsv.MarshalString(&writeClients) // Get all clients as CSV string
+	if err != nil {
+		return err
+	}
+
+	err = gocsv.MarshalFile(&writeClients, f) // Use this to save the CSV back to the file
+	if err != nil {
+		return err
+	}
+	return nil
 }
 
-func (tls *fileTransportLogStore) Record(id uuid.UUID, entry *LogEntry) error {
-	f, err := os.OpenFile(filepath.Join(tls.dir, fmt.Sprintf("%s.log", id)), os.O_RDWR|os.O_CREATE, 0600)
+func (tls *fileTransportLogStore) readFromCSV(fileName string) ([]*CsvEntry, error) {
+	f, err := os.OpenFile(filepath.Join(tls.dir, fmt.Sprint(fileName)), os.O_RDWR|os.O_CREATE, os.ModePerm)
 	if err != nil {
-		return fmt.Errorf("open: %w", err)
+		return nil, err
 	}
+
 	defer func() {
 		if err := f.Close(); err != nil {
-			tls.log.WithError(err).Warn("Failed to close file")
+			tls.log.WithError(err).Errorln("Failed to close transport log file")
 		}
 	}()
 
-	if err := json.NewEncoder(f).Encode(entry); err != nil {
-		return fmt.Errorf("json: %w", err)
+	readClients := []*CsvEntry{}
+
+	if err := gocsv.UnmarshalFile(f, &readClients); err != nil && !errors.Is(err, gocsv.ErrEmptyCSVFile) { // Load clients from file
+		return nil, err
 	}
+	return readClients, nil
+}
 
-	return nil
+func (tls *fileTransportLogStore) today() string {
+	return time.Now().UTC().Format("2006-01-02")
 }
diff --git a/pkg/visor/init.go b/pkg/visor/init.go
index 50b7a27246..01a9e81afd 100644
--- a/pkg/visor/init.go
+++ b/pkg/visor/init.go
@@ -15,6 +15,7 @@ import (
 	"time"
 
 	"github.com/ccding/go-stun/stun"
+	"github.com/sirupsen/logrus"
 	"github.com/skycoin/dmsg/pkg/direct"
 	dmsgdisc "github.com/skycoin/dmsg/pkg/disc"
 	"github.com/skycoin/dmsg/pkg/dmsg"
@@ -47,6 +48,7 @@ import (
 	"github.com/skycoin/skywire/pkg/utclient"
 	"github.com/skycoin/skywire/pkg/util/osutil"
 	"github.com/skycoin/skywire/pkg/visor/dmsgtracker"
+	"github.com/skycoin/skywire/pkg/visor/logserver"
 	"github.com/skycoin/skywire/pkg/visor/visorconfig"
 	vinit "github.com/skycoin/skywire/pkg/visor/visorinit"
 )
@@ -352,6 +354,16 @@ func initDmsgHTTPLogServer(ctx context.Context, v *Visor, log *logging.Logger) e
 	}
 	logger := v.MasterLogger().PackageLogger("dmsghttp_logserver")
 
+	tpLogPath := v.conf.LocalPath + "/" + skyenv.TpLogStore
+	customPath := v.conf.LocalPath + "/" + skyenv.Custom
+
+	var printLog bool
+	if v.MasterLogger().GetLevel() == logrus.DebugLevel || v.MasterLogger().GetLevel() == logrus.TraceLevel {
+		printLog = true
+	}
+
+	lsAPI := logserver.New(logger, tpLogPath, v.conf.LocalPath, customPath, printLog)
+
 	lis, err := dmsgC.Listen(skyenv.DmsgHTTPPort)
 	if err != nil {
 		return err
 	}
@@ -362,11 +374,13 @@ func initDmsgHTTPLogServer(ctx context.Context, v *Visor, log *logging.Logger) e
 			logger.WithError(err).Error()
 		}
 	}()
+
+	log.WithField("dmsg_addr", fmt.Sprintf("dmsg://%v", lis.Addr().String())).
+		Debug("Serving...")
 	srv := &http.Server{
-		ReadHeaderTimeout: 5 * time.Second,
-		ReadTimeout:       5 * time.Second,
-		WriteTimeout:      10 * time.Second,
-		Handler:           http.FileServer(http.Dir(v.conf.LocalPath)),
+		ReadHeaderTimeout: 2 * time.Second,
+		IdleTimeout:       30 * time.Second,
+		Handler:           lsAPI,
 	}
 
 	wg := new(sync.WaitGroup)
@@ -449,7 +463,10 @@ func initTransport(ctx context.Context, v *Visor, log *logging.Logger) error {
 		return err
 	}
 
-	logS := transport.InMemoryTransportLogStore()
+	logS, err := transport.FileTransportLogStore(v.conf.LocalPath + "/" + skyenv.TpLogStore)
+	if err != nil {
+		return err
+	}
 
 	pTps, err := v.conf.GetPersistentTransports()
 	if err != nil {
@@ -579,8 +596,8 @@ func getRouteSetupHooks(ctx context.Context, v *Visor, log *logging.Logger) []ro
 	trySUDPH := false
 
 	for _, trans := range transports {
-		ntype := network.Type(trans)
-		if ntype == network.STCPR {
+		nType := network.Type(trans)
+		if nType == network.STCPR {
 			trySTCPR = true
 			continue
 		}
@@ -589,7 +606,7 @@ func getRouteSetupHooks(ctx context.Context, v *Visor, log *logging.Logger) []ro
 		<-v.stunReady
 
 		// skip if SUDPH is under symmetric NAT / under UDP firewall.
-		if ntype == network.SUDPH && (v.stunClient.NATType == stun.NATSymmetric ||
+		if nType == network.SUDPH && (v.stunClient.NATType == stun.NATSymmetric ||
 			v.stunClient.NATType == stun.NATSymmetricUDPFirewall) {
 			continue
 		}
diff --git a/pkg/visor/logserver/api.go b/pkg/visor/logserver/api.go
new file mode 100644
index 0000000000..48eeabfde5
--- /dev/null
+++ b/pkg/visor/logserver/api.go
@@ -0,0 +1,90 @@
+// Package logserver contains APIs for the logserver
+package logserver
+
+import (
+	"encoding/json"
+	"net/http"
+	"time"
+
+	"github.com/go-chi/chi/v5"
+	"github.com/go-chi/chi/v5/middleware"
+	"github.com/sirupsen/logrus"
+
+	"github.com/skycoin/skywire-utilities/pkg/buildinfo"
+	"github.com/skycoin/skywire-utilities/pkg/httputil"
+	"github.com/skycoin/skywire-utilities/pkg/logging"
+	"github.com/skycoin/skywire/pkg/skyenv"
+)
+
+// API registers all the API endpoints.
+// It implements a net/http.Handler.
+type API struct {
+	http.Handler
+
+	logger    *logging.Logger
+	startedAt time.Time
+}
+
+// New creates a new API.
+func New(log *logging.Logger, tpLogPath, localPath, customPath string, printLog bool) *API {
+	api := &API{
+		logger:    log,
+		startedAt: time.Now(),
+	}
+
+	r := chi.NewRouter()
+
+	r.Use(middleware.RequestID)
+	r.Use(middleware.RealIP)
+	if printLog {
+		r.Use(middleware.Logger)
+		r.Use(middleware.Recoverer)
+	}
+	r.Use(httputil.SetLoggerMiddleware(log))
+
+	r.Get("/health", api.health)
+
+	fsTP := http.FileServer(http.Dir(tpLogPath))
+	r.Handle("/*", http.StripPrefix("/", fsTP))
+
+	fsLocal := http.FileServer(http.Dir(localPath))
+	r.Handle("/"+skyenv.SurveyFile, http.StripPrefix("/", fsLocal))
+
+	r.Handle("/"+skyenv.PrivFile, http.StripPrefix("/", fsLocal))
+
+	fsCustom := http.FileServer(http.Dir(customPath))
+	r.Handle("/*", http.StripPrefix("/", fsCustom))
+
+	api.Handler = r
+	return api
+}
+
+func (api *API) health(w http.ResponseWriter, r *http.Request) {
+	info := buildinfo.Get()
+	api.writeJSON(w, r, http.StatusOK, httputil.HealthCheckResponse{
+		BuildInfo: info,
+		StartedAt: api.startedAt,
+	})
+}
+
+func (api *API) writeJSON(w http.ResponseWriter, r *http.Request, code int, object interface{}) {
+	jsonObject, err := json.Marshal(object)
+	if err != nil {
+		api.log(r).WithError(err).Errorf("failed to encode json response")
+		w.WriteHeader(http.StatusInternalServerError)
+
+		return
+	}
+
+	w.Header().Set("Content-Type", "application/json")
+	w.WriteHeader(code)
+
+	_, err = w.Write(jsonObject)
+	if err != nil {
+		api.log(r).WithError(err).Errorf("failed to write json response")
+	}
+}
+
+func (api *API) log(r *http.Request) logrus.FieldLogger {
+	return httputil.GetLogger(r)
+}
diff --git a/vendor/github.com/gocarina/gocsv/.gitignore b/vendor/github.com/gocarina/gocsv/.gitignore
new file mode 100644
index 0000000000..485dee64bc
--- /dev/null
+++ b/vendor/github.com/gocarina/gocsv/.gitignore
@@ -0,0 +1 @@
+.idea
diff --git a/vendor/github.com/gocarina/gocsv/.travis.yml b/vendor/github.com/gocarina/gocsv/.travis.yml
new file mode 100644
index 0000000000..61c24c6c97
--- /dev/null
+++ b/vendor/github.com/gocarina/gocsv/.travis.yml
@@ -0,0 +1,4 @@
+language: go
+arch:
+  - amd64
+  - ppc64le
diff --git a/vendor/github.com/gocarina/gocsv/LICENSE b/vendor/github.com/gocarina/gocsv/LICENSE
new file mode 100644
index 0000000000..052a371193
--- /dev/null
+++ b/vendor/github.com/gocarina/gocsv/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Jonathan Picques
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file diff --git a/vendor/github.com/gocarina/gocsv/README.md b/vendor/github.com/gocarina/gocsv/README.md new file mode 100644 index 0000000000..085f747dae --- /dev/null +++ b/vendor/github.com/gocarina/gocsv/README.md @@ -0,0 +1,173 @@ +Go CSV +===== + +The GoCSV package aims to provide easy serialization and deserialization functions to use CSV in Golang + +API and techniques inspired from https://godoc.org/gopkg.in/mgo.v2 + +[![GoDoc](https://godoc.org/github.com/gocarina/gocsv?status.png)](https://godoc.org/github.com/gocarina/gocsv) +[![Build Status](https://travis-ci.org/gocarina/gocsv.svg?branch=master)](https://travis-ci.org/gocarina/gocsv) + +Installation +===== + +```go get -u github.com/gocarina/gocsv``` + +Full example +===== + +Consider the following CSV file + +```csv + +client_id,client_name,client_age +1,Jose,42 +2,Daniel,26 +3,Vincent,32 + +``` + +Easy binding in Go! +--- + +```go + +package main + +import ( + "fmt" + "os" + + "github.com/gocarina/gocsv" +) + +type NotUsed struct { + Name string +} + +type Client struct { // Our example struct, you can use "-" to ignore a field + Id string `csv:"client_id"` + Name string `csv:"client_name"` + Age string `csv:"client_age"` + NotUsedString string `csv:"-"` + NotUsedStruct NotUsed `csv:"-"` +} + +func main() { + clientsFile, err := os.OpenFile("clients.csv", os.O_RDWR|os.O_CREATE, os.ModePerm) + if err != nil { + panic(err) + } + defer clientsFile.Close() + + clients := []*Client{} + + if err := gocsv.UnmarshalFile(clientsFile, &clients); err != nil { // Load clients from file + panic(err) + } + for _, client := range clients { + fmt.Println("Hello", client.Name) + } + + if _, err := clientsFile.Seek(0, 0); err != nil { // Go to the start of the file + panic(err) + } + + clients = append(clients, &Client{Id: "12", Name: "John", Age: "21"}) // Add clients + clients = append(clients, &Client{Id: "13", Name: "Fred"}) + clients = append(clients, &Client{Id: "14", Name: "James", Age: "32"}) + clients = append(clients, &Client{Id: "15", Name: "Danny"}) + csvContent, err := gocsv.MarshalString(&clients) // Get all clients as CSV string + //err = gocsv.MarshalFile(&clients, clientsFile) // Use this to save the CSV back to the file + if err != nil { + panic(err) + } + fmt.Println(csvContent) // Display all clients as CSV string + +} + +``` + +Customizable Converters +--- + +```go + +type DateTime struct { + time.Time +} + +// Convert the internal date as CSV string +func (date *DateTime) MarshalCSV() (string, error) { + return date.Time.Format("20060201"), nil +} + +// You could also use the standard Stringer interface +func (date *DateTime) String() (string) { + return date.String() // Redundant, just for example +} + +// Convert the CSV string as internal date +func (date *DateTime) UnmarshalCSV(csv string) (err error) { + date.Time, err = time.Parse("20060201", csv) + return err +} + +type Client struct { // Our example struct with a custom type (DateTime) + Id string `csv:"id"` + Name string `csv:"name"` + Employed DateTime `csv:"employed"` +} + +``` + +Customizable CSV Reader / Writer +--- + +```go + +func main() { + ... + + gocsv.SetCSVReader(func(in io.Reader) gocsv.CSVReader { + r := csv.NewReader(in) + r.Comma = '|' + return r // Allows use pipe as delimiter + }) + + ... + + gocsv.SetCSVReader(func(in io.Reader) gocsv.CSVReader { + r := csv.NewReader(in) + r.LazyQuotes = true + r.Comma = '.' + return r // Allows use dot as delimiter and use quotes in CSV + }) + + ... 
+ + gocsv.SetCSVReader(func(in io.Reader) gocsv.CSVReader { + //return csv.NewReader(in) + return gocsv.LazyCSVReader(in) // Allows use of quotes in CSV + }) + + ... + + gocsv.UnmarshalFile(file, &clients) + + ... + + gocsv.SetCSVWriter(func(out io.Writer) *gocsv.SafeCSVWriter { + writer := csv.NewWriter(out) + writer.Comma = '|' + return gocsv.NewSafeCSVWriter(writer) + }) + + ... + + gocsv.MarshalFile(&clients, file) + + ... +} + +``` diff --git a/vendor/github.com/gocarina/gocsv/csv.go b/vendor/github.com/gocarina/gocsv/csv.go new file mode 100644 index 0000000000..3ba3efb90b --- /dev/null +++ b/vendor/github.com/gocarina/gocsv/csv.go @@ -0,0 +1,537 @@ +// Copyright 2014 Jonathan Picques. All rights reserved. +// Use of this source code is governed by a MIT license +// The license can be found in the LICENSE file. + +// The GoCSV package aims to provide easy CSV serialization and deserialization to the golang programming language + +package gocsv + +import ( + "bytes" + "encoding/csv" + "fmt" + "io" + "os" + "reflect" + "strings" + "sync" +) + +// FailIfUnmatchedStructTags indicates whether it is considered an error when there is an unmatched +// struct tag. +var FailIfUnmatchedStructTags = false + +// FailIfDoubleHeaderNames indicates whether it is considered an error when a header name is repeated +// in the csv header. +var FailIfDoubleHeaderNames = false + +// ShouldAlignDuplicateHeadersWithStructFieldOrder indicates whether we should align duplicate CSV +// headers per their alignment in the struct definition. +var ShouldAlignDuplicateHeadersWithStructFieldOrder = false + +// TagName defines key in the struct field's tag to scan +var TagName = "csv" + +// TagSeparator defines seperator string for multiple csv tags in struct fields +var TagSeparator = "," + +// Normalizer is a function that takes and returns a string. It is applied to +// struct and header field values before they are compared. It can be used to alter +// names for comparison. For instance, you could allow case insensitive matching +// or convert '-' to '_'. +type Normalizer func(string) string + +type ErrorHandler func(*csv.ParseError) bool + +// normalizeName function initially set to a nop Normalizer. +var normalizeName = DefaultNameNormalizer() + +// DefaultNameNormalizer is a nop Normalizer. +func DefaultNameNormalizer() Normalizer { return func(s string) string { return s } } + +// SetHeaderNormalizer sets the normalizer used to normalize struct and header field names. +func SetHeaderNormalizer(f Normalizer) { + normalizeName = f + // Need to clear the cache hen the header normalizer changes. + structInfoCache = sync.Map{} +} + +// -------------------------------------------------------------------------- +// CSVWriter used to format CSV + +var selfCSVWriter = DefaultCSVWriter + +// DefaultCSVWriter is the default SafeCSVWriter used to format CSV (cf. csv.NewWriter) +func DefaultCSVWriter(out io.Writer) *SafeCSVWriter { + writer := NewSafeCSVWriter(csv.NewWriter(out)) + + // As only one rune can be defined as a CSV separator, we are going to trim + // the custom tag separator and use the first rune. + if runes := []rune(strings.TrimSpace(TagSeparator)); len(runes) > 0 { + writer.Comma = runes[0] + } + + return writer +} + +// SetCSVWriter sets the SafeCSVWriter used to format CSV. 
+func SetCSVWriter(csvWriter func(io.Writer) *SafeCSVWriter) { + selfCSVWriter = csvWriter +} + +func getCSVWriter(out io.Writer) *SafeCSVWriter { + return selfCSVWriter(out) +} + +// -------------------------------------------------------------------------- +// CSVReader used to parse CSV + +var selfCSVReader = DefaultCSVReader + +// DefaultCSVReader is the default CSV reader used to parse CSV (cf. csv.NewReader) +func DefaultCSVReader(in io.Reader) CSVReader { + return csv.NewReader(in) +} + +// LazyCSVReader returns a lazy CSV reader, with LazyQuotes and TrimLeadingSpace. +func LazyCSVReader(in io.Reader) CSVReader { + csvReader := csv.NewReader(in) + csvReader.LazyQuotes = true + csvReader.TrimLeadingSpace = true + return csvReader +} + +// SetCSVReader sets the CSV reader used to parse CSV. +func SetCSVReader(csvReader func(io.Reader) CSVReader) { + selfCSVReader = csvReader +} + +func getCSVReader(in io.Reader) CSVReader { + return selfCSVReader(in) +} + +// -------------------------------------------------------------------------- +// Marshal functions + +// MarshalFile saves the interface as CSV in the file. +func MarshalFile(in interface{}, file *os.File) (err error) { + return Marshal(in, file) +} + +// MarshalString returns the CSV string from the interface. +func MarshalString(in interface{}) (out string, err error) { + bufferString := bytes.NewBufferString(out) + if err := Marshal(in, bufferString); err != nil { + return "", err + } + return bufferString.String(), nil +} + +// MarshalStringWithoutHeaders returns the CSV string from the interface. +func MarshalStringWithoutHeaders(in interface{}) (out string, err error) { + bufferString := bytes.NewBufferString(out) + if err := MarshalWithoutHeaders(in, bufferString); err != nil { + return "", err + } + return bufferString.String(), nil +} + +// MarshalBytes returns the CSV bytes from the interface. +func MarshalBytes(in interface{}) (out []byte, err error) { + bufferString := bytes.NewBuffer(out) + if err := Marshal(in, bufferString); err != nil { + return nil, err + } + return bufferString.Bytes(), nil +} + +// Marshal returns the CSV in writer from the interface. +func Marshal(in interface{}, out io.Writer) (err error) { + writer := getCSVWriter(out) + return writeTo(writer, in, false) +} + +// MarshalWithoutHeaders returns the CSV in writer from the interface. +func MarshalWithoutHeaders(in interface{}, out io.Writer) (err error) { + writer := getCSVWriter(out) + return writeTo(writer, in, true) +} + +// MarshalChan returns the CSV read from the channel. +func MarshalChan(c <-chan interface{}, out CSVWriter) error { + return writeFromChan(out, c, false) +} + +// MarshalChanWithoutHeaders returns the CSV read from the channel. +func MarshalChanWithoutHeaders(c <-chan interface{}, out CSVWriter) error { + return writeFromChan(out, c, true) +} + +// MarshalCSV returns the CSV in writer from the interface. +func MarshalCSV(in interface{}, out CSVWriter) (err error) { + return writeTo(out, in, false) +} + +// MarshalCSVWithoutHeaders returns the CSV in writer from the interface. +func MarshalCSVWithoutHeaders(in interface{}, out CSVWriter) (err error) { + return writeTo(out, in, true) +} + +// -------------------------------------------------------------------------- +// Unmarshal functions + +// UnmarshalFile parses the CSV from the file in the interface. +func UnmarshalFile(in *os.File, out interface{}) error { + return Unmarshal(in, out) +} + +// UnmarshalFile parses the CSV from the file in the interface. 
+func UnmarshalFileWithErrorHandler(in *os.File, errHandler ErrorHandler, out interface{}) error { + return UnmarshalWithErrorHandler(in, errHandler, out) +} + +// UnmarshalString parses the CSV from the string in the interface. +func UnmarshalString(in string, out interface{}) error { + return Unmarshal(strings.NewReader(in), out) +} + +// UnmarshalBytes parses the CSV from the bytes in the interface. +func UnmarshalBytes(in []byte, out interface{}) error { + return Unmarshal(bytes.NewReader(in), out) +} + +// Unmarshal parses the CSV from the reader in the interface. +func Unmarshal(in io.Reader, out interface{}) error { + return readTo(newSimpleDecoderFromReader(in), out) +} + +// Unmarshal parses the CSV from the reader in the interface. +func UnmarshalWithErrorHandler(in io.Reader, errHandle ErrorHandler, out interface{}) error { + return readToWithErrorHandler(newSimpleDecoderFromReader(in), errHandle, out) +} + +// UnmarshalWithoutHeaders parses the CSV from the reader in the interface. +func UnmarshalWithoutHeaders(in io.Reader, out interface{}) error { + return readToWithoutHeaders(newSimpleDecoderFromReader(in), out) +} + +// UnmarshalCSVWithoutHeaders parses a headerless CSV with passed in CSV reader +func UnmarshalCSVWithoutHeaders(in CSVReader, out interface{}) error { + return readToWithoutHeaders(csvDecoder{in}, out) +} + +// UnmarshalDecoder parses the CSV from the decoder in the interface +func UnmarshalDecoder(in Decoder, out interface{}) error { + return readTo(in, out) +} + +// UnmarshalCSV parses the CSV from the reader in the interface. +func UnmarshalCSV(in CSVReader, out interface{}) error { + return readTo(csvDecoder{in}, out) +} + +// UnmarshalCSVToMap parses a CSV of 2 columns into a map. +func UnmarshalCSVToMap(in CSVReader, out interface{}) error { + decoder := NewSimpleDecoderFromCSVReader(in) + header, err := decoder.GetCSVRow() + if err != nil { + return err + } + if len(header) != 2 { + return fmt.Errorf("maps can only be created for csv of two columns") + } + outValue, outType := getConcreteReflectValueAndType(out) + if outType.Kind() != reflect.Map { + return fmt.Errorf("cannot use " + outType.String() + ", only map supported") + } + keyType := outType.Key() + valueType := outType.Elem() + outValue.Set(reflect.MakeMap(outType)) + for { + key := reflect.New(keyType) + value := reflect.New(valueType) + line, err := decoder.GetCSVRow() + if err == io.EOF { + break + } else if err != nil { + return err + } + if err := setField(key, line[0], false); err != nil { + return err + } + if err := setField(value, line[1], false); err != nil { + return err + } + outValue.SetMapIndex(key.Elem(), value.Elem()) + } + return nil +} + +// UnmarshalToChan parses the CSV from the reader and send each value in the chan c. +// The channel must have a concrete type. +func UnmarshalToChan(in io.Reader, c interface{}) error { + if c == nil { + return fmt.Errorf("goscv: channel is %v", c) + } + return readEach(newSimpleDecoderFromReader(in), c) +} + +// UnmarshalToChanWithoutHeaders parses the CSV from the reader and send each value in the chan c. +// The channel must have a concrete type. +func UnmarshalToChanWithoutHeaders(in io.Reader, c interface{}) error { + if c == nil { + return fmt.Errorf("goscv: channel is %v", c) + } + return readEachWithoutHeaders(newSimpleDecoderFromReader(in), c) +} + +// UnmarshalDecoderToChan parses the CSV from the decoder and send each value in the chan c. +// The channel must have a concrete type. 
+func UnmarshalDecoderToChan(in SimpleDecoder, c interface{}) error { + if c == nil { + return fmt.Errorf("goscv: channel is %v", c) + } + return readEach(in, c) +} + +// UnmarshalStringToChan parses the CSV from the string and send each value in the chan c. +// The channel must have a concrete type. +func UnmarshalStringToChan(in string, c interface{}) error { + return UnmarshalToChan(strings.NewReader(in), c) +} + +// UnmarshalBytesToChan parses the CSV from the bytes and send each value in the chan c. +// The channel must have a concrete type. +func UnmarshalBytesToChan(in []byte, c interface{}) error { + return UnmarshalToChan(bytes.NewReader(in), c) +} + +// UnmarshalToCallback parses the CSV from the reader and send each value to the given func f. +// The func must look like func(Struct). +func UnmarshalToCallback(in io.Reader, f interface{}) error { + valueFunc := reflect.ValueOf(f) + t := reflect.TypeOf(f) + if t.NumIn() != 1 { + return fmt.Errorf("the given function must have exactly one parameter") + } + cerr := make(chan error) + c := reflect.MakeChan(reflect.ChanOf(reflect.BothDir, t.In(0)), 0) + go func() { + cerr <- UnmarshalToChan(in, c.Interface()) + }() + for { + select { + case err := <-cerr: + return err + default: + } + v, notClosed := c.Recv() + if !notClosed || v.Interface() == nil { + break + } + callResults := valueFunc.Call([]reflect.Value{v}) + // if last returned value from Call() is an error, return it + if len(callResults) > 0 { + if err, ok := callResults[len(callResults)-1].Interface().(error); ok { + return err + } + } + } + return nil +} + +// UnmarshalDecoderToCallback parses the CSV from the decoder and send each value to the given func f. +// The func must look like func(Struct). +func UnmarshalDecoderToCallback(in SimpleDecoder, f interface{}) error { + valueFunc := reflect.ValueOf(f) + t := reflect.TypeOf(f) + if t.NumIn() != 1 { + return fmt.Errorf("the given function must have exactly one parameter") + } + cerr := make(chan error) + c := reflect.MakeChan(reflect.ChanOf(reflect.BothDir, t.In(0)), 0) + go func() { + cerr <- UnmarshalDecoderToChan(in, c.Interface()) + }() + for { + select { + case err := <-cerr: + return err + default: + } + v, notClosed := c.Recv() + if !notClosed || v.Interface() == nil { + break + } + valueFunc.Call([]reflect.Value{v}) + } + return nil +} + +// UnmarshalBytesToCallback parses the CSV from the bytes and send each value to the given func f. +// The func must look like func(Struct). +func UnmarshalBytesToCallback(in []byte, f interface{}) error { + return UnmarshalToCallback(bytes.NewReader(in), f) +} + +// UnmarshalStringToCallback parses the CSV from the string and send each value to the given func f. +// The func must look like func(Struct). +func UnmarshalStringToCallback(in string, c interface{}) (err error) { + return UnmarshalToCallback(strings.NewReader(in), c) +} + +// UnmarshalToCallbackWithError parses the CSV from the reader and +// send each value to the given func f. +// +// If func returns error, it will stop processing, drain the +// parser and propagate the error to caller. +// +// The func must look like func(Struct) error. 
+func UnmarshalToCallbackWithError(in io.Reader, f interface{}) error { + valueFunc := reflect.ValueOf(f) + t := reflect.TypeOf(f) + if t.NumIn() != 1 { + return fmt.Errorf("the given function must have exactly one parameter") + } + if t.NumOut() != 1 { + return fmt.Errorf("the given function must have exactly one return value") + } + if !isErrorType(t.Out(0)) { + return fmt.Errorf("the given function must only return error") + } + + cerr := make(chan error) + c := reflect.MakeChan(reflect.ChanOf(reflect.BothDir, t.In(0)), 0) + go func() { + cerr <- UnmarshalToChan(in, c.Interface()) + }() + + var fErr error + for { + select { + case err := <-cerr: + if err != nil { + return err + } + return fErr + default: + } + v, notClosed := c.Recv() + if !notClosed || v.Interface() == nil { + if err := <-cerr; err != nil { + fErr = err + } + break + } + + // callback f has already returned an error, stop processing but keep draining the chan c + if fErr != nil { + continue + } + + results := valueFunc.Call([]reflect.Value{v}) + + // If the callback f returns an error, stores it and returns it in future. + errValue := results[0].Interface() + if errValue != nil { + fErr = errValue.(error) + } + } + return fErr +} + +// UnmarshalBytesToCallbackWithError parses the CSV from the bytes and +// send each value to the given func f. +// +// If func returns error, it will stop processing, drain the +// parser and propagate the error to caller. +// +// The func must look like func(Struct) error. +func UnmarshalBytesToCallbackWithError(in []byte, f interface{}) error { + return UnmarshalToCallbackWithError(bytes.NewReader(in), f) +} + +// UnmarshalStringToCallbackWithError parses the CSV from the string and +// send each value to the given func f. +// +// If func returns error, it will stop processing, drain the +// parser and propagate the error to caller. +// +// The func must look like func(Struct) error. +func UnmarshalStringToCallbackWithError(in string, c interface{}) (err error) { + return UnmarshalToCallbackWithError(strings.NewReader(in), c) +} + +// CSVToMap creates a simple map from a CSV of 2 columns. +func CSVToMap(in io.Reader) (map[string]string, error) { + decoder := newSimpleDecoderFromReader(in) + header, err := decoder.GetCSVRow() + if err != nil { + return nil, err + } + if len(header) != 2 { + return nil, fmt.Errorf("maps can only be created for csv of two columns") + } + m := make(map[string]string) + for { + line, err := decoder.GetCSVRow() + if err == io.EOF { + break + } else if err != nil { + return nil, err + } + m[line[0]] = line[1] + } + return m, nil +} + +// CSVToMaps takes a reader and returns an array of dictionaries, using the header row as the keys +func CSVToMaps(reader io.Reader) ([]map[string]string, error) { + r := csv.NewReader(reader) + rows := []map[string]string{} + var header []string + for { + record, err := r.Read() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + if header == nil { + header = record + } else { + dict := map[string]string{} + for i := range header { + dict[header[i]] = record[i] + } + rows = append(rows, dict) + } + } + return rows, nil +} + +// CSVToChanMaps parses the CSV from the reader and send a dictionary in the chan c, using the header row as the keys. 
+func CSVToChanMaps(reader io.Reader, c chan<- map[string]string) error { + r := csv.NewReader(reader) + var header []string + for { + record, err := r.Read() + if err == io.EOF { + break + } + if err != nil { + return err + } + if header == nil { + header = record + } else { + dict := map[string]string{} + for i := range header { + dict[header[i]] = record[i] + } + c <- dict + } + } + return nil +} diff --git a/vendor/github.com/gocarina/gocsv/decode.go b/vendor/github.com/gocarina/gocsv/decode.go new file mode 100644 index 0000000000..537251de8f --- /dev/null +++ b/vendor/github.com/gocarina/gocsv/decode.go @@ -0,0 +1,488 @@ +package gocsv + +import ( + "encoding/csv" + "errors" + "fmt" + "io" + "reflect" +) + +// Decoder . +type Decoder interface { + GetCSVRows() ([][]string, error) +} + +// SimpleDecoder . +type SimpleDecoder interface { + GetCSVRow() ([]string, error) + GetCSVRows() ([][]string, error) +} + +type CSVReader interface { + Read() ([]string, error) + ReadAll() ([][]string, error) +} + +type csvDecoder struct { + CSVReader +} + +func newSimpleDecoderFromReader(r io.Reader) SimpleDecoder { + return csvDecoder{getCSVReader(r)} +} + +var ( + ErrEmptyCSVFile = errors.New("empty csv file given") + ErrNoStructTags = errors.New("no csv struct tags found") +) + +// NewSimpleDecoderFromCSVReader creates a SimpleDecoder, which may be passed +// to the UnmarshalDecoder* family of functions, from a CSV reader. Note that +// encoding/csv.Reader implements CSVReader, so you can pass one of those +// directly here. +func NewSimpleDecoderFromCSVReader(r CSVReader) SimpleDecoder { + return csvDecoder{r} +} + +func (c csvDecoder) GetCSVRows() ([][]string, error) { + return c.ReadAll() +} + +func (c csvDecoder) GetCSVRow() ([]string, error) { + return c.Read() +} + +func mismatchStructFields(structInfo []fieldInfo, headers []string) []string { + missing := make([]string, 0) + if len(structInfo) == 0 { + return missing + } + + headerMap := make(map[string]struct{}, len(headers)) + for idx := range headers { + headerMap[headers[idx]] = struct{}{} + } + + for _, info := range structInfo { + found := false + for _, key := range info.keys { + if _, ok := headerMap[key]; ok { + found = true + break + } + } + if !found { + missing = append(missing, info.keys...) 
+ } + } + return missing +} + +func mismatchHeaderFields(structInfo []fieldInfo, headers []string) []string { + missing := make([]string, 0) + if len(headers) == 0 { + return missing + } + + keyMap := make(map[string]struct{}) + for _, info := range structInfo { + for _, key := range info.keys { + keyMap[key] = struct{}{} + } + } + + for _, header := range headers { + if _, ok := keyMap[header]; !ok { + missing = append(missing, header) + } + } + return missing +} + +func maybeMissingStructFields(structInfo []fieldInfo, headers []string) error { + missing := mismatchStructFields(structInfo, headers) + if len(missing) != 0 { + return fmt.Errorf("found unmatched struct field with tags %v", missing) + } + return nil +} + +// Check that no header name is repeated twice +func maybeDoubleHeaderNames(headers []string) error { + headerMap := make(map[string]bool, len(headers)) + for _, v := range headers { + if _, ok := headerMap[v]; ok { + return fmt.Errorf("repeated header name: %v", v) + } + headerMap[v] = true + } + return nil +} + +// apply normalizer func to headers +func normalizeHeaders(headers []string) []string { + out := make([]string, len(headers)) + for i, h := range headers { + out[i] = normalizeName(h) + } + return out +} + +func readTo(decoder Decoder, out interface{}) error { + return readToWithErrorHandler(decoder, nil, out) +} + +func readToWithErrorHandler(decoder Decoder, errHandler ErrorHandler, out interface{}) error { + outValue, outType := getConcreteReflectValueAndType(out) // Get the concrete type (not pointer) (Slice or Array) + if err := ensureOutType(outType); err != nil { + return err + } + outInnerWasPointer, outInnerType := getConcreteContainerInnerType(outType) // Get the concrete inner type (not pointer) (Container<"?">) + if err := ensureOutInnerType(outInnerType); err != nil { + return err + } + csvRows, err := decoder.GetCSVRows() // Get the CSV csvRows + if err != nil { + return err + } + if len(csvRows) == 0 { + return ErrEmptyCSVFile + } + if err := ensureOutCapacity(&outValue, len(csvRows)); err != nil { // Ensure the container is big enough to hold the CSV content + return err + } + outInnerStructInfo := getStructInfo(outInnerType) // Get the inner struct info to get CSV annotations + if len(outInnerStructInfo.Fields) == 0 { + return ErrNoStructTags + } + + headers := normalizeHeaders(csvRows[0]) + body := csvRows[1:] + + csvHeadersLabels := make(map[int]*fieldInfo, len(outInnerStructInfo.Fields)) // Used to store the correspondance header <-> position in CSV + + headerCount := map[string]int{} + for i, csvColumnHeader := range headers { + curHeaderCount := headerCount[csvColumnHeader] + if fieldInfo := getCSVFieldPosition(csvColumnHeader, outInnerStructInfo, curHeaderCount); fieldInfo != nil { + csvHeadersLabels[i] = fieldInfo + if ShouldAlignDuplicateHeadersWithStructFieldOrder { + curHeaderCount++ + headerCount[csvColumnHeader] = curHeaderCount + } + } + } + + if FailIfUnmatchedStructTags { + if err := maybeMissingStructFields(outInnerStructInfo.Fields, headers); err != nil { + return err + } + } + if FailIfDoubleHeaderNames { + if err := maybeDoubleHeaderNames(headers); err != nil { + return err + } + } + + var withFieldsOK bool + var fieldTypeUnmarshallerWithKeys TypeUnmarshalCSVWithFields + + for i, csvRow := range body { + objectIface := reflect.New(outValue.Index(i).Type()).Interface() + outInner := createNewOutInner(outInnerWasPointer, outInnerType) + for j, csvColumnContent := range csvRow { + if fieldInfo, ok := csvHeadersLabels[j]; ok { // 
Position found accordingly to header name + + if outInner.CanInterface() { + fieldTypeUnmarshallerWithKeys, withFieldsOK = objectIface.(TypeUnmarshalCSVWithFields) + if withFieldsOK { + if err := fieldTypeUnmarshallerWithKeys.UnmarshalCSVWithFields(fieldInfo.getFirstKey(), csvColumnContent); err != nil { + parseError := csv.ParseError{ + Line: i + 2, //add 2 to account for the header & 0-indexing of arrays + Column: j + 1, + Err: err, + } + return &parseError + } + continue + } + } + value := csvColumnContent + if value == "" { + value = fieldInfo.defaultValue + } + if err := setInnerField(&outInner, outInnerWasPointer, fieldInfo.IndexChain, value, fieldInfo.omitEmpty); err != nil { // Set field of struct + parseError := csv.ParseError{ + Line: i + 2, //add 2 to account for the header & 0-indexing of arrays + Column: j + 1, + Err: err, + } + if errHandler == nil || !errHandler(&parseError) { + return &parseError + } + } + } + } + + if withFieldsOK { + reflectedObject := reflect.ValueOf(objectIface) + outInner = reflectedObject.Elem() + } + + outValue.Index(i).Set(outInner) + } + return nil +} + +func readEach(decoder SimpleDecoder, c interface{}) error { + outValue, outType := getConcreteReflectValueAndType(c) // Get the concrete type (not pointer) + if outType.Kind() != reflect.Chan { + return fmt.Errorf("cannot use %v with type %s, only channel supported", c, outType) + } + defer outValue.Close() + + headers, err := decoder.GetCSVRow() + if err != nil { + return err + } + headers = normalizeHeaders(headers) + + outInnerWasPointer, outInnerType := getConcreteContainerInnerType(outType) // Get the concrete inner type (not pointer) (Container<"?">) + if err := ensureOutInnerType(outInnerType); err != nil { + return err + } + outInnerStructInfo := getStructInfo(outInnerType) // Get the inner struct info to get CSV annotations + if len(outInnerStructInfo.Fields) == 0 { + return ErrNoStructTags + } + csvHeadersLabels := make(map[int]*fieldInfo, len(outInnerStructInfo.Fields)) // Used to store the correspondance header <-> position in CSV + headerCount := map[string]int{} + for i, csvColumnHeader := range headers { + curHeaderCount := headerCount[csvColumnHeader] + if fieldInfo := getCSVFieldPosition(csvColumnHeader, outInnerStructInfo, curHeaderCount); fieldInfo != nil { + csvHeadersLabels[i] = fieldInfo + if ShouldAlignDuplicateHeadersWithStructFieldOrder { + curHeaderCount++ + headerCount[csvColumnHeader] = curHeaderCount + } + } + } + if err := maybeMissingStructFields(outInnerStructInfo.Fields, headers); err != nil { + if FailIfUnmatchedStructTags { + return err + } + } + if FailIfDoubleHeaderNames { + if err := maybeDoubleHeaderNames(headers); err != nil { + return err + } + } + i := 0 + for { + line, err := decoder.GetCSVRow() + if err == io.EOF { + break + } else if err != nil { + return err + } + outInner := createNewOutInner(outInnerWasPointer, outInnerType) + for j, csvColumnContent := range line { + if fieldInfo, ok := csvHeadersLabels[j]; ok { // Position found accordingly to header name + if err := setInnerField(&outInner, outInnerWasPointer, fieldInfo.IndexChain, csvColumnContent, fieldInfo.omitEmpty); err != nil { // Set field of struct + return &csv.ParseError{ + Line: i + 2, //add 2 to account for the header & 0-indexing of arrays + Column: j + 1, + Err: err, + } + } + } + } + outValue.Send(outInner) + i++ + } + return nil +} + +func readEachWithoutHeaders(decoder SimpleDecoder, c interface{}) error { + outValue, outType := getConcreteReflectValueAndType(c) // Get the concrete 
type (not pointer) (Slice or Array) + if err := ensureOutType(outType); err != nil { + return err + } + defer outValue.Close() + + outInnerWasPointer, outInnerType := getConcreteContainerInnerType(outType) // Get the concrete inner type (not pointer) (Container<"?">) + if err := ensureOutInnerType(outInnerType); err != nil { + return err + } + outInnerStructInfo := getStructInfo(outInnerType) // Get the inner struct info to get CSV annotations + if len(outInnerStructInfo.Fields) == 0 { + return ErrNoStructTags + } + + i := 0 + for { + line, err := decoder.GetCSVRow() + if err == io.EOF { + break + } else if err != nil { + return err + } + outInner := createNewOutInner(outInnerWasPointer, outInnerType) + for j, csvColumnContent := range line { + fieldInfo := outInnerStructInfo.Fields[j] + if err := setInnerField(&outInner, outInnerWasPointer, fieldInfo.IndexChain, csvColumnContent, fieldInfo.omitEmpty); err != nil { // Set field of struct + return &csv.ParseError{ + Line: i + 2, //add 2 to account for the header & 0-indexing of arrays + Column: j + 1, + Err: err, + } + } + } + outValue.Send(outInner) + i++ + } + return nil +} + +func readToWithoutHeaders(decoder Decoder, out interface{}) error { + outValue, outType := getConcreteReflectValueAndType(out) // Get the concrete type (not pointer) (Slice or Array) + if err := ensureOutType(outType); err != nil { + return err + } + outInnerWasPointer, outInnerType := getConcreteContainerInnerType(outType) // Get the concrete inner type (not pointer) (Container<"?">) + if err := ensureOutInnerType(outInnerType); err != nil { + return err + } + csvRows, err := decoder.GetCSVRows() // Get the CSV csvRows + if err != nil { + return err + } + if len(csvRows) == 0 { + return ErrEmptyCSVFile + } + if err := ensureOutCapacity(&outValue, len(csvRows)+1); err != nil { // Ensure the container is big enough to hold the CSV content + return err + } + outInnerStructInfo := getStructInfo(outInnerType) // Get the inner struct info to get CSV annotations + if len(outInnerStructInfo.Fields) == 0 { + return ErrNoStructTags + } + + for i, csvRow := range csvRows { + outInner := createNewOutInner(outInnerWasPointer, outInnerType) + for j, csvColumnContent := range csvRow { + fieldInfo := outInnerStructInfo.Fields[j] + if err := setInnerField(&outInner, outInnerWasPointer, fieldInfo.IndexChain, csvColumnContent, fieldInfo.omitEmpty); err != nil { // Set field of struct + return &csv.ParseError{ + Line: i + 1, + Column: j + 1, + Err: err, + } + } + } + outValue.Index(i).Set(outInner) + } + + return nil +} + +// Check if the outType is an array or a slice +func ensureOutType(outType reflect.Type) error { + switch outType.Kind() { + case reflect.Slice: + fallthrough + case reflect.Chan: + fallthrough + case reflect.Array: + return nil + } + return fmt.Errorf("cannot use " + outType.String() + ", only slice or array supported") +} + +// Check if the outInnerType is of type struct +func ensureOutInnerType(outInnerType reflect.Type) error { + switch outInnerType.Kind() { + case reflect.Struct: + return nil + } + return fmt.Errorf("cannot use " + outInnerType.String() + ", only struct supported") +} + +func ensureOutCapacity(out *reflect.Value, csvLen int) error { + switch out.Kind() { + case reflect.Array: + if out.Len() < csvLen-1 { // Array is not big enough to hold the CSV content (arrays are not addressable) + return fmt.Errorf("array capacity problem: cannot store %d %s in %s", csvLen-1, out.Type().Elem().String(), out.Type().String()) + } + case reflect.Slice: + if 
!out.CanAddr() && out.Len() < csvLen-1 { // Slice is not big enough tho hold the CSV content and is not addressable + return fmt.Errorf("slice capacity problem and is not addressable (did you forget &?)") + } else if out.CanAddr() && out.Len() < csvLen-1 { + out.Set(reflect.MakeSlice(out.Type(), csvLen-1, csvLen-1)) // Slice is not big enough, so grows it + } + } + return nil +} + +func getCSVFieldPosition(key string, structInfo *structInfo, curHeaderCount int) *fieldInfo { + matchedFieldCount := 0 + for _, field := range structInfo.Fields { + if field.matchesKey(key) { + if matchedFieldCount >= curHeaderCount { + return &field + } + matchedFieldCount++ + } + } + return nil +} + +func createNewOutInner(outInnerWasPointer bool, outInnerType reflect.Type) reflect.Value { + if outInnerWasPointer { + return reflect.New(outInnerType) + } + return reflect.New(outInnerType).Elem() +} + +func setInnerField(outInner *reflect.Value, outInnerWasPointer bool, index []int, value string, omitEmpty bool) error { + oi := *outInner + if outInnerWasPointer { + // initialize nil pointer + if oi.IsNil() { + setField(oi, "", omitEmpty) + } + oi = outInner.Elem() + } + + if oi.Kind() == reflect.Slice || oi.Kind() == reflect.Array { + i := index[0] + + // grow slice when needed + if i >= oi.Cap() { + newcap := oi.Cap() + oi.Cap()/2 + if newcap < 4 { + newcap = 4 + } + newoi := reflect.MakeSlice(oi.Type(), oi.Len(), newcap) + reflect.Copy(newoi, oi) + oi.Set(newoi) + } + if i >= oi.Len() { + oi.SetLen(i + 1) + } + + item := oi.Index(i) + if len(index) > 1 { + return setInnerField(&item, false, index[1:], value, omitEmpty) + } + return setField(item, value, omitEmpty) + } + + // because pointers can be nil need to recurse one index at a time and perform nil check + if len(index) > 1 { + nextField := oi.Field(index[0]) + return setInnerField(&nextField, nextField.Kind() == reflect.Ptr, index[1:], value, omitEmpty) + } + return setField(oi.FieldByIndex(index), value, omitEmpty) +} diff --git a/vendor/github.com/gocarina/gocsv/encode.go b/vendor/github.com/gocarina/gocsv/encode.go new file mode 100644 index 0000000000..a7c0e720ef --- /dev/null +++ b/vendor/github.com/gocarina/gocsv/encode.go @@ -0,0 +1,169 @@ +package gocsv + +import ( + "errors" + "fmt" + "io" + "reflect" +) + +var ( + ErrChannelIsClosed = errors.New("channel is closed") +) + +type encoder struct { + out io.Writer +} + +func newEncoder(out io.Writer) *encoder { + return &encoder{out} +} + +func writeFromChan(writer CSVWriter, c <-chan interface{}, omitHeaders bool) error { + // Get the first value. It wil determine the header structure. 
+ firstValue, ok := <-c + if !ok { + return ErrChannelIsClosed + } + inValue, inType := getConcreteReflectValueAndType(firstValue) // Get the concrete type + if err := ensureStructOrPtr(inType); err != nil { + return err + } + inInnerWasPointer := inType.Kind() == reflect.Ptr + inInnerStructInfo := getStructInfo(inType) // Get the inner struct info to get CSV annotations + csvHeadersLabels := make([]string, len(inInnerStructInfo.Fields)) + for i, fieldInfo := range inInnerStructInfo.Fields { // Used to write the header (first line) in CSV + csvHeadersLabels[i] = fieldInfo.getFirstKey() + } + if !omitHeaders { + if err := writer.Write(csvHeadersLabels); err != nil { + return err + } + } + write := func(val reflect.Value) error { + for j, fieldInfo := range inInnerStructInfo.Fields { + csvHeadersLabels[j] = "" + inInnerFieldValue, err := getInnerField(val, inInnerWasPointer, fieldInfo.IndexChain) // Get the correct field header <-> position + if err != nil { + return err + } + csvHeadersLabels[j] = inInnerFieldValue + } + if err := writer.Write(csvHeadersLabels); err != nil { + return err + } + return nil + } + if err := write(inValue); err != nil { + return err + } + for v := range c { + val, _ := getConcreteReflectValueAndType(v) // Get the concrete type (not pointer) (Slice or Array) + if err := ensureStructOrPtr(inType); err != nil { + return err + } + if err := write(val); err != nil { + return err + } + } + writer.Flush() + return writer.Error() +} + +func writeTo(writer CSVWriter, in interface{}, omitHeaders bool) error { + inValue, inType := getConcreteReflectValueAndType(in) // Get the concrete type (not pointer) (Slice or Array) + if err := ensureInType(inType); err != nil { + return err + } + inInnerWasPointer, inInnerType := getConcreteContainerInnerType(inType) // Get the concrete inner type (not pointer) (Container<"?">) + if err := ensureInInnerType(inInnerType); err != nil { + return err + } + inInnerStructInfo := getStructInfo(inInnerType) // Get the inner struct info to get CSV annotations + csvHeadersLabels := make([]string, len(inInnerStructInfo.Fields)) + for i, fieldInfo := range inInnerStructInfo.Fields { // Used to write the header (first line) in CSV + csvHeadersLabels[i] = fieldInfo.getFirstKey() + } + if !omitHeaders { + if err := writer.Write(csvHeadersLabels); err != nil { + return err + } + } + inLen := inValue.Len() + for i := 0; i < inLen; i++ { // Iterate over container rows + for j, fieldInfo := range inInnerStructInfo.Fields { + csvHeadersLabels[j] = "" + inInnerFieldValue, err := getInnerField(inValue.Index(i), inInnerWasPointer, fieldInfo.IndexChain) // Get the correct field header <-> position + if err != nil { + return err + } + csvHeadersLabels[j] = inInnerFieldValue + } + if err := writer.Write(csvHeadersLabels); err != nil { + return err + } + } + writer.Flush() + return writer.Error() +} + +func ensureStructOrPtr(t reflect.Type) error { + switch t.Kind() { + case reflect.Struct: + fallthrough + case reflect.Ptr: + return nil + } + return fmt.Errorf("cannot use " + t.String() + ", only slice or array supported") +} + +// Check if the inType is an array or a slice +func ensureInType(outType reflect.Type) error { + switch outType.Kind() { + case reflect.Slice: + fallthrough + case reflect.Array: + return nil + } + return fmt.Errorf("cannot use " + outType.String() + ", only slice or array supported") +} + +// Check if the inInnerType is of type struct +func ensureInInnerType(outInnerType reflect.Type) error { + switch outInnerType.Kind() { + case 
reflect.Struct: + return nil + } + return fmt.Errorf("cannot use " + outInnerType.String() + ", only struct supported") +} + +func getInnerField(outInner reflect.Value, outInnerWasPointer bool, index []int) (string, error) { + oi := outInner + if outInnerWasPointer { + if oi.IsNil() { + return "", nil + } + oi = outInner.Elem() + } + + if oi.Kind() == reflect.Slice || oi.Kind() == reflect.Array { + i := index[0] + + if i >= oi.Len() { + return "", nil + } + + item := oi.Index(i) + if len(index) > 1 { + return getInnerField(item, false, index[1:]) + } + return getFieldAsString(item) + } + + // because pointers can be nil need to recurse one index at a time and perform nil check + if len(index) > 1 { + nextField := oi.Field(index[0]) + return getInnerField(nextField, nextField.Kind() == reflect.Ptr, index[1:]) + } + return getFieldAsString(oi.FieldByIndex(index)) +} diff --git a/vendor/github.com/gocarina/gocsv/reflect.go b/vendor/github.com/gocarina/gocsv/reflect.go new file mode 100644 index 0000000000..815fd5ef67 --- /dev/null +++ b/vendor/github.com/gocarina/gocsv/reflect.go @@ -0,0 +1,241 @@ +package gocsv + +import ( + "fmt" + "reflect" + "strconv" + "strings" + "sync" +) + +// -------------------------------------------------------------------------- +// Reflection helpers + +type structInfo struct { + Fields []fieldInfo +} + +// fieldInfo is a struct field that should be mapped to a CSV column, or vice-versa +// Each IndexChain element before the last is the index of an the embedded struct field +// that defines Key as a tag +type fieldInfo struct { + keys []string + omitEmpty bool + IndexChain []int + defaultValue string +} + +func (f fieldInfo) getFirstKey() string { + return f.keys[0] +} + +func (f fieldInfo) matchesKey(key string) bool { + for _, k := range f.keys { + if key == k || strings.TrimSpace(key) == k { + return true + } + } + return false +} + +var structInfoCache sync.Map +var structMap = make(map[reflect.Type]*structInfo) +var structMapMutex sync.RWMutex + +func getStructInfo(rType reflect.Type) *structInfo { + stInfo, ok := structInfoCache.Load(rType) + if ok { + return stInfo.(*structInfo) + } + + fieldsList := getFieldInfos(rType, []int{}) + stInfo = &structInfo{fieldsList} + structInfoCache.Store(rType, stInfo) + + return stInfo.(*structInfo) +} + +func getFieldInfos(rType reflect.Type, parentIndexChain []int) []fieldInfo { + fieldsCount := rType.NumField() + fieldsList := make([]fieldInfo, 0, fieldsCount) + for i := 0; i < fieldsCount; i++ { + field := rType.Field(i) + if field.PkgPath != "" { + continue + } + + var cpy = make([]int, len(parentIndexChain)) + copy(cpy, parentIndexChain) + indexChain := append(cpy, i) + + currFieldInfo, filteredTags := filterTags(TagName, indexChain, field) + + if len(filteredTags) == 1 && filteredTags[0] == "-" { + continue + } + + // if the field is a pointer to a struct, follow the pointer then create fieldinfo for each field + if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct { + // Structs that implement any of the text or CSV marshaling methods + // should result in one value and not have their fields exposed + if !(canMarshal(field.Type.Elem()) || canMarshal(field.Type)) { + fieldsList = append(fieldsList, getFieldInfos(field.Type.Elem(), indexChain)...) 
+ value := reflect.New(field.Type.Elem()) + switch value.Interface().(type) { + case TypeUnmarshaller: + case TypeUnmarshalCSVWithFields: + default: + if len(filteredTags) > 0 && filteredTags[0] == "" { + filteredTags[0] = "-" + } + } + } + } + // if the field is a struct, create a fieldInfo for each of its fields + if field.Type.Kind() == reflect.Struct { + // Structs that implement any of the text or CSV marshaling methods + // should result in one value and not have their fields exposed + if !(canMarshal(field.Type)) { + fieldsList = append(fieldsList, getFieldInfos(field.Type, indexChain)...) + value := reflect.New(field.Type) + switch value.Interface().(type) { + case TypeUnmarshaller: + case TypeUnmarshalCSVWithFields: + default: + if len(filteredTags) > 0 && filteredTags[0] == "" { + filteredTags[0] = "-" + } + } + } + } + + // if the field is an embedded struct, ignore the csv tag + if field.Anonymous { + continue + } + // if this is true, then we have a struct or a pointer to a struct and marshalled its fields. + // No further actions required. + if len(filteredTags) == 1 && filteredTags[0] == "-" { + continue + } + + if len(filteredTags) > 0 && filteredTags[0] != "" { + currFieldInfo.keys = filteredTags + } else { + currFieldInfo.keys = []string{normalizeName(field.Name)} + } + + if field.Type.Kind() == reflect.Slice || field.Type.Kind() == reflect.Array { + var arrayLength = -1 + if arrayTag, ok := field.Tag.Lookup(TagName + "[]"); ok { + arrayLength, _ = strconv.Atoi(arrayTag) + } + + // When the field is a slice/array of structs, create a fieldInfo for each index and each field + if field.Type.Elem().Kind() == reflect.Struct { + fieldInfos := getFieldInfos(field.Type.Elem(), []int{}) + + for idx := 0; idx < arrayLength; idx++ { + // copy index chain and append array index + var cpy2 = make([]int, len(indexChain)) + copy(cpy2, indexChain) + arrayIndexChain := append(cpy2, idx) + for _, childFieldInfo := range fieldInfos { + // copy array index chain and append array index + var cpy3 = make([]int, len(arrayIndexChain)) + copy(cpy3, arrayIndexChain) + + arrayFieldInfo := fieldInfo{ + IndexChain: append(cpy3, childFieldInfo.IndexChain...), + omitEmpty: childFieldInfo.omitEmpty, + defaultValue: childFieldInfo.defaultValue, + } + + // create cartesian product of keys + // eg: array field keys x struct field keys + for _, akey := range currFieldInfo.keys { + for _, fkey := range childFieldInfo.keys { + arrayFieldInfo.keys = append(arrayFieldInfo.keys, normalizeName(fmt.Sprintf("%s[%d].%s", akey, idx, fkey))) + } + } + + fieldsList = append(fieldsList, arrayFieldInfo) + } + } + } else if arrayLength > 0 { + // When the field is a slice/array of primitives, create a fieldInfo for each index + for idx := 0; idx < arrayLength; idx++ { + // copy index chain and append array index + var cpy2 = make([]int, len(indexChain)) + copy(cpy2, indexChain) + + arrayFieldInfo := fieldInfo{ + IndexChain: append(cpy2, idx), + omitEmpty: currFieldInfo.omitEmpty, + defaultValue: currFieldInfo.defaultValue, + } + + for _, akey := range currFieldInfo.keys { + arrayFieldInfo.keys = append(arrayFieldInfo.keys, normalizeName(fmt.Sprintf("%s[%d]", akey, idx))) + } + + fieldsList = append(fieldsList, arrayFieldInfo) + } + } else { + fieldsList = append(fieldsList, currFieldInfo) + } + } else { + fieldsList = append(fieldsList, currFieldInfo) + } + } + return fieldsList +} + +func filterTags(tagName string, indexChain []int, field reflect.StructField) (fieldInfo, []string) { + currFieldInfo := 
fieldInfo{IndexChain: indexChain} + + fieldTag := field.Tag.Get(tagName) + fieldTags := strings.Split(fieldTag, TagSeparator) + + filteredTags := []string{} + for _, fieldTagEntry := range fieldTags { + trimmedFieldTagEntry := strings.TrimSpace(fieldTagEntry) // handles cases like `csv:"foo, omitempty, default=test"` + if trimmedFieldTagEntry == "omitempty" { + currFieldInfo.omitEmpty = true + } else if strings.HasPrefix(trimmedFieldTagEntry, "default=") { + currFieldInfo.defaultValue = strings.TrimPrefix(trimmedFieldTagEntry, "default=") + } else { + filteredTags = append(filteredTags, normalizeName(trimmedFieldTagEntry)) + } + } + + return currFieldInfo, filteredTags +} + +func getConcreteContainerInnerType(in reflect.Type) (inInnerWasPointer bool, inInnerType reflect.Type) { + inInnerType = in.Elem() + inInnerWasPointer = false + if inInnerType.Kind() == reflect.Ptr { + inInnerWasPointer = true + inInnerType = inInnerType.Elem() + } + return inInnerWasPointer, inInnerType +} + +func getConcreteReflectValueAndType(in interface{}) (reflect.Value, reflect.Type) { + value := reflect.ValueOf(in) + if value.Kind() == reflect.Ptr { + value = value.Elem() + } + return value, value.Type() +} + +var errorInterface = reflect.TypeOf((*error)(nil)).Elem() + +func isErrorType(outType reflect.Type) bool { + if outType.Kind() != reflect.Interface { + return false + } + + return outType.Implements(errorInterface) +} diff --git a/vendor/github.com/gocarina/gocsv/safe_csv.go b/vendor/github.com/gocarina/gocsv/safe_csv.go new file mode 100644 index 0000000000..858b078165 --- /dev/null +++ b/vendor/github.com/gocarina/gocsv/safe_csv.go @@ -0,0 +1,38 @@ +package gocsv + +//Wraps around SafeCSVWriter and makes it thread safe. +import ( + "encoding/csv" + "sync" +) + +type CSVWriter interface { + Write(row []string) error + Flush() + Error() error +} + +type SafeCSVWriter struct { + *csv.Writer + m sync.Mutex +} + +func NewSafeCSVWriter(original *csv.Writer) *SafeCSVWriter { + return &SafeCSVWriter{ + Writer: original, + } +} + +//Override write +func (w *SafeCSVWriter) Write(row []string) error { + w.m.Lock() + defer w.m.Unlock() + return w.Writer.Write(row) +} + +//Override flush +func (w *SafeCSVWriter) Flush() { + w.m.Lock() + w.Writer.Flush() + w.m.Unlock() +} diff --git a/vendor/github.com/gocarina/gocsv/types.go b/vendor/github.com/gocarina/gocsv/types.go new file mode 100644 index 0000000000..537151add9 --- /dev/null +++ b/vendor/github.com/gocarina/gocsv/types.go @@ -0,0 +1,472 @@ +package gocsv + +import ( + "encoding" + "fmt" + "reflect" + "strconv" + "strings" + + "encoding/json" +) + +// -------------------------------------------------------------------------- +// Conversion interfaces + +// TypeMarshaller is implemented by any value that has a MarshalCSV method +// This converter is used to convert the value to it string representation +type TypeMarshaller interface { + MarshalCSV() (string, error) +} + +// TypeUnmarshaller is implemented by any value that has an UnmarshalCSV method +// This converter is used to convert a string to your value representation of that string +type TypeUnmarshaller interface { + UnmarshalCSV(string) error +} + +// TypeUnmarshalCSVWithFields can be implemented on whole structs to allow for whole structures to customized internal vs one off fields +type TypeUnmarshalCSVWithFields interface { + UnmarshalCSVWithFields(key, value string) error +} + +// NoUnmarshalFuncError is the custom error type to be raised in case there is no unmarshal function defined on type +type 
NoUnmarshalFuncError struct { + msg string +} + +func (e NoUnmarshalFuncError) Error() string { + return e.msg +} + +// NoMarshalFuncError is the custom error type to be raised in case there is no marshal function defined on type +type NoMarshalFuncError struct { + ty reflect.Type +} + +func (e NoMarshalFuncError) Error() string { + return "No known conversion from " + e.ty.String() + " to string, " + e.ty.String() + " does not implement TypeMarshaller nor Stringer" +} + +// -------------------------------------------------------------------------- +// Conversion helpers + +func toString(in interface{}) (string, error) { + inValue := reflect.ValueOf(in) + + switch inValue.Kind() { + case reflect.String: + return inValue.String(), nil + case reflect.Bool: + b := inValue.Bool() + if b { + return "true", nil + } + return "false", nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return fmt.Sprintf("%v", inValue.Int()), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return fmt.Sprintf("%v", inValue.Uint()), nil + case reflect.Float32: + return strconv.FormatFloat(inValue.Float(), byte('f'), -1, 32), nil + case reflect.Float64: + return strconv.FormatFloat(inValue.Float(), byte('f'), -1, 64), nil + } + return "", fmt.Errorf("No known conversion from " + inValue.Type().String() + " to string") +} + +func toBool(in interface{}) (bool, error) { + inValue := reflect.ValueOf(in) + + switch inValue.Kind() { + case reflect.String: + s := inValue.String() + s = strings.TrimSpace(s) + if strings.EqualFold(s, "yes") { + return true, nil + } else if strings.EqualFold(s, "no") || s == "" { + return false, nil + } else { + return strconv.ParseBool(s) + } + case reflect.Bool: + return inValue.Bool(), nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + i := inValue.Int() + if i != 0 { + return true, nil + } + return false, nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + i := inValue.Uint() + if i != 0 { + return true, nil + } + return false, nil + case reflect.Float32, reflect.Float64: + f := inValue.Float() + if f != 0 { + return true, nil + } + return false, nil + } + return false, fmt.Errorf("No known conversion from " + inValue.Type().String() + " to bool") +} + +func toInt(in interface{}) (int64, error) { + inValue := reflect.ValueOf(in) + + switch inValue.Kind() { + case reflect.String: + s := strings.TrimSpace(inValue.String()) + if s == "" { + return 0, nil + } + out := strings.SplitN(s, ".", 2) + return strconv.ParseInt(out[0], 0, 64) + case reflect.Bool: + if inValue.Bool() { + return 1, nil + } + return 0, nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return inValue.Int(), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return int64(inValue.Uint()), nil + case reflect.Float32, reflect.Float64: + return int64(inValue.Float()), nil + } + return 0, fmt.Errorf("No known conversion from " + inValue.Type().String() + " to int") +} + +func toUint(in interface{}) (uint64, error) { + inValue := reflect.ValueOf(in) + + switch inValue.Kind() { + case reflect.String: + s := strings.TrimSpace(inValue.String()) + if s == "" { + return 0, nil + } + + // support the float input + if strings.Contains(s, ".") { + f, err := strconv.ParseFloat(s, 64) + if err != nil { + return 0, err + } + return uint64(f), nil + } + return strconv.ParseUint(s, 0, 64) + case reflect.Bool: + if 
inValue.Bool() { + return 1, nil + } + return 0, nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return uint64(inValue.Int()), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return inValue.Uint(), nil + case reflect.Float32, reflect.Float64: + return uint64(inValue.Float()), nil + } + return 0, fmt.Errorf("No known conversion from " + inValue.Type().String() + " to uint") +} + +func toFloat(in interface{}) (float64, error) { + inValue := reflect.ValueOf(in) + + switch inValue.Kind() { + case reflect.String: + s := strings.TrimSpace(inValue.String()) + if s == "" { + return 0, nil + } + s = strings.Replace(s, ",", ".", -1) + return strconv.ParseFloat(s, 64) + case reflect.Bool: + if inValue.Bool() { + return 1, nil + } + return 0, nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return float64(inValue.Int()), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return float64(inValue.Uint()), nil + case reflect.Float32, reflect.Float64: + return inValue.Float(), nil + } + return 0, fmt.Errorf("No known conversion from " + inValue.Type().String() + " to float") +} + +func setField(field reflect.Value, value string, omitEmpty bool) error { + if field.Kind() == reflect.Ptr { + if omitEmpty && value == "" { + return nil + } + if field.IsNil() { + field.Set(reflect.New(field.Type().Elem())) + } + field = field.Elem() + } + + switch field.Interface().(type) { + case string: + s, err := toString(value) + if err != nil { + return err + } + field.SetString(s) + case bool: + b, err := toBool(value) + if err != nil { + return err + } + field.SetBool(b) + case int, int8, int16, int32, int64: + i, err := toInt(value) + if err != nil { + return err + } + field.SetInt(i) + case uint, uint8, uint16, uint32, uint64: + ui, err := toUint(value) + if err != nil { + return err + } + field.SetUint(ui) + case float32, float64: + f, err := toFloat(value) + if err != nil { + return err + } + field.SetFloat(f) + default: + // Not a native type, check for unmarshal method + if err := unmarshall(field, value); err != nil { + if _, ok := err.(NoUnmarshalFuncError); !ok { + return err + } + // Could not unmarshal, check for kind, e.g. 
renamed type from basic type + switch field.Kind() { + case reflect.String: + s, err := toString(value) + if err != nil { + return err + } + field.SetString(s) + case reflect.Bool: + b, err := toBool(value) + if err != nil { + return err + } + field.SetBool(b) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + i, err := toInt(value) + if err != nil { + return err + } + field.SetInt(i) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + ui, err := toUint(value) + if err != nil { + return err + } + field.SetUint(ui) + case reflect.Float32, reflect.Float64: + f, err := toFloat(value) + if err != nil { + return err + } + field.SetFloat(f) + case reflect.Slice, reflect.Struct: + if value == "" { + return nil + } + + err := json.Unmarshal([]byte(value), field.Addr().Interface()) + if err != nil { + return err + } + default: + return err + } + } else { + return nil + } + } + return nil +} + +func getFieldAsString(field reflect.Value) (str string, err error) { + switch field.Kind() { + case reflect.Interface, reflect.Ptr: + if field.IsNil() { + return "", nil + } + return getFieldAsString(field.Elem()) + default: + // Check if field is go native type + switch field.Interface().(type) { + case string: + return field.String(), nil + case bool: + if field.Bool() { + return "true", nil + } else { + return "false", nil + } + case int, int8, int16, int32, int64: + return fmt.Sprintf("%v", field.Int()), nil + case uint, uint8, uint16, uint32, uint64: + return fmt.Sprintf("%v", field.Uint()), nil + case float32: + str, err = toString(float32(field.Float())) + if err != nil { + return str, err + } + case float64: + str, err = toString(field.Float()) + if err != nil { + return str, err + } + default: + // Not a native type, check for marshal method + str, err = marshall(field) + if err != nil { + if _, ok := err.(NoMarshalFuncError); !ok { + return str, err + } + // If not marshal method, is field compatible with/renamed from native type + switch field.Kind() { + case reflect.String: + return field.String(), nil + case reflect.Bool: + str, err = toString(field.Bool()) + if err != nil { + return str, err + } + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + str, err = toString(field.Int()) + if err != nil { + return str, err + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + str, err = toString(field.Uint()) + if err != nil { + return str, err + } + case reflect.Float32: + str, err = toString(float32(field.Float())) + if err != nil { + return str, err + } + case reflect.Float64: + str, err = toString(field.Float()) + if err != nil { + return str, err + } + case reflect.Slice: + fallthrough + case reflect.Array: + b, err := json.Marshal(field.Addr().Interface()) + if err != nil { + return str, err + } + + str = string(b) + } + } else { + return str, nil + } + } + } + return str, nil +} + +// -------------------------------------------------------------------------- +// Un/serializations helpers + +func canMarshal(t reflect.Type) bool { + // Structs that implement any of the text or CSV marshaling methods + // should result in one value and not have their fields exposed + _, canMarshalText := t.MethodByName("MarshalText") + _, canMarshalCSV := t.MethodByName("MarshalCSV") + _, canUnmarshalText := t.MethodByName("UnmarshalText") + _, canUnmarshalCSV := t.MethodByName("UnmarshalCSV") + return canMarshalCSV || canMarshalText || canUnmarshalText || canUnmarshalCSV +} + +func 
unmarshall(field reflect.Value, value string) error { + dupField := field + unMarshallIt := func(finalField reflect.Value) error { + if finalField.CanInterface() { + fieldIface := finalField.Interface() + + fieldTypeUnmarshaller, ok := fieldIface.(TypeUnmarshaller) + if ok { + return fieldTypeUnmarshaller.UnmarshalCSV(value) + } + + // Otherwise try to use TextUnmarshaler + fieldTextUnmarshaler, ok := fieldIface.(encoding.TextUnmarshaler) + if ok { + return fieldTextUnmarshaler.UnmarshalText([]byte(value)) + } + } + + return NoUnmarshalFuncError{"No known conversion from string to " + field.Type().String() + ", " + field.Type().String() + " does not implement TypeUnmarshaller"} + } + for dupField.Kind() == reflect.Interface || dupField.Kind() == reflect.Ptr { + if dupField.IsNil() { + dupField = reflect.New(field.Type().Elem()) + field.Set(dupField) + return unMarshallIt(dupField) + } + dupField = dupField.Elem() + } + if dupField.CanAddr() { + return unMarshallIt(dupField.Addr()) + } + return NoUnmarshalFuncError{"No known conversion from string to " + field.Type().String() + ", " + field.Type().String() + " does not implement TypeUnmarshaller"} +} + +func marshall(field reflect.Value) (value string, err error) { + dupField := field + marshallIt := func(finalField reflect.Value) (string, error) { + if finalField.CanInterface() { + fieldIface := finalField.Interface() + + // Use TypeMarshaller when possible + fieldTypeMarhaller, ok := fieldIface.(TypeMarshaller) + if ok { + return fieldTypeMarhaller.MarshalCSV() + } + + // Otherwise try to use TextMarshaller + fieldTextMarshaler, ok := fieldIface.(encoding.TextMarshaler) + if ok { + text, err := fieldTextMarshaler.MarshalText() + return string(text), err + } + + // Otherwise try to use Stringer + fieldStringer, ok := fieldIface.(fmt.Stringer) + if ok { + return fieldStringer.String(), nil + } + } + + return value, NoMarshalFuncError{field.Type()} + } + for dupField.Kind() == reflect.Interface || dupField.Kind() == reflect.Ptr { + if dupField.IsNil() { + return value, nil + } + dupField = dupField.Elem() + } + if dupField.CanAddr() { + dupField = dupField.Addr() + } + return marshallIt(dupField) +} diff --git a/vendor/github.com/gocarina/gocsv/unmarshaller.go b/vendor/github.com/gocarina/gocsv/unmarshaller.go new file mode 100644 index 0000000000..50d528e368 --- /dev/null +++ b/vendor/github.com/gocarina/gocsv/unmarshaller.go @@ -0,0 +1,134 @@ +package gocsv + +import ( + "encoding/csv" + "fmt" + "reflect" +) + +// Unmarshaller is a CSV to struct unmarshaller. +type Unmarshaller struct { + reader *csv.Reader + Headers []string + fieldInfoMap []*fieldInfo + MismatchedHeaders []string + MismatchedStructFields []string + outType reflect.Type + out interface{} +} + +// NewUnmarshaller creates an unmarshaller from a csv.Reader and a struct. +func NewUnmarshaller(reader *csv.Reader, out interface{}) (*Unmarshaller, error) { + headers, err := reader.Read() + if err != nil { + return nil, err + } + headers = normalizeHeaders(headers) + + um := &Unmarshaller{reader: reader, outType: reflect.TypeOf(out)} + err = validate(um, out, headers) + if err != nil { + return nil, err + } + return um, nil +} + +// Read returns an interface{} whose runtime type is the same as the struct that +// was used to create the Unmarshaller. 
+func (um *Unmarshaller) Read() (interface{}, error) { + row, err := um.reader.Read() + if err != nil { + return nil, err + } + return um.unmarshalRow(row, nil) +} + +// ReadUnmatched is same as Read(), but returns a map of the columns that didn't match a field in the struct +func (um *Unmarshaller) ReadUnmatched() (interface{}, map[string]string, error) { + row, err := um.reader.Read() + if err != nil { + return nil, nil, err + } + unmatched := make(map[string]string) + value, err := um.unmarshalRow(row, unmatched) + return value, unmatched, err +} + +// validate ensures that a struct was used to create the Unmarshaller, and validates +// CSV headers against the CSV tags in the struct. +func validate(um *Unmarshaller, s interface{}, headers []string) error { + concreteType := reflect.TypeOf(s) + if concreteType.Kind() == reflect.Ptr { + concreteType = concreteType.Elem() + } + if err := ensureOutInnerType(concreteType); err != nil { + return err + } + structInfo := getStructInfo(concreteType) // Get struct info to get CSV annotations. + if len(structInfo.Fields) == 0 { + return ErrNoStructTags + } + csvHeadersLabels := make([]*fieldInfo, len(headers)) // Used to store the corresponding header <-> position in CSV + headerCount := map[string]int{} + for i, csvColumnHeader := range headers { + curHeaderCount := headerCount[csvColumnHeader] + if fieldInfo := getCSVFieldPosition(csvColumnHeader, structInfo, curHeaderCount); fieldInfo != nil { + csvHeadersLabels[i] = fieldInfo + if ShouldAlignDuplicateHeadersWithStructFieldOrder { + curHeaderCount++ + headerCount[csvColumnHeader] = curHeaderCount + } + } + } + + if FailIfDoubleHeaderNames { + if err := maybeDoubleHeaderNames(headers); err != nil { + return err + } + } + + um.Headers = headers + um.fieldInfoMap = csvHeadersLabels + um.MismatchedHeaders = mismatchHeaderFields(structInfo.Fields, headers) + um.MismatchedStructFields = mismatchStructFields(structInfo.Fields, headers) + um.out = s + return nil +} + +// unmarshalRow converts a CSV row to a struct, based on CSV struct tags. +// If unmatched is non nil, it is populated with any columns that don't map to a struct field +func (um *Unmarshaller) unmarshalRow(row []string, unmatched map[string]string) (interface{}, error) { + isPointer := false + concreteOutType := um.outType + if um.outType.Kind() == reflect.Ptr { + isPointer = true + concreteOutType = concreteOutType.Elem() + } + outValue := createNewOutInner(isPointer, concreteOutType) + for j, csvColumnContent := range row { + if j < len(um.fieldInfoMap) && um.fieldInfoMap[j] != nil { + fieldInfo := um.fieldInfoMap[j] + if err := setInnerField(&outValue, isPointer, fieldInfo.IndexChain, csvColumnContent, fieldInfo.omitEmpty); err != nil { // Set field of struct + return nil, fmt.Errorf("cannot assign field at %v to %s through index chain %v: %v", j, outValue.Type(), fieldInfo.IndexChain, err) + } + } else if unmatched != nil { + unmatched[um.Headers[j]] = csvColumnContent + } + } + return outValue.Interface(), nil +} + +// RenormalizeHeaders will remap the header names based on the headerNormalizer. 
+// This can be used to map a CSV to a struct where the CSV header names do not match in the file but a mapping is known +func (um *Unmarshaller) RenormalizeHeaders(headerNormalizer func([]string) []string) error { + headers := um.Headers + if headerNormalizer != nil { + headers = headerNormalizer(headers) + } + err := validate(um, um.out, headers) + if err != nil { + return err + } + + return nil +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 0aa4369c6d..568beff1ba 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -56,6 +56,9 @@ github.com/go-chi/chi/v5/middleware ## explicit; go 1.12 github.com/go-ole/go-ole github.com/go-ole/go-ole/oleutil +# github.com/gocarina/gocsv v0.0.0-20220927221512-ad3251f9fa25 +## explicit; go 1.13 +github.com/gocarina/gocsv # github.com/godbus/dbus/v5 v5.1.0 ## explicit; go 1.12 github.com/godbus/dbus/v5
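The hunks above only vendor the library, so for orientation here is a minimal, hypothetical sketch of how the tag parsing in reflect.go, the writeTo path at the start of this section, and the mutex-guarded writer in safe_csv.go fit together. The Survey type, its field names and the output path are invented for illustration; only the gocsv calls and struct-tag syntax come from the vendored code.

package main

import (
	"encoding/csv"
	"log"
	"os"

	"github.com/gocarina/gocsv"
)

// Survey is a hypothetical row type. The csv tags are the ones parsed by
// filterTags in reflect.go: "-" excludes a field, "omitempty" leaves a nil
// pointer untouched when the cell is empty on read, and "default=" fills
// blank cells with a fallback value on read.
type Survey struct {
	PublicKey string   `csv:"public_key"`
	Uptime    *float64 `csv:"uptime,omitempty"`
	Region    string   `csv:"region,default=unknown"`
	Secret    string   `csv:"-"` // never written to the CSV
}

func main() {
	up := 99.7
	rows := []Survey{
		{PublicKey: "pk-1", Uptime: &up, Region: "eu"},
		{PublicKey: "pk-2", Secret: "stays out of the file"},
	}

	f, err := os.Create("survey.csv") // hypothetical output path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// writeTo emits a header row derived from the tags, then one record per
	// slice element; NewSafeCSVWriter wraps csv.Writer so Write/Flush are
	// safe to share between goroutines.
	if err := gocsv.MarshalCSV(&rows, gocsv.NewSafeCSVWriter(csv.NewWriter(f))); err != nil {
		log.Fatal(err)
	}
}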
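writeFromChan, at the top of this section, backs the channel-based write path. Below is a sketch of streaming rows through it via the exported gocsv.MarshalChan wrapper (defined in the package's csv.go, not in the hunks above), again with invented type and path names:

package main

import (
	"encoding/csv"
	"log"
	"os"

	"github.com/gocarina/gocsv"
)

// Sample is a hypothetical row type for this sketch.
type Sample struct {
	Visor     string  `csv:"visor"`
	LatencyMS float64 `csv:"latency_ms"`
}

func main() {
	f, err := os.Create("samples.csv") // hypothetical output path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Rows are produced on a channel and written as they arrive;
	// writeFromChan takes the header from the first value it receives and
	// returns ErrChannelIsClosed if the channel closes before sending one.
	rows := make(chan interface{})
	go func() {
		defer close(rows)
		for i := 0; i < 3; i++ {
			rows <- Sample{Visor: "node", LatencyMS: float64(i) * 1.5}
		}
	}()

	if err := gocsv.MarshalChan(rows, gocsv.NewSafeCSVWriter(csv.NewWriter(f))); err != nil {
		log.Fatal(err)
	}
}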
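The TypeMarshaller and TypeUnmarshaller interfaces in types.go let a field type control its own cell encoding instead of falling back to the reflection-based converters. A hypothetical DateTime wrapper shows the round trip through the package's UnmarshalString/MarshalString helpers (the type names, column names and date format are invented for this sketch):

package main

import (
	"fmt"
	"log"
	"time"

	"github.com/gocarina/gocsv"
)

// DateTime wraps time.Time so it satisfies TypeMarshaller and
// TypeUnmarshaller from types.go.
type DateTime struct {
	time.Time
}

// MarshalCSV renders the cell value written for this field.
func (d DateTime) MarshalCSV() (string, error) {
	return d.Format("2006-01-02"), nil
}

// UnmarshalCSV parses the cell value read for this field.
func (d *DateTime) UnmarshalCSV(cell string) error {
	t, err := time.Parse("2006-01-02", cell)
	d.Time = t
	return err
}

// Visit is a hypothetical record type using the custom field.
type Visit struct {
	Visor string   `csv:"visor"`
	Seen  DateTime `csv:"seen"`
}

func main() {
	in := "visor,seen\nnode-1,2022-09-27\n"

	var visits []Visit
	if err := gocsv.UnmarshalString(in, &visits); err != nil {
		log.Fatal(err)
	}

	out, err := gocsv.MarshalString(&visits)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(out) // round-trips through MarshalCSV/UnmarshalCSV above
}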
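unmarshaller.go adds a pull-style decoder on top of the same struct metadata: the header row is validated once and each Read call decodes a single record, so large files never have to be buffered whole. A small sketch with an invented Row type and inline sample data:

package main

import (
	"encoding/csv"
	"fmt"
	"io"
	"log"
	"strings"

	"github.com/gocarina/gocsv"
)

// Row is a hypothetical record type; the header row is matched against the
// csv tags by validate() in unmarshaller.go.
type Row struct {
	Name string `csv:"name"`
	Port uint16 `csv:"port"`
}

func main() {
	data := "name,port\nalpha,7000\nbeta,7001\n"

	um, err := gocsv.NewUnmarshaller(csv.NewReader(strings.NewReader(data)), Row{})
	if err != nil {
		log.Fatal(err)
	}

	// Read returns one decoded record per call until the underlying
	// csv.Reader reports io.EOF.
	for {
		rec, err := um.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%+v\n", rec.(Row))
	}
}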