diff --git a/filebeat/generator/fields/fields.go b/filebeat/generator/fields/fields.go new file mode 100644 index 00000000000..07c222bbf8f --- /dev/null +++ b/filebeat/generator/fields/fields.go @@ -0,0 +1,366 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package fields + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "reflect" + "regexp" + "strings" + + "gopkg.in/yaml.v2" +) + +const ( + pipelinePath = "%s/module/%s/%s/ingest/pipeline.json" + fieldsYmlPath = "%s/module/%s/%s/_meta/fields.yml" +) + +var ( + types = map[string]string{ + "group": "group", + "DATA": "text", + "GREEDYDATA": "text", + "GREEDYMULTILINE": "text", + "HOSTNAME": "keyword", + "IPHOST": "keyword", + "IPORHOST": "keyword", + "LOGLEVEL": "keyword", + "MULTILINEQUERY": "text", + "NUMBER": "long", + "POSINT": "long", + "SYSLOGHOST": "keyword", + "SYSLOGTIMESTAMP": "text", + "LOCALDATETIME": "text", + "TIMESTAMP": "text", + "USERNAME": "keyword", + "WORD": "keyword", + } +) + +type pipeline struct { + Description string `json:"description"` + Processors []map[string]interface{} `json:"processors"` + OnFailure interface{} `json:"on_failure"` +} + +type field struct { + Type string + Elements []string +} + +type fieldYml struct { + Name string `yaml:"name"` + Description string `yaml:"description,omitempty"` + Example string `yaml:"example,omitempty"` + Type string `yaml:"type,omitempty"` + Fields []*fieldYml `yaml:"fields,omitempty"` +} + +func newFieldYml(name, typeName string, noDoc bool) *fieldYml { + if noDoc { + return &fieldYml{ + Name: name, + Type: typeName, + } + } + + return &fieldYml{ + Name: name, + Type: typeName, + Description: "Please add description", + Example: "Please add example", + } +} + +func newField(lp string) field { + lp = lp[1 : len(lp)-1] + ee := strings.Split(lp, ":") + if len(ee) != 2 { + return field{ + Type: ee[0], + Elements: nil, + } + } + + e := strings.Split(ee[1], ".") + return field{ + Type: ee[0], + Elements: e, + } +} + +func readPipeline(beatsPath, module, fileset string) (*pipeline, error) { + pp := fmt.Sprintf(pipelinePath, beatsPath, module, fileset) + r, err := ioutil.ReadFile(pp) + if err != nil { + return nil, err + } + + var p pipeline + err = json.Unmarshal(r, &p) + if err != nil { + return nil, err + } + + return &p, nil +} + +func addNewField(fs []field, f field) []field { + for _, ff := range fs { + if reflect.DeepEqual(ff, f) { + return fs + } + } + return append(fs, f) +} + +func getElementsFromPatterns(patterns []string) ([]field, error) { + r, err := regexp.Compile("{[\\.\\w\\:]*}") + if err != nil { + return nil, err + } + + var fs []field + for _, lp := range patterns { + pp := r.FindAllString(lp, -1) + for _, p := range pp { + f := newField(p) + if f.Elements == nil { + continue + } + fs = addNewField(fs, f) + 
} + + } + return fs, nil +} + +func accumulatePatterns(grok interface{}) ([]string, error) { + for k, v := range grok.(map[string]interface{}) { + if k == "patterns" { + vs := v.([]interface{}) + var p []string + for _, s := range vs { + p = append(p, s.(string)) + } + return p, nil + } + } + return nil, fmt.Errorf("No patterns in pipeline") +} + +func accumulateRemoveFields(remove interface{}, out []string) []string { + for k, v := range remove.(map[string]interface{}) { + if k == "field" { + switch vs := v.(type) { + case string: + return append(out, vs) + case []string: + for _, vv := range vs { + out = append(out, vv) + } + case []interface{}: + for _, vv := range vs { + vvs := vv.(string) + out = append(out, vvs) + } + default: + return out + + } + } + } + return out +} + +func accumulateRenameFields(rename interface{}, out map[string]string) map[string]string { + var from, to string + for k, v := range rename.(map[string]interface{}) { + if k == "field" { + from = v.(string) + } + if k == "target_field" { + to = v.(string) + } + } + out[from] = to + return out +} + +type processors struct { + patterns []string + remove []string + rename map[string]string +} + +func (p *processors) processFields() ([]field, error) { + f, err := getElementsFromPatterns(p.patterns) + if err != nil { + return nil, err + } + + for i, ff := range f { + fs := strings.Join(ff.Elements, ".") + for k, mv := range p.rename { + if k == fs { + ff.Elements = strings.Split(mv, ".") + } + } + for _, rm := range p.remove { + if fs == rm { + f = append(f[:i], f[i+1:]...) + } + } + } + return f, nil +} + +func getProcessors(p []map[string]interface{}) (*processors, error) { + var patterns, rmFields []string + mvFields := make(map[string]string) + + for _, e := range p { + if ee, ok := e["grok"]; ok { + pp, err := accumulatePatterns(ee) + if err != nil { + return nil, err + } + patterns = append(patterns, pp...) 
+		}
+		if rm, ok := e["remove"]; ok {
+			rmFields = accumulateRemoveFields(rm, rmFields)
+		}
+		if mv, ok := e["rename"]; ok {
+			mvFields = accumulateRenameFields(mv, mvFields)
+		}
+	}
+
+	if patterns == nil {
+		return nil, fmt.Errorf("No patterns in pipeline")
+	}
+
+	return &processors{
+		patterns: patterns,
+		remove:   rmFields,
+		rename:   mvFields,
+	}, nil
+}
+
+func getFieldByName(f []*fieldYml, name string) *fieldYml {
+	for _, ff := range f {
+		if ff.Name == name {
+			return ff
+		}
+	}
+	return nil
+}
+
+func insertLastField(f []*fieldYml, name, typeName string, noDoc bool) []*fieldYml {
+	ff := getFieldByName(f, name)
+	if ff != nil {
+		return f
+	}
+
+	nf := newFieldYml(name, types[typeName], noDoc)
+	return append(f, nf)
+}
+
+func insertGroup(out []*fieldYml, field field, index, count int, noDoc bool) []*fieldYml {
+	g := getFieldByName(out, field.Elements[index])
+	if g != nil {
+		g.Fields = generateField(g.Fields, field, index+1, count, noDoc)
+		return out
+	}
+
+	var groupFields []*fieldYml
+	groupFields = generateField(groupFields, field, index+1, count, noDoc)
+	group := newFieldYml(field.Elements[index], "group", noDoc)
+	group.Fields = groupFields
+	return append(out, group)
+}
+
+func generateField(out []*fieldYml, field field, index, count int, noDoc bool) []*fieldYml {
+	if index+1 == count {
+		return insertLastField(out, field.Elements[index], field.Type, noDoc)
+	}
+	return insertGroup(out, field, index, count, noDoc)
+}
+
+func generateFields(f []field, noDoc bool) []*fieldYml {
+	var out []*fieldYml
+	for _, ff := range f {
+		index := 1
+		if len(ff.Elements) == 1 {
+			index = 0
+		}
+		out = generateField(out, ff, index, len(ff.Elements), noDoc)
+	}
+	return out
+}
+
+func (p *pipeline) toFieldsYml(noDoc bool) ([]byte, error) {
+	pr, err := getProcessors(p.Processors)
+	if err != nil {
+		return nil, err
+	}
+
+	var fs []field
+	fs, err = pr.processFields()
+	if err != nil {
+		return nil, err
+	}
+
+	f := generateFields(fs, noDoc)
+	var d []byte
+	d, err = yaml.Marshal(&f)
+
+	return d, err
+}
+
+func writeFieldsYml(beatsPath, module, fileset string, f []byte) error {
+	p := fmt.Sprintf(fieldsYmlPath, beatsPath, module, fileset)
+	err := ioutil.WriteFile(p, f, 0664)
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+// Generate generates a fields.yml file for a fileset.
+func Generate(moduleName, filesetName, beatsPath string, noDoc bool) error {
+	p, err := readPipeline(beatsPath, moduleName, filesetName)
+	if err != nil {
+		return fmt.Errorf("cannot read pipeline.json of fileset: %v", err)
+	}
+
+	var d []byte
+	d, err = p.toFieldsYml(noDoc)
+	if err != nil {
+		return fmt.Errorf("cannot generate fields.yml for fileset: %v", err)
+	}
+
+	err = writeFieldsYml(beatsPath, moduleName, filesetName, d)
+	if err != nil {
+		return fmt.Errorf("cannot write fields.yml of fileset: %v", err)
+	}
+
+	return nil
+}
diff --git a/filebeat/scripts/generator/fields/main_test.go b/filebeat/generator/fields/fields_test.go
similarity index 99%
rename from filebeat/scripts/generator/fields/main_test.go
rename to filebeat/generator/fields/fields_test.go
index 04f34bbeebe..3f038fe8ace 100644
--- a/filebeat/scripts/generator/fields/main_test.go
+++ b/filebeat/generator/fields/fields_test.go
@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.
 
-package main
+package fields
 
 import (
 	"reflect"
diff --git a/filebeat/generator/fileset/fileset.go b/filebeat/generator/fileset/fileset.go
new file mode 100644
index 00000000000..7e43ee232fb
--- /dev/null
+++ b/filebeat/generator/fileset/fileset.go
@@ -0,0 +1,67 @@
+// Licensed to Elasticsearch B.V. under one or more contributor
+// license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright
+// ownership. Elasticsearch B.V. licenses this file to you under
+// the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package fileset
+
+import (
+	"fmt"
+	"path"
+
+	"github.com/elastic/beats/filebeat/generator"
+)
+
+// Generate generates a new fileset under a module.
+// If the fileset already exists or the module does not exist, an error is returned.
+func Generate(module, fileset, modulesPath, beatsPath string) error {
+	filesetPath := path.Join(modulesPath, "module", module, fileset)
+	if generator.DirExists(filesetPath) {
+		return fmt.Errorf("fileset already exists: %s", fileset)
+	}
+
+	err := generator.CreateDirectories(filesetPath, []string{"", "_meta", "test", "config", "ingest"})
+	if err != nil {
+		return err
+	}
+
+	replace := map[string]string{
+		"module":  module,
+		"fileset": fileset,
+	}
+	templatesPath := path.Join(beatsPath, "scripts", "fileset")
+	filesToCopy := []string{
+		path.Join("config", "config.yml"),
+		path.Join("ingest", "pipeline.json"),
+		"manifest.yml",
+	}
+	err = generator.CopyTemplates(templatesPath, filesetPath, filesToCopy, replace)
+	if err != nil {
+		return err
+	}
+	err = generator.RenameConfigYml(modulesPath, module, fileset)
+	if err != nil {
+		return err
+	}
+
+	return addFilesetDashboard(module, fileset, modulesPath, templatesPath)
+}
+
+func addFilesetDashboard(module, fileset, modulesPath, templatesPath string) error {
+	template := path.Join(templatesPath, "module-fileset.yml")
+	dest := path.Join(modulesPath, "module", module, "module.yml")
+	replacement := map[string]string{"module": module, "fileset": fileset}
+	return generator.AppendTemplate(template, dest, replacement)
+}
diff --git a/filebeat/scripts/generator/generator.go b/filebeat/generator/generator.go
similarity index 100%
rename from filebeat/scripts/generator/generator.go
rename to filebeat/generator/generator.go
diff --git a/filebeat/generator/module/module.go b/filebeat/generator/module/module.go
new file mode 100644
index 00000000000..6c59f736470
--- /dev/null
+++ b/filebeat/generator/module/module.go
@@ -0,0 +1,50 @@
+// Licensed to Elasticsearch B.V. under one or more contributor
+// license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright
+// ownership. Elasticsearch B.V. licenses this file to you under
+// the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package module + +import ( + "fmt" + "path" + + "github.com/elastic/beats/filebeat/generator" +) + +// Generate generates a new module. +// If module exists, error is returned. +func Generate(module, modulesPath, beatsPath string) error { + modulePath := path.Join(modulesPath, "module", module) + if generator.DirExists(modulePath) { + return fmt.Errorf("module already exists: %s", module) + } + + err := generator.CreateDirectories(modulePath, []string{path.Join("_meta", "kibana", "6")}) + if err != nil { + return err + } + + replace := map[string]string{"module": module} + templatesPath := path.Join(beatsPath, "scripts", "module") + filesToCopy := []string{ + path.Join("_meta", "fields.yml"), + path.Join("_meta", "docs.asciidoc"), + path.Join("_meta", "config.yml"), + path.Join("module.yml"), + } + + return generator.CopyTemplates(templatesPath, modulePath, filesToCopy, replace) +} diff --git a/filebeat/scripts/generator/fields/main.go b/filebeat/scripts/generator/fields/main.go index 7a883160368..efc3d147a34 100644 --- a/filebeat/scripts/generator/fields/main.go +++ b/filebeat/scripts/generator/fields/main.go @@ -18,369 +18,35 @@ package main import ( - "encoding/json" "flag" "fmt" - "io/ioutil" "os" - "reflect" - "regexp" - "strings" - "gopkg.in/yaml.v2" + "github.com/elastic/beats/filebeat/generator/fields" ) -const ( - pipelinePath = "%s/module/%s/%s/ingest/pipeline.json" - fieldsYmlPath = "%s/module/%s/%s/_meta/fields.yml" -) - -var ( - types = map[string]string{ - "group": "group", - "DATA": "text", - "GREEDYDATA": "text", - "GREEDYMULTILINE": "text", - "HOSTNAME": "keyword", - "IPHOST": "keyword", - "IPORHOST": "keyword", - "LOGLEVEL": "keyword", - "MULTILINEQUERY": "text", - "NUMBER": "long", - "POSINT": "long", - "SYSLOGHOST": "keyword", - "SYSLOGTIMESTAMP": "text", - "LOCALDATETIME": "text", - "TIMESTAMP": "text", - "USERNAME": "keyword", - "WORD": "keyword", - } -) - -type pipeline struct { - Description string `json:"description"` - Processors []map[string]interface{} `json:"processors"` - OnFailure interface{} `json:"on_failure"` -} - -type field struct { - Type string - Elements []string -} - -type fieldYml struct { - Name string `yaml:"name"` - Description string `yaml:"description,omitempty"` - Example string `yaml:"example,omitempty"` - Type string `yaml:"type,omitempty"` - Fields []*fieldYml `yaml:"fields,omitempty"` -} - -func newFieldYml(name, typeName string, noDoc bool) *fieldYml { - if noDoc { - return &fieldYml{ - Name: name, - Type: typeName, - } - } - - return &fieldYml{ - Name: name, - Type: typeName, - Description: "Please add description", - Example: "Please add example", - } -} - -func newField(lp string) field { - lp = lp[1 : len(lp)-1] - ee := strings.Split(lp, ":") - if len(ee) != 2 { - return field{ - Type: ee[0], - Elements: nil, - } - } - - e := strings.Split(ee[1], ".") - return field{ - Type: ee[0], - Elements: e, - } -} - -func readPipeline(beatsPath, module, fileset string) (*pipeline, error) { - pp := fmt.Sprintf(pipelinePath, beatsPath, module, fileset) - r, err := ioutil.ReadFile(pp) - if err != nil { - return nil, err - } 
- - var p pipeline - err = json.Unmarshal(r, &p) - if err != nil { - return nil, err - } - - return &p, nil -} - -func addNewField(fs []field, f field) []field { - for _, ff := range fs { - if reflect.DeepEqual(ff, f) { - return fs - } - } - return append(fs, f) -} - -func getElementsFromPatterns(patterns []string) ([]field, error) { - r, err := regexp.Compile("{[\\.\\w\\:]*}") - if err != nil { - return nil, err - } - - var fs []field - for _, lp := range patterns { - pp := r.FindAllString(lp, -1) - for _, p := range pp { - f := newField(p) - if f.Elements == nil { - continue - } - fs = addNewField(fs, f) - } - - } - return fs, nil -} - -func accumulatePatterns(grok interface{}) ([]string, error) { - for k, v := range grok.(map[string]interface{}) { - if k == "patterns" { - vs := v.([]interface{}) - var p []string - for _, s := range vs { - p = append(p, s.(string)) - } - return p, nil - } - } - return nil, fmt.Errorf("No patterns in pipeline") -} - -func accumulateRemoveFields(remove interface{}, out []string) []string { - for k, v := range remove.(map[string]interface{}) { - if k == "field" { - switch vs := v.(type) { - case string: - return append(out, vs) - case []string: - for _, vv := range vs { - out = append(out, vv) - } - case []interface{}: - for _, vv := range vs { - vvs := vv.(string) - out = append(out, vvs) - } - default: - return out - - } - } - } - return out -} - -func accumulateRenameFields(rename interface{}, out map[string]string) map[string]string { - var from, to string - for k, v := range rename.(map[string]interface{}) { - if k == "field" { - from = v.(string) - } - if k == "target_field" { - to = v.(string) - } - } - out[from] = to - return out -} - -type processors struct { - patterns []string - remove []string - rename map[string]string -} - -func (p *processors) processFields() ([]field, error) { - f, err := getElementsFromPatterns(p.patterns) - if err != nil { - return nil, err - } - - for i, ff := range f { - fs := strings.Join(ff.Elements, ".") - for k, mv := range p.rename { - if k == fs { - ff.Elements = strings.Split(mv, ".") - } - } - for _, rm := range p.remove { - if fs == rm { - f = append(f[:i], f[i+1:]...) - } - } - } - return f, nil -} - -func getProcessors(p []map[string]interface{}) (*processors, error) { - var patterns, rmFields []string - mvFields := make(map[string]string) - - for _, e := range p { - if ee, ok := e["grok"]; ok { - pp, err := accumulatePatterns(ee) - if err != nil { - return nil, err - } - patterns = append(patterns, pp...) 
- } - if rm, ok := e["remove"]; ok { - rmFields = accumulateRemoveFields(rm, rmFields) - } - if mv, ok := e["rename"]; ok { - mvFields = accumulateRenameFields(mv, mvFields) - } - } - - if patterns == nil { - return nil, fmt.Errorf("No patterns in pipeline") - } - - return &processors{ - patterns: patterns, - remove: rmFields, - rename: mvFields, - }, nil -} - -func getFieldByName(f []*fieldYml, name string) *fieldYml { - for _, ff := range f { - if ff.Name == name { - return ff - } - } - return nil -} - -func insertLastField(f []*fieldYml, name, typeName string, noDoc bool) []*fieldYml { - ff := getFieldByName(f, name) - if ff != nil { - return f - } - - nf := newFieldYml(name, types[typeName], noDoc) - return append(f, nf) -} - -func insertGroup(out []*fieldYml, field field, index, count int, noDoc bool) []*fieldYml { - g := getFieldByName(out, field.Elements[index]) - if g != nil { - g.Fields = generateField(g.Fields, field, index+1, count, noDoc) - return out - } - - var groupFields []*fieldYml - groupFields = generateField(groupFields, field, index+1, count, noDoc) - group := newFieldYml(field.Elements[index], "group", noDoc) - group.Fields = groupFields - return append(out, group) -} - -func generateField(out []*fieldYml, field field, index, count int, noDoc bool) []*fieldYml { - if index+1 == count { - return insertLastField(out, field.Elements[index], field.Type, noDoc) - } - return insertGroup(out, field, index, count, noDoc) -} - -func generateFields(f []field, noDoc bool) []*fieldYml { - var out []*fieldYml - for _, ff := range f { - index := 1 - if len(ff.Elements) == 1 { - index = 0 - } - out = generateField(out, ff, index, len(ff.Elements), noDoc) - } - return out -} - -func (p *pipeline) toFieldsYml(noDoc bool) ([]byte, error) { - pr, err := getProcessors(p.Processors) - if err != nil { - return nil, err - } - - var fs []field - fs, err = pr.processFields() - if err != nil { - return nil, err - } - - f := generateFields(fs, noDoc) - var d []byte - d, err = yaml.Marshal(&f) - - return d, nil -} - -func writeFieldsYml(beatsPath, module, fileset string, f []byte) error { - p := fmt.Sprintf(fieldsYmlPath, beatsPath, module, fileset) - err := ioutil.WriteFile(p, f, 0664) - if err != nil { - return err - } - return nil -} - func main() { - module := flag.String("module", "", "Name of the module") - fileset := flag.String("fileset", "", "Name of the fileset") + moduleName := flag.String("module", "", "Name of the module") + filesetName := flag.String("fileset", "", "Name of the fileset") beatsPath := flag.String("beats_path", ".", "Path to elastic/beats") noDoc := flag.Bool("nodoc", false, "Generate documentation for fields") flag.Parse() - if *module == "" { + if *moduleName == "" { fmt.Println("Missing parameter: module") os.Exit(1) } - if *fileset == "" { + if *filesetName == "" { fmt.Println("Missing parameter: fileset") os.Exit(1) } - p, err := readPipeline(*beatsPath, *module, *fileset) + err := fields.Generate(*moduleName, *filesetName, *beatsPath, *noDoc) if err != nil { - fmt.Printf("Cannot read pipeline.yml of fileset: %v\n", err) + fmt.Printf("Cannot generate field.yml of fileset: %v\n", err) os.Exit(2) } - var d []byte - d, err = p.toFieldsYml(*noDoc) - if err != nil { - fmt.Printf("Cannot generate fields.yml for fileset: %v\n", err) - os.Exit(3) - } - - err = writeFieldsYml(*beatsPath, *module, *fileset, d) - if err != nil { - fmt.Printf("Cannot write field.yml of fileset: %v\n", err) - os.Exit(4) - } - - fmt.Printf("Fields.yml generated for %s/%s\n", *module, 
*fileset) + fmt.Printf("Fields.yml generated for %s/%s\n", *moduleName, *filesetName) } diff --git a/filebeat/scripts/generator/fileset/main.go b/filebeat/scripts/generator/fileset/main.go index 94d0ac6b6a2..2a2616089ba 100644 --- a/filebeat/scripts/generator/fileset/main.go +++ b/filebeat/scripts/generator/fileset/main.go @@ -23,66 +23,34 @@ import ( "os" "path" - "github.com/elastic/beats/filebeat/scripts/generator" + "github.com/elastic/beats/filebeat/generator" + "github.com/elastic/beats/filebeat/generator/fileset" ) -func generateFileset(module, fileset, modulesPath, beatsPath string) error { - filesetPath := path.Join(modulesPath, "module", module, fileset) - if generator.DirExists(filesetPath) { - return fmt.Errorf("fileset already exists: %s", fileset) - } - - err := generator.CreateDirectories(filesetPath, []string{"", "_meta", "test", "config", "ingest"}) - if err != nil { - return err - } - - replace := map[string]string{"module": module, "fileset": fileset} - templatesPath := path.Join(beatsPath, "scripts", "fileset") - filesToCopy := []string{path.Join("config", "config.yml"), path.Join("ingest", "pipeline.json"), "manifest.yml"} - err = generator.CopyTemplates(templatesPath, filesetPath, filesToCopy, replace) - if err != nil { - return err - } - err = generator.RenameConfigYml(modulesPath, module, fileset) - if err != nil { - return err - } - - return addFilesetDashboard(module, fileset, modulesPath, templatesPath) -} - -func addFilesetDashboard(module, fileset, modulesPath, templatesPath string) error { - template := path.Join(templatesPath, "module-fileset.yml") - dest := path.Join(modulesPath, "module", module, "module.yml") - replacement := map[string]string{"module": module, "fileset": fileset} - return generator.AppendTemplate(template, dest, replacement) -} - func main() { - module := flag.String("module", "", "Name of the module") - fileset := flag.String("fileset", "", "Name of the fileset") + moduleName := flag.String("module", "", "Name of the module") + filesetName := flag.String("fileset", "", "Name of the fileset") modulesPath := flag.String("path", ".", "Path to the generated fileset") beatsPath := flag.String("beats_path", ".", "Path to elastic/beats") flag.Parse() - if *module == "" { + if *moduleName == "" { fmt.Println("Missing parameter: module") os.Exit(1) } - if *fileset == "" { + if *filesetName == "" { fmt.Println("Missing parameter: fileset") os.Exit(1) } - modulePath := path.Join(*modulesPath, "module", *module) + modulePath := path.Join(*modulesPath, "module", *moduleName) if !generator.DirExists(modulePath) { fmt.Print("Cannot generate fileset: module not exists, please create module first by create-module command\n") os.Exit(2) } - err := generateFileset(*module, *fileset, *modulesPath, *beatsPath) + err := fileset.Generate(*moduleName, *filesetName, *modulesPath, *beatsPath) if err != nil { fmt.Printf("Cannot generate fileset: %v\n", err) os.Exit(3) diff --git a/filebeat/scripts/generator/module/main.go b/filebeat/scripts/generator/module/main.go index dedf1f3e279..9478e3c7901 100644 --- a/filebeat/scripts/generator/module/main.go +++ b/filebeat/scripts/generator/module/main.go @@ -21,45 +21,22 @@ import ( "flag" "fmt" "os" - "path" - "github.com/elastic/beats/filebeat/scripts/generator" + "github.com/elastic/beats/filebeat/generator/module" ) -func generateModule(module, modulesPath, beatsPath string) error { - modulePath := path.Join(modulesPath, "module", module) - if generator.DirExists(modulePath) { - return fmt.Errorf("module already 
exists: %s", module) - } - - err := generator.CreateDirectories(modulePath, []string{path.Join("_meta", "kibana", "6")}) - if err != nil { - return err - } - - replace := map[string]string{"module": module} - templatesPath := path.Join(beatsPath, "scripts", "module") - filesToCopy := []string{path.Join("_meta", "fields.yml"), path.Join("_meta", "docs.asciidoc"), path.Join("_meta", "config.yml"), path.Join("module.yml")} - generator.CopyTemplates(templatesPath, modulePath, filesToCopy, replace) - if err != nil { - return err - } - - return nil -} - func main() { - module := flag.String("module", "", "Name of the module") + name := flag.String("module", "", "Name of the module") modulePath := flag.String("path", ".", "Path to the generated fileset") beatsPath := flag.String("beats_path", ".", "Path to elastic/beats") flag.Parse() - if *module == "" { + if *name == "" { fmt.Println("Missing parameter: module") os.Exit(1) } - err := generateModule(*module, *modulePath, *beatsPath) + err := module.Generate(*name, *modulePath, *beatsPath) if err != nil { fmt.Printf("Cannot generate module: %v\n", err) os.Exit(2)