diff --git a/filebeat/beat/filebeat.go b/filebeat/beat/filebeat.go
index 8ba67eef367..6245ce9e9f4 100644
--- a/filebeat/beat/filebeat.go
+++ b/filebeat/beat/filebeat.go
@@ -91,7 +91,10 @@ func (fb *Filebeat) Run(b *beat.Beat) error {
 	// Start up spooler
 	go fb.Spooler.Run()
 
-	crawl.Start(fb.FbConfig.Filebeat.Prospectors, fb.Spooler.Channel)
+	err = crawl.Start(fb.FbConfig.Filebeat.Prospectors, fb.Spooler.Channel)
+	if err != nil {
+		return err
+	}
 
 	// Publishes event to output
 	go Publish(b, fb)
diff --git a/filebeat/crawler/crawler.go b/filebeat/crawler/crawler.go
index 16121469ff9..86bf265981c 100644
--- a/filebeat/crawler/crawler.go
+++ b/filebeat/crawler/crawler.go
@@ -27,18 +27,22 @@ type Crawler struct {
 	running bool
 }
 
-func (crawler *Crawler) Start(files []config.ProspectorConfig, eventChan chan *input.FileEvent) {
+func (crawler *Crawler) Start(prospectorConfigs []config.ProspectorConfig, eventChan chan *input.FileEvent) error {
 
 	pendingProspectorCnt := 0
 	crawler.running = true
 
+	if len(prospectorConfigs) == 0 {
+		return fmt.Errorf("No prospectors defined. You must have at least one prospector defined in the config file.")
+	}
+
 	// Prospect the globs/paths given on the command line and launch harvesters
-	for _, fileconfig := range files {
+	for _, prospectorConfig := range prospectorConfigs {
 
-		logp.Debug("prospector", "File Configs: %v", fileconfig.Paths)
+		logp.Debug("prospector", "File Configs: %v", prospectorConfig.Paths)
 
 		prospector := &Prospector{
-			ProspectorConfig: fileconfig,
+			ProspectorConfig: prospectorConfig,
 			registrar:        crawler.Registrar,
 		}
 
@@ -76,6 +80,8 @@ func (crawler *Crawler) Start(files []config.ProspectorConfig, eventChan chan *i
 	}
 
 	logp.Info("All prospectors initialised with %d states to persist", len(crawler.Registrar.State))
+
+	return nil
 }
 
 func (crawler *Crawler) Stop() {
diff --git a/filebeat/crawler/crawler_test.go b/filebeat/crawler/crawler_test.go
new file mode 100644
index 00000000000..3743579649e
--- /dev/null
+++ b/filebeat/crawler/crawler_test.go
@@ -0,0 +1,19 @@
+package crawler
+
+import (
+	"testing"
+
+	"github.com/elastic/beats/filebeat/config"
+	"github.com/elastic/beats/filebeat/input"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestCrawlerStartError(t *testing.T) {
+	crawler := Crawler{}
+	channel := make(chan *input.FileEvent, 1)
+	prospectorConfigs := []config.ProspectorConfig{}
+
+	err := crawler.Start(prospectorConfigs, channel)
+
+	assert.Error(t, err)
+}
diff --git a/filebeat/tests/system/config/filebeat.yml.j2 b/filebeat/tests/system/config/filebeat.yml.j2
index 28e03525cbe..6c04a4fe01f 100644
--- a/filebeat/tests/system/config/filebeat.yml.j2
+++ b/filebeat/tests/system/config/filebeat.yml.j2
@@ -1,6 +1,10 @@
 ############################# Filebeat ######################################
 filebeat:
   prospectors:
+    {% if prospectors is not defined %}
+    {% set prospectors = true %}
+    {% endif %}
+    {% if prospectors %}
     -
       # Paths that should be crawled and fetched
       {% if path %}paths:
@@ -48,7 +52,7 @@ filebeat:
         timeout: 1s
         max_lines: {{ max_lines|default(500) }}
       {% endif %}
-
+    {% endif %}
   spool_size:
   idle_timeout: 0.1s
   registry_file: {{ fb.working_dir + '/' }}{{ registryFile|default(".filebeat")}}
diff --git a/filebeat/tests/system/test_prospector.py b/filebeat/tests/system/test_prospector.py
index a4afb0e3f31..2fe1edddd21 100644
--- a/filebeat/tests/system/test_prospector.py
+++ b/filebeat/tests/system/test_prospector.py
@@ -231,3 +231,21 @@ def test_rotating_ignore_older_low_write_rate(self):
             max_timeout=5)
 
         proc.kill_and_wait()
+
+    def test_shutdown_no_prospectors(self):
+        self.render_config_template(
+            prospectors=False,
+        )
+
+        proc = self.start_filebeat(debug_selectors=['*'])
+
+        # wait for the "No prospectors defined" error message
+        self.wait_until(
+            lambda: self.log_contains(
+                "No prospectors defined"),
+            max_timeout=10)
+
+        self.wait_until(
+            lambda: self.log_contains(
+                "shutting down"),
+            max_timeout=10)
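
For reference, a minimal sketch of how a caller consumes the error now returned by `Crawler.Start` (a hypothetical standalone program, assuming the `github.com/elastic/beats/filebeat/...` packages from this diff are on the import path):

```go
package main

import (
	"log"

	"github.com/elastic/beats/filebeat/config"
	"github.com/elastic/beats/filebeat/crawler"
	"github.com/elastic/beats/filebeat/input"
)

func main() {
	c := crawler.Crawler{}
	events := make(chan *input.FileEvent, 1)

	// With an empty prospector list, Start now fails fast instead of
	// silently running with nothing to crawl; the caller propagates
	// the error and exits, mirroring the filebeat.go change above.
	if err := c.Start([]config.ProspectorConfig{}, events); err != nil {
		log.Fatal(err)
	}
}
```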