diff --git a/CHANGELOG.asciidoc b/CHANGELOG.asciidoc index f54b760616f0..e8edbd73d80f 100644 --- a/CHANGELOG.asciidoc +++ b/CHANGELOG.asciidoc @@ -66,6 +66,7 @@ https://github.com/elastic/beats/compare/v5.1.1...master[Check the HEAD diff] - RPM/deb packages will now install the config file with 0600 permissions. {pull}3382[3382] - Add the option to pass custom HTTP headers to the Elasticsearch output. {pull}3400[3400] - Unify `regexp` and `contains` conditionals, for both to support array of strings and convert numbers to strings if required. {pull}3469[3469] +- Add the option to load the sample dashboards during the Beat startup phase. {pull}3506[3506] *Metricbeat* diff --git a/filebeat/beater/filebeat.go b/filebeat/beater/filebeat.go index 1249fb712f9e..8c2c6a5d03d3 100644 --- a/filebeat/beater/filebeat.go +++ b/filebeat/beater/filebeat.go @@ -20,8 +20,7 @@ import ( ) var ( - once = flag.Bool("once", false, "Run filebeat only once until all harvesters reach EOF") - setup = flag.Bool("setup", false, "Run the setup phase for the modules") + once = flag.Bool("once", false, "Run filebeat only once until all harvesters reach EOF") ) // Filebeat is a beater object. Contains all objects needed to run the beat @@ -71,11 +70,12 @@ func New(b *beat.Beat, rawConfig *common.Config) (beat.Beater, error) { return fb, nil } -// Setup is called on user request (the -setup flag) to do the initial Beat setup. -func (fb *Filebeat) Setup(b *beat.Beat) error { +// modulesSetup is called when modules are configured to do the initial +// setup. 
+func (fb *Filebeat) modulesSetup(b *beat.Beat) error { esConfig := b.Config.Output["elasticsearch"] if esConfig == nil || !esConfig.Enabled() { - return fmt.Errorf("Setup requested but the Elasticsearch output is not configured/enabled") + return fmt.Errorf("Filebeat modules configured but the Elasticsearch output is not configured/enabled") } esClient, err := elasticsearch.NewConnectedClient(esConfig) if err != nil { @@ -83,7 +83,12 @@ func (fb *Filebeat) Setup(b *beat.Beat) error { } defer esClient.Close() - return fb.moduleRegistry.Setup(esClient) + err = fb.moduleRegistry.LoadPipelines(esClient) + if err != nil { + return err + } + + return nil } // Run allows the beater to be run as a beat. @@ -91,8 +96,8 @@ func (fb *Filebeat) Run(b *beat.Beat) error { var err error config := fb.config - if *setup { - err = fb.Setup(b) + if !fb.moduleRegistry.Empty() { + err = fb.modulesSetup(b) if err != nil { return err } diff --git a/filebeat/docs/reference/configuration.asciidoc b/filebeat/docs/reference/configuration.asciidoc index 637b01c25874..46c883a7da9c 100644 --- a/filebeat/docs/reference/configuration.asciidoc +++ b/filebeat/docs/reference/configuration.asciidoc @@ -21,6 +21,7 @@ configuration settings, you need to restart {beatname_uc} to pick up the changes * <> * <> * <> +* <> * <> * <> diff --git a/filebeat/docs/reference/configuration/filebeat-options.asciidoc b/filebeat/docs/reference/configuration/filebeat-options.asciidoc index 4edcd59468af..a048747a01a5 100644 --- a/filebeat/docs/reference/configuration/filebeat-options.asciidoc +++ b/filebeat/docs/reference/configuration/filebeat-options.asciidoc @@ -588,6 +588,9 @@ include::../../../../libbeat/docs/outputconfig.asciidoc[] pass::[] include::../../../../libbeat/docs/shared-path-config.asciidoc[] +pass::[] +include::../../../../libbeat/docs/dashboardsconfig.asciidoc[] + pass::[] include::../../../../libbeat/docs/loggingconfig.asciidoc[] diff --git a/filebeat/filebeat.full.yml b/filebeat/filebeat.full.yml 
index 8b35c37ebabd..f61a8f24be91 100644 --- a/filebeat/filebeat.full.yml +++ b/filebeat/filebeat.full.yml @@ -897,6 +897,45 @@ output.elasticsearch: # the default for the logs path is a logs subdirectory inside the home path. #path.logs: ${path.home}/logs +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag. +#dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#dashboards.url: + +# The directory from where to read the dashboards. It is used instead of the URL +# when it has a value. +#dashboards.directory: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the URL when it has a value. +#dashboards.file: + +# If this option is enabled, the snapshot URL is used instead of the default URL. +#dashboard.snapshot: false + +# The URL from where to download the snapshot version of the dashboards. By default +# this has a value which is computed based on the Beat name and version. +#dashboard.snapshot_url + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#dashboard.beat: filebeat + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#dashboards.kibana_index: .kibana + +# The Elasticsearch index name. This overwrites the index name defined in the +# dashboards and index pattern. 
Example: testbeat-* +#dashboards.index: + #================================ Logging ====================================== # There are three options for the log output: syslog, file, stderr. # Under Windows systems, the log files are per default sent to the file output, diff --git a/filebeat/fileset/modules.go b/filebeat/fileset/modules.go index 4dba8132b801..2bdc3631e315 100644 --- a/filebeat/fileset/modules.go +++ b/filebeat/fileset/modules.go @@ -247,8 +247,8 @@ type PipelineLoader interface { LoadJSON(path string, json map[string]interface{}) error } -// Setup is called on -setup and loads the pipelines for each configured fileset. -func (reg *ModuleRegistry) Setup(esClient PipelineLoader) error { +// LoadPipelines loads the pipelines for each configured fileset. +func (reg *ModuleRegistry) LoadPipelines(esClient PipelineLoader) error { for module, filesets := range reg.registry { for name, fileset := range filesets { pipelineID, content, err := fileset.GetPipeline() @@ -273,3 +273,11 @@ func loadPipeline(esClient PipelineLoader, pipelineID string, content map[string logp.Info("Elasticsearch pipeline with ID '%s' loaded", pipelineID) return nil } + +func (reg *ModuleRegistry) Empty() bool { + count := 0 + for _, filesets := range reg.registry { + count += len(filesets) + } + return count == 0 +} diff --git a/filebeat/fileset/modules_integration_test.go b/filebeat/fileset/modules_integration_test.go index 129ef8976661..acfc98d7b901 100644 --- a/filebeat/fileset/modules_integration_test.go +++ b/filebeat/fileset/modules_integration_test.go @@ -48,7 +48,7 @@ func TestSetupNginx(t *testing.T) { reg, err := newModuleRegistry(modulesPath, configs, nil) assert.NoError(t, err) - err = reg.Setup(client) + err = reg.LoadPipelines(client) assert.NoError(t, err) status, _, _ := client.Request("GET", "/_ingest/pipeline/nginx-access-with_plugins", "", nil, nil) diff --git a/filebeat/tests/system/test_modules.py b/filebeat/tests/system/test_modules.py index 
304742edf454..4dc7240b9a54 100644 --- a/filebeat/tests/system/test_modules.py +++ b/filebeat/tests/system/test_modules.py @@ -71,7 +71,7 @@ def run_on_file(self, module, fileset, test_file, cfgfile): cmd = [ self.filebeat, "-systemTest", - "-e", "-d", "*", "-once", "-setup", + "-e", "-d", "*", "-once", "-c", cfgfile, "-modules={}".format(module), "-M", "{module}.{fileset}.var.paths=[{test_file}]".format( diff --git a/heartbeat/docs/reference/configuration.asciidoc b/heartbeat/docs/reference/configuration.asciidoc index 840e59fc58a3..1726e1fc77b6 100644 --- a/heartbeat/docs/reference/configuration.asciidoc +++ b/heartbeat/docs/reference/configuration.asciidoc @@ -19,6 +19,7 @@ configuration settings, you need to restart Heartbeat to pick up the changes. * <> * <> * <> +* <> * <> * <> diff --git a/heartbeat/docs/reference/configuration/heartbeat-options.asciidoc b/heartbeat/docs/reference/configuration/heartbeat-options.asciidoc index 8a10cc061f1e..ac845e29bd6f 100644 --- a/heartbeat/docs/reference/configuration/heartbeat-options.asciidoc +++ b/heartbeat/docs/reference/configuration/heartbeat-options.asciidoc @@ -455,6 +455,9 @@ include::../../../../libbeat/docs/outputconfig.asciidoc[] pass::[] include::../../../../libbeat/docs/shared-path-config.asciidoc[] +pass::[] +include::../../../../libbeat/docs/dashboardsconfig.asciidoc[] + pass::[] include::../../../../libbeat/docs/loggingconfig.asciidoc[] diff --git a/heartbeat/heartbeat.full.yml b/heartbeat/heartbeat.full.yml index 563b02ecdef3..3155f7aa0159 100644 --- a/heartbeat/heartbeat.full.yml +++ b/heartbeat/heartbeat.full.yml @@ -745,6 +745,45 @@ output.elasticsearch: # the default for the logs path is a logs subdirectory inside the home path. #path.logs: ${path.home}/logs +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. 
Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag. +#dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#dashboards.url: + +# The directory from where to read the dashboards. It is used instead of the URL +# when it has a value. +#dashboards.directory: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the URL when it has a value. +#dashboards.file: + +# If this option is enabled, the snapshot URL is used instead of the default URL. +#dashboard.snapshot: false + +# The URL from where to download the snapshot version of the dashboards. By default +# this has a value which is computed based on the Beat name and version. +#dashboard.snapshot_url + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#dashboard.beat: heartbeat + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#dashboards.kibana_index: .kibana + +# The Elasticsearch index name. This overwrites the index name defined in the +# dashboards and index pattern. Example: testbeat-* +#dashboards.index: + #================================ Logging ====================================== # There are three options for the log output: syslog, file, stderr. 
# Under Windows systems, the log files are per default sent to the file output, diff --git a/libbeat/_meta/config.full.yml b/libbeat/_meta/config.full.yml index 05087a79d4c8..1a89c33cc879 100644 --- a/libbeat/_meta/config.full.yml +++ b/libbeat/_meta/config.full.yml @@ -547,6 +547,45 @@ output.elasticsearch: # the default for the logs path is a logs subdirectory inside the home path. #path.logs: ${path.home}/logs +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag. +#dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#dashboards.url: + +# The directory from where to read the dashboards. It is used instead of the URL +# when it has a value. +#dashboards.directory: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the URL when it has a value. +#dashboards.file: + +# If this option is enabled, the snapshot URL is used instead of the default URL. +#dashboard.snapshot: false + +# The URL from where to download the snapshot version of the dashboards. By default +# this has a value which is computed based on the Beat name and version. +#dashboard.snapshot_url + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#dashboard.beat: beatname + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#dashboards.kibana_index: .kibana + +# The Elasticsearch index name. 
This overwrites the index name defined in the +# dashboards and index pattern. Example: testbeat-* +#dashboards.index: + #================================ Logging ====================================== # There are three options for the log output: syslog, file, stderr. # Under Windows systems, the log files are per default sent to the file output, diff --git a/libbeat/beat/beat.go b/libbeat/beat/beat.go index 13b0f2b7b369..2d9d2a35a4a7 100644 --- a/libbeat/beat/beat.go +++ b/libbeat/beat/beat.go @@ -44,7 +44,9 @@ import ( "github.com/elastic/beats/libbeat/cfgfile" "github.com/elastic/beats/libbeat/common" + "github.com/elastic/beats/libbeat/dashboards/dashboards" "github.com/elastic/beats/libbeat/logp" + "github.com/elastic/beats/libbeat/outputs/elasticsearch" "github.com/elastic/beats/libbeat/paths" "github.com/elastic/beats/libbeat/plugin" "github.com/elastic/beats/libbeat/processors" @@ -100,10 +102,12 @@ type BeatConfig struct { Logging logp.Logging `config:"logging"` Processors processors.PluginConfig `config:"processors"` Path paths.Path `config:"path"` + Dashboards *common.Config `config:"dashboards"` } var ( printVersion = flag.Bool("version", false, "Print the version and exit") + setup = flag.Bool("setup", false, "Load the sample Kibana dashboards") ) var debugf = logp.MakeDebug("beat") @@ -209,6 +213,11 @@ func (b *Beat) launch(bt Creator) error { svc.HandleSignals(beater.Stop) + err = b.loadDashboards() + if err != nil { + return err + } + logp.Info("%s start running.", b.Name) defer logp.Info("%s stopped.", b.Name) defer logp.LogTotalExpvars(&b.Config.Logging) @@ -285,6 +294,39 @@ func (b *Beat) configure() error { return nil } +func (b *Beat) loadDashboards() error { + if *setup { + // -setup implies dashboards.enabled=true + if b.Config.Dashboards == nil { + b.Config.Dashboards = common.NewConfig() + } + err := b.Config.Dashboards.SetBool("enabled", -1, true) + if err != nil { + return fmt.Errorf("Error setting 
dashboard.enabled=true: %v", err) + } + } + + if b.Config.Dashboards != nil && b.Config.Dashboards.Enabled() { + esConfig := b.Config.Output["elasticsearch"] + if esConfig == nil || !esConfig.Enabled() { + return fmt.Errorf("Dashboard loading requested but the Elasticsearch output is not configured/enabled") + } + esClient, err := elasticsearch.NewConnectedClient(esConfig) + if err != nil { + return fmt.Errorf("Error creating ES client: %v", err) + } + defer esClient.Close() + + err = dashboards.ImportDashboards(b.Name, b.Version, esClient, b.Config.Dashboards) + if err != nil { + return fmt.Errorf("Error importing Kibana dashboards: %v", err) + } + logp.Info("Kibana dashboards successfully loaded.") + } + + return nil +} + // handleError handles the given error by logging it and then returning the // error. If the err is nil or is a GracefulExit error then the method will // return nil without logging anything. diff --git a/libbeat/dashboards/dashboards/config.go b/libbeat/dashboards/dashboards/config.go new file mode 100644 index 000000000000..d3abebf37f38 --- /dev/null +++ b/libbeat/dashboards/dashboards/config.go @@ -0,0 +1,23 @@ +package dashboards + +type DashboardsConfig struct { + Enabled bool `config:"enabled"` + KibanaIndex string `config:"kibana_index"` + Index string `config:"index"` + Dir string `config:"directory"` + File string `config:"file"` + Beat string `config:"beat"` + URL string `config:"url"` + OnlyDashboards bool `config:"only_dashboards"` + OnlyIndex bool `config:"only_index"` + Snapshot bool `config:"snapshot"` + SnapshotURL string `config:"snapshot_url"` +} + +var defaultDashboardsConfig = DashboardsConfig{ + KibanaIndex: ".kibana", +} +var ( + defaultURLPattern = "https://artifacts.elastic.co/downloads/beats/beats-dashboards/beats-dashboards-%s.zip" + snapshotURLPattern = "https://beats-nightlies.s3.amazonaws.com/dashboards/beats-dashboards-%s-SNAPSHOT.zip" +) diff --git a/libbeat/dashboards/dashboards/dashboards.go 
b/libbeat/dashboards/dashboards/dashboards.go new file mode 100644 index 000000000000..7a99696253f1 --- /dev/null +++ b/libbeat/dashboards/dashboards/dashboards.go @@ -0,0 +1,42 @@ +package dashboards + +import ( + "fmt" + + "github.com/elastic/beats/libbeat/common" + "github.com/elastic/beats/libbeat/outputs/elasticsearch" +) + +// DashboardLoader is a subset of the Elasticsearch client API capable of +// loading the dashboards. +type DashboardLoader interface { + LoadJSON(path string, json map[string]interface{}) error + CreateIndex(index string, body interface{}) (int, *elasticsearch.QueryResult, error) +} + +func ImportDashboards(beatName, beatVersion string, esClient DashboardLoader, cfg *common.Config) error { + if cfg == nil || !cfg.Enabled() { + return nil + } + + dashConfig := defaultDashboardsConfig + dashConfig.Beat = beatName + dashConfig.URL = fmt.Sprintf(defaultURLPattern, beatVersion) + dashConfig.SnapshotURL = fmt.Sprintf(snapshotURLPattern, beatVersion) + + err := cfg.Unpack(&dashConfig) + if err != nil { + return err + } + + importer, err := NewImporter(&dashConfig, esClient, nil) + if err != nil { + return nil + } + + if err := importer.Import(); err != nil { + return err + } + + return nil +} diff --git a/libbeat/dashboards/dashboards/importer.go b/libbeat/dashboards/dashboards/importer.go new file mode 100644 index 000000000000..ad3150fa8bae --- /dev/null +++ b/libbeat/dashboards/dashboards/importer.go @@ -0,0 +1,535 @@ +package dashboards + +import ( + "archive/zip" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "os" + "path" + "path/filepath" + "strings" + + "github.com/elastic/beats/libbeat/common" + "github.com/elastic/beats/libbeat/logp" +) + +// MessageOutputter is a function type for injecting status logging +// into this module. 
+type MessageOutputter func(msg string, a ...interface{}) + +type Importer struct { + cfg *DashboardsConfig + client DashboardLoader + msgOutputter *MessageOutputter +} + +func NewImporter(cfg *DashboardsConfig, client DashboardLoader, msgOutputter *MessageOutputter) (*Importer, error) { + return &Importer{ + cfg: cfg, + client: client, + msgOutputter: msgOutputter, + }, nil +} + +func (imp Importer) statusMsg(msg string, a ...interface{}) { + if imp.msgOutputter != nil { + (*imp.msgOutputter)(msg, a...) + } else { + logp.Debug("dashboards", msg, a...) + } +} + +// Import imports the Kibana dashboards according to the configuration options. +func (imp Importer) Import() error { + + err := imp.CreateKibanaIndex() + if err != nil { + return fmt.Errorf("Error creating Kibana index: %v", err) + } + + if imp.cfg.Dir != "" { + err = imp.ImportKibana(imp.cfg.Dir) + if err != nil { + return fmt.Errorf("Error importing directory %s: %v", imp.cfg.Dir, err) + } + } else { + if imp.cfg.URL != "" || imp.cfg.Snapshot || imp.cfg.File != "" { + err = imp.ImportArchive() + if err != nil { + return fmt.Errorf("Error importing URL/file: %v", err) + } + } else { + return fmt.Errorf("No URL and no file specify. 
Nothing to import") + } + } + return nil +} + +// CreateKibanaIndex creates the kibana index if it doesn't exists and sets +// some index properties which are needed as a workaround for: +// https://github.com/elastic/beats-dashboards/issues/94 +func (imp Importer) CreateKibanaIndex() error { + imp.client.CreateIndex(imp.cfg.KibanaIndex, nil) + _, _, err := imp.client.CreateIndex(imp.cfg.KibanaIndex+"/_mapping/search", + common.MapStr{ + "search": common.MapStr{ + "properties": common.MapStr{ + "hits": common.MapStr{ + "type": "integer", + }, + "version": common.MapStr{ + "type": "integer", + }, + }, + }, + }) + if err != nil { + fmt.Fprintln(os.Stderr, fmt.Sprintf("Failed to set the mapping - %s", err)) + } + return nil +} + +func (imp Importer) ImportJSONFile(fileType string, file string) error { + + path := "/" + imp.cfg.KibanaIndex + "/" + fileType + + reader, err := ioutil.ReadFile(file) + if err != nil { + return fmt.Errorf("Failed to read %s. Error: %s", file, err) + } + var jsonContent map[string]interface{} + json.Unmarshal(reader, &jsonContent) + fileBase := strings.TrimSuffix(filepath.Base(file), filepath.Ext(file)) + + err = imp.client.LoadJSON(path+"/"+fileBase, jsonContent) + if err != nil { + return fmt.Errorf("Failed to load %s under %s/%s: %s", file, path, fileBase, err) + } + + return nil +} + +func (imp Importer) ImportDashboard(file string) error { + + imp.statusMsg("Import dashboard %s", file) + + /* load dashboard */ + err := imp.ImportJSONFile("dashboard", file) + if err != nil { + return err + } + + /* load the visualizations and searches that depend on the dashboard */ + err = imp.importPanelsFromDashboard(file) + if err != nil { + return err + } + + return nil +} + +func (imp Importer) importPanelsFromDashboard(file string) (err error) { + + // directory with the dashboards + dir := filepath.Dir(file) + + // main directory with dashboard, search, visualizations directories + mainDir := filepath.Dir(dir) + + reader, err := 
ioutil.ReadFile(file) + if err != nil { + return + } + type record struct { + Title string `json:"title"` + PanelsJSON string `json:"panelsJSON"` + } + type panel struct { + ID string `json:"id"` + Type string `json:"type"` + } + + var jsonContent record + json.Unmarshal(reader, &jsonContent) + + var widgets []panel + json.Unmarshal([]byte(jsonContent.PanelsJSON), &widgets) + + for _, widget := range widgets { + + if widget.Type == "visualization" { + err = imp.ImportVisualization(path.Join(mainDir, "visualization", widget.ID+".json")) + if err != nil { + return err + } + } else if widget.Type == "search" { + err = imp.ImportSearch(path.Join(mainDir, "search", widget.ID+".json")) + if err != nil { + return err + } + } else { + imp.statusMsg("Widgets: %v", widgets) + return fmt.Errorf("Unknown panel type %s in %s", widget.Type, file) + } + } + return +} + +func (imp Importer) importSearchFromVisualization(file string) error { + type record struct { + Title string `json:"title"` + SavedSearchID string `json:"savedSearchId"` + } + + reader, err := ioutil.ReadFile(file) + if err != nil { + return nil + } + + var jsonContent record + json.Unmarshal(reader, &jsonContent) + id := jsonContent.SavedSearchID + if len(id) == 0 { + // no search used + return nil + } + + // directory with the visualizations + dir := filepath.Dir(file) + + // main directory + mainDir := filepath.Dir(dir) + + searchFile := path.Join(mainDir, "search", id+".json") + + if searchFile != "" { + // visualization depends on search + if err := imp.ImportSearch(searchFile); err != nil { + return err + } + } + return nil +} + +func (imp Importer) ImportVisualization(file string) error { + + imp.statusMsg("Import visualization %s", file) + if err := imp.ImportJSONFile("visualization", file); err != nil { + return err + } + + err := imp.importSearchFromVisualization(file) + if err != nil { + return err + } + return nil +} + +func (imp Importer) ImportSearch(file string) error { + + reader, err := 
ioutil.ReadFile(file) + if err != nil { + return err + } + searchName := strings.TrimSuffix(filepath.Base(file), filepath.Ext(file)) + + var searchContent common.MapStr + err = json.Unmarshal(reader, &searchContent) + if err != nil { + return fmt.Errorf("Failed to unmarshal search content %s: %v", searchName, err) + } + + if imp.cfg.Index != "" { + + // change index pattern name + if savedObject, ok := searchContent["kibanaSavedObjectMeta"].(map[string]interface{}); ok { + if source, ok := savedObject["searchSourceJSON"].(string); ok { + var record common.MapStr + err = json.Unmarshal([]byte(source), &record) + if err != nil { + return fmt.Errorf("Failed to unmarshal searchSourceJSON from search %s: %v", searchName, err) + } + + if _, ok := record["index"]; ok { + record["index"] = imp.cfg.Index + } + searchSourceJSON, err := json.Marshal(record) + if err != nil { + return fmt.Errorf("Failed to marshal searchSourceJSON: %v", err) + } + + savedObject["searchSourceJSON"] = string(searchSourceJSON) + } + } + + } + + path := "/" + imp.cfg.KibanaIndex + "/search/" + searchName + imp.statusMsg("Import search %s", file) + + if err = imp.client.LoadJSON(path, searchContent); err != nil { + return err + } + + return nil +} + +func (imp Importer) ImportIndex(file string) error { + + reader, err := ioutil.ReadFile(file) + if err != nil { + return err + } + var indexContent common.MapStr + json.Unmarshal(reader, &indexContent) + + indexName, ok := indexContent["title"].(string) + if !ok { + return errors.New(fmt.Sprintf("Missing title in the index-pattern file at %s", file)) + } + + if imp.cfg.Index != "" { + // change index pattern name + imp.statusMsg("Change index in index-pattern %s", indexName) + indexContent["title"] = imp.cfg.Index + } + + path := "/" + imp.cfg.KibanaIndex + "/index-pattern/" + indexName + imp.statusMsg("Import index to %s from %s\n", path, file) + + if err = imp.client.LoadJSON(path, indexContent); err != nil { + return err + } + return nil + +} + 
+func (imp Importer) ImportFile(fileType string, file string) error { + + if fileType == "dashboard" { + return imp.ImportDashboard(file) + } else if fileType == "index-pattern" { + return imp.ImportIndex(file) + } + return fmt.Errorf("Unexpected file type %s", fileType) +} + +func (imp Importer) ImportDir(dirType string, dir string) error { + + dir = path.Join(dir, dirType) + + imp.statusMsg("Import directory %s", dir) + errors := []string{} + + files, err := filepath.Glob(path.Join(dir, "*.json")) + if err != nil { + return fmt.Errorf("Failed to read directory %s. Error: %s", dir, err) + } + if len(files) == 0 { + return fmt.Errorf("The directory %s is empty, nothing to import", dir) + } + for _, file := range files { + + err = imp.ImportFile(dirType, file) + if err != nil { + errors = append(errors, fmt.Sprintf(" error loading %s: %s", file, err)) + } + } + if len(errors) > 0 { + return fmt.Errorf("Failed to load directory %s:\n%s", dir, strings.Join(errors, "\n")) + } + return nil + +} + +func (imp Importer) unzip(archive, target string) error { + + imp.statusMsg("Unzip archive %s", target) + + reader, err := zip.OpenReader(archive) + if err != nil { + return err + } + + for _, file := range reader.File { + filePath := filepath.Join(target, file.Name) + + if file.FileInfo().IsDir() { + os.MkdirAll(filePath, file.Mode()) + continue + } + fileReader, err := file.Open() + if err != nil { + return err + } + defer fileReader.Close() + + targetFile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, file.Mode()) + if err != nil { + return err + } + defer targetFile.Close() + + if _, err := io.Copy(targetFile, fileReader); err != nil { + return err + } + } + return nil +} + +func (imp Importer) ImportArchive() error { + + var archive string + + target, err := ioutil.TempDir("", "tmp") + if err != nil { + return errors.New("Failed to generate a temporary directory name") + } + + if err = os.MkdirAll(target, 0755); err != nil { + return fmt.Errorf("Failed 
to create a temporary directory: %v", target) + } + + defer os.RemoveAll(target) // clean up + + imp.statusMsg("Create temporary directory %s", target) + if imp.cfg.File != "" { + archive = imp.cfg.File + } else if imp.cfg.Snapshot { + // In case snapshot is set, snapshot version is fetched + url := imp.cfg.SnapshotURL + archive, err = imp.downloadFile(url, target) + if err != nil { + return fmt.Errorf("Failed to download snapshot file: %s", url) + } + } else if imp.cfg.URL != "" { + archive, err = imp.downloadFile(imp.cfg.URL, target) + if err != nil { + return fmt.Errorf("Failed to download file: %s", imp.cfg.URL) + } + } else { + return errors.New("No archive file or URL is set - please use -file or -url option") + } + + err = imp.unzip(archive, target) + if err != nil { + return fmt.Errorf("Failed to unzip the archive: %s", archive) + } + dirs, err := getDirectories(target) + if err != nil { + return err + } + if len(dirs) != 1 { + return fmt.Errorf("Too many directories under %s", target) + } + + dirs, err = getDirectories(dirs[0]) + if err != nil { + return err + } + + for _, dir := range dirs { + imp.statusMsg("Importing Kibana from %s", dir) + if imp.cfg.Beat == "" || filepath.Base(dir) == imp.cfg.Beat { + err = imp.ImportKibana(dir) + if err != nil { + return err + } + } + } + return nil +} + +func getDirectories(target string) ([]string, error) { + + files, err := ioutil.ReadDir(target) + if err != nil { + return nil, err + } + var dirs []string + + for _, file := range files { + if file.IsDir() { + dirs = append(dirs, filepath.Join(target, file.Name())) + } + } + return dirs, nil +} + +func (imp Importer) downloadFile(url string, target string) (string, error) { + + fileName := filepath.Base(url) + targetPath := path.Join(target, fileName) + imp.statusMsg("Downloading %s", url) + + // Create the file + out, err := os.Create(targetPath) + if err != nil { + return targetPath, err + } + defer out.Close() + + // Get the data + resp, err := http.Get(url) + if 
err != nil { + return targetPath, err + } + defer resp.Body.Close() + + // Writer the body to file + _, err = io.Copy(out, resp.Body) + if err != nil { + return targetPath, err + } + + return targetPath, nil +} + +// import Kibana dashboards and index-pattern or only one of these +func (imp Importer) ImportKibana(dir string) error { + + var err error + + if _, err := os.Stat(dir); err != nil { + return fmt.Errorf("No directory %s", dir) + } + + check := []string{} + if !imp.cfg.OnlyDashboards { + check = append(check, "index-pattern") + } + if !imp.cfg.OnlyIndex { + check = append(check, "dashboard") + } + + types := []string{} + for _, c := range check { + if imp.subdirExists(dir, c) { + types = append(types, c) + } + } + + if len(types) == 0 { + return fmt.Errorf("The directory %s does not contain the %s subdirectory."+ + " There is nothing to import into Kibana.", dir, strings.Join(check, " or ")) + } + + for _, t := range types { + err = imp.ImportDir(t, dir) + if err != nil { + return fmt.Errorf("Failed to import %s: %v", t, err) + } + } + return nil +} + +func (imp Importer) subdirExists(parent string, child string) bool { + if _, err := os.Stat(path.Join(parent, child)); err != nil { + return false + } + return true +} diff --git a/libbeat/dashboards/dashboards/importer_integration_test.go b/libbeat/dashboards/dashboards/importer_integration_test.go new file mode 100644 index 000000000000..15938668ced3 --- /dev/null +++ b/libbeat/dashboards/dashboards/importer_integration_test.go @@ -0,0 +1,55 @@ +// +build integration + +package dashboards + +import ( + "testing" + + "github.com/elastic/beats/libbeat/logp" + "github.com/elastic/beats/libbeat/outputs/elasticsearch" + "github.com/stretchr/testify/assert" +) + +func TestImporter(t *testing.T) { + if testing.Verbose() { + logp.LogInit(logp.LOG_DEBUG, "", false, true, []string{"*"}) + } + + client := elasticsearch.GetTestingElasticsearch() + + imp, err := NewImporter(&DashboardsConfig{ + KibanaIndex: 
".kibana-test", + File: "testdata/testbeat-dashboards.zip", + Beat: "testbeat", + }, client, nil) + + assert.NoError(t, err) + + err = imp.Import() + assert.NoError(t, err) + + status, _, _ := client.Request("GET", "/.kibana-test/dashboard/1e4389f0-e871-11e6-911d-3f8ed6f72700", "", nil, nil) + assert.Equal(t, 200, status) +} + +func TestImporterEmptyBeat(t *testing.T) { + if testing.Verbose() { + logp.LogInit(logp.LOG_DEBUG, "", false, true, []string{"*"}) + } + + client := elasticsearch.GetTestingElasticsearch() + + imp, err := NewImporter(&DashboardsConfig{ + KibanaIndex: ".kibana-test-nobeat", + File: "testdata/testbeat-dashboards.zip", + Beat: "", + }, client, nil) + + assert.NoError(t, err) + + err = imp.Import() + assert.NoError(t, err) + + status, _, _ := client.Request("GET", "/.kibana-test-nobeat/dashboard/1e4389f0-e871-11e6-911d-3f8ed6f72700", "", nil, nil) + assert.Equal(t, 200, status) +} diff --git a/libbeat/dashboards/dashboards/testdata/testbeat-dashboards.zip b/libbeat/dashboards/dashboards/testdata/testbeat-dashboards.zip new file mode 100644 index 000000000000..010cb96b7fb0 Binary files /dev/null and b/libbeat/dashboards/dashboards/testdata/testbeat-dashboards.zip differ diff --git a/libbeat/dashboards/import_dashboards.go b/libbeat/dashboards/import_dashboards.go index d509258d0b66..d81de51b8a98 100644 --- a/libbeat/dashboards/import_dashboards.go +++ b/libbeat/dashboards/import_dashboards.go @@ -1,23 +1,15 @@ package main import ( - "archive/zip" - "encoding/json" "errors" "flag" "fmt" - "io" - "io/ioutil" - "net/http" "os" - "path" - "path/filepath" - "strings" "time" lbeat "github.com/elastic/beats/libbeat/beat" - "github.com/elastic/beats/libbeat/common" "github.com/elastic/beats/libbeat/common/fmtstr" + "github.com/elastic/beats/libbeat/dashboards/dashboards" "github.com/elastic/beats/libbeat/outputs" "github.com/elastic/beats/libbeat/outputs/elasticsearch" "github.com/elastic/beats/libbeat/outputs/outil" @@ -130,9 
+122,7 @@ func (cl *CommandLine) ParseCommandLine() error { return nil } -func New() (*Importer, error) { - importer := Importer{} - +func New() (*dashboards.Importer, error) { /* define the command line arguments */ cl, err := DefineCommandLine() if err != nil { @@ -144,7 +134,20 @@ func New() (*Importer, error) { if err != nil { return nil, err } - importer.cl = cl + + cfg := dashboards.DashboardsConfig{ + Enabled: true, + KibanaIndex: cl.opt.KibanaIndex, + Index: cl.opt.Index, + Dir: cl.opt.Dir, + File: cl.opt.File, + Beat: cl.opt.Beat, + URL: cl.opt.URL, + OnlyDashboards: cl.opt.OnlyDashboards, + OnlyIndex: cl.opt.OnlyIndex, + Snapshot: cl.opt.Snapshot, + SnapshotURL: fmt.Sprintf("https://beats-nightlies.s3.amazonaws.com/dashboards/beats-dashboards-%s-SNAPSHOT.zip", lbeat.GetDefaultVersion()), + } /* prepare the Elasticsearch index pattern */ fmtstr, err := fmtstr.CompileEvent(cl.opt.Index) @@ -191,503 +194,20 @@ func New() (*Importer, error) { if err != nil { return nil, fmt.Errorf("Failed to connect to Elasticsearch: %s", err) } - importer.client = client - - return &importer, nil - -} - -func (imp Importer) statusMsg(msg string, a ...interface{}) { - if imp.cl.opt.Quiet { - return - } - - if len(a) == 0 { - fmt.Println(msg) - } else { - fmt.Println(fmt.Sprintf(msg, a...)) - } -} - -func (imp Importer) CreateIndex() error { - imp.client.CreateIndex(imp.cl.opt.KibanaIndex, nil) - _, _, err := imp.client.CreateIndex(imp.cl.opt.KibanaIndex+"/_mapping/search", - common.MapStr{ - "search": common.MapStr{ - "properties": common.MapStr{ - "hits": common.MapStr{ - "type": "integer", - }, - "version": common.MapStr{ - "type": "integer", - }, - }, - }, - }) - if err != nil { - fmt.Fprintln(os.Stderr, fmt.Sprintf("Failed to set the mapping - %s", err)) - } - return nil -} - -func (imp Importer) ImportJSONFile(fileType string, file string) error { - - path := "/" + imp.cl.opt.KibanaIndex + "/" + fileType - - reader, err := ioutil.ReadFile(file) - if err != nil { - return 
fmt.Errorf("Failed to read %s. Error: %s", file, err) - } - var jsonContent map[string]interface{} - json.Unmarshal(reader, &jsonContent) - fileBase := strings.TrimSuffix(filepath.Base(file), filepath.Ext(file)) - - err = imp.client.LoadJSON(path+"/"+fileBase, jsonContent) - if err != nil { - return fmt.Errorf("Failed to load %s under %s/%s: %s", file, path, fileBase, err) - } - - return nil -} - -func (imp Importer) ImportDashboard(file string) error { - - imp.statusMsg("Import dashboard %s", file) - - /* load dashboard */ - err := imp.ImportJSONFile("dashboard", file) - if err != nil { - return err - } - - /* load the visualizations and searches that depend on the dashboard */ - err = imp.importPanelsFromDashboard(file) - if err != nil { - return err - } - return nil -} - -func (imp Importer) importPanelsFromDashboard(file string) (err error) { - - // directory with the dashboards - dir := filepath.Dir(file) - - // main directory with dashboard, search, visualizations directories - mainDir := filepath.Dir(dir) - - reader, err := ioutil.ReadFile(file) - if err != nil { - return - } - type record struct { - Title string `json:"title"` - PanelsJSON string `json:"panelsJSON"` - } - type panel struct { - ID string `json:"id"` - Type string `json:"type"` - } - - var jsonContent record - json.Unmarshal(reader, &jsonContent) - - var widgets []panel - json.Unmarshal([]byte(jsonContent.PanelsJSON), &widgets) - - for _, widget := range widgets { - - if widget.Type == "visualization" { - err = imp.ImportVisualization(path.Join(mainDir, "visualization", widget.ID+".json")) - if err != nil { - return err - } - } else if widget.Type == "search" { - err = imp.ImportSearch(path.Join(mainDir, "search", widget.ID+".json")) - if err != nil { - return err - } - } else { - imp.statusMsg("Widgets: %v", widgets) - return fmt.Errorf("Unknown panel type %s in %s", widget.Type, file) + statusMsg := dashboards.MessageOutputter(func(msg string, a ...interface{}) { + if cl.opt.Quiet { + 
return } - } - return -} - -func (imp Importer) importSearchFromVisualization(file string) error { - type record struct { - Title string `json:"title"` - SavedSearchID string `json:"savedSearchId"` - } - - reader, err := ioutil.ReadFile(file) - if err != nil { - return nil - } - - var jsonContent record - json.Unmarshal(reader, &jsonContent) - id := jsonContent.SavedSearchID - if len(id) == 0 { - // no search used - return nil - } - - // directory with the visualizations - dir := filepath.Dir(file) - - // main directory - mainDir := filepath.Dir(dir) - - searchFile := path.Join(mainDir, "search", id+".json") - - if searchFile != "" { - // visualization depends on search - if err := imp.ImportSearch(searchFile); err != nil { - return err - } - } - return nil -} - -func (imp Importer) ImportVisualization(file string) error { - - imp.statusMsg("Import visualization %s", file) - if err := imp.ImportJSONFile("visualization", file); err != nil { - return err - } - - err := imp.importSearchFromVisualization(file) - if err != nil { - return err - } - return nil -} - -func (imp Importer) ImportSearch(file string) error { - - reader, err := ioutil.ReadFile(file) - if err != nil { - return err - } - searchName := strings.TrimSuffix(filepath.Base(file), filepath.Ext(file)) - - var searchContent common.MapStr - err = json.Unmarshal(reader, &searchContent) - if err != nil { - return fmt.Errorf("Failed to unmarshal search content %s: %v", searchName, err) - } - - if imp.cl.opt.Index != "" { - - // change index pattern name - if savedObject, ok := searchContent["kibanaSavedObjectMeta"].(map[string]interface{}); ok { - if source, ok := savedObject["searchSourceJSON"].(string); ok { - var record common.MapStr - err = json.Unmarshal([]byte(source), &record) - if err != nil { - return fmt.Errorf("Failed to unmarshal searchSourceJSON from search %s: %v", searchName, err) - } - - if _, ok := record["index"]; ok { - record["index"] = imp.cl.opt.Index - } - searchSourceJSON, err := 
json.Marshal(record) - if err != nil { - return fmt.Errorf("Failed to marshal searchSourceJSON: %v", err) - } - - savedObject["searchSourceJSON"] = string(searchSourceJSON) - } - } - - } - - path := "/" + imp.cl.opt.KibanaIndex + "/search/" + searchName - imp.statusMsg("Import search %s", file) - - if err = imp.client.LoadJSON(path, searchContent); err != nil { - return err - } - - return nil -} - -func (imp Importer) ImportIndex(file string) error { - - reader, err := ioutil.ReadFile(file) - if err != nil { - return err - } - var indexContent common.MapStr - json.Unmarshal(reader, &indexContent) - - indexName, ok := indexContent["title"].(string) - if !ok { - return errors.New(fmt.Sprintf("Missing title in the index-pattern file at %s", file)) - } - - if imp.cl.opt.Index != "" { - // change index pattern name - imp.statusMsg("Change index in index-pattern %s", indexName) - indexContent["title"] = imp.cl.opt.Index - } - - path := "/" + imp.cl.opt.KibanaIndex + "/index-pattern/" + indexName - fmt.Printf("Import index to %s from %s\n", path, file) - - if err = imp.client.LoadJSON(path, indexContent); err != nil { - return err - } - return nil - -} - -func (imp Importer) ImportFile(fileType string, file string) error { - if fileType == "dashboard" { - return imp.ImportDashboard(file) - } else if fileType == "index-pattern" { - return imp.ImportIndex(file) - } - return fmt.Errorf("Unexpected file type %s", fileType) -} - -func (imp Importer) ImportDir(dirType string, dir string) error { - - dir = path.Join(dir, dirType) - - imp.statusMsg("Import directory %s", dir) - errors := []string{} - - files, err := filepath.Glob(path.Join(dir, "*.json")) - if err != nil { - return fmt.Errorf("Failed to read directory %s. 
Error: %s", dir, err) - } - if len(files) == 0 { - return fmt.Errorf("The directory %s is empty, nothing to import", dir) - } - for _, file := range files { - - err = imp.ImportFile(dirType, file) - if err != nil { - errors = append(errors, fmt.Sprintf(" error loading %s: %s", file, err)) - } - } - if len(errors) > 0 { - return fmt.Errorf("Failed to load directory %s:\n%s", dir, strings.Join(errors, "\n")) - } - return nil - -} - -func (imp Importer) unzip(archive, target string) error { - - imp.statusMsg("Unzip archive %s", target) - - reader, err := zip.OpenReader(archive) - if err != nil { - return err - } - - for _, file := range reader.File { - filePath := filepath.Join(target, file.Name) - - if file.FileInfo().IsDir() { - os.MkdirAll(filePath, file.Mode()) - continue - } - fileReader, err := file.Open() - if err != nil { - return err - } - defer fileReader.Close() - - targetFile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, file.Mode()) - if err != nil { - return err - } - defer targetFile.Close() - - if _, err := io.Copy(targetFile, fileReader); err != nil { - return err - } - } - return nil -} - -func getMainDir(target string) (string, error) { - - files, err := ioutil.ReadDir(target) - if err != nil { - return "", err - } - var dirs []string - - for _, file := range files { - if file.IsDir() { - dirs = append(dirs, file.Name()) - } - } - if len(dirs) != 1 { - return "", fmt.Errorf("Too many subdirectories under %s", target) - } - return filepath.Join(target, dirs[0]), nil -} - -func getDirectories(target string) ([]string, error) { - - files, err := ioutil.ReadDir(target) - if err != nil { - return nil, err - } - var dirs []string - - for _, file := range files { - if file.IsDir() { - dirs = append(dirs, filepath.Join(target, file.Name())) - } - } - return dirs, nil -} - -func (imp Importer) downloadFile(url string, target string) (string, error) { - - fileName := filepath.Base(url) - targetPath := path.Join(target, fileName) - 
imp.statusMsg("Downloading %s", url) - - // Create the file - out, err := os.Create(targetPath) - if err != nil { - return targetPath, err - } - defer out.Close() - - // Get the data - resp, err := http.Get(url) - if err != nil { - return targetPath, err - } - defer resp.Body.Close() - - // Writer the body to file - _, err = io.Copy(out, resp.Body) - if err != nil { - return targetPath, err - } - - return targetPath, nil -} - -func (imp Importer) ImportArchive() error { - - var archive string - - target, err := ioutil.TempDir("", "tmp") - if err != nil { - return errors.New("Failed to generate a temporary directory name") - } - - if err = os.MkdirAll(target, 0755); err != nil { - return fmt.Errorf("Failed to create a temporary directory: %v", target) - } - - defer os.RemoveAll(target) // clean up - - imp.statusMsg("Create temporary directory %s", target) - if imp.cl.opt.File != "" { - archive = imp.cl.opt.File - } else if imp.cl.opt.Snapshot { - // In case snapshot is set, snapshot version is fetched - url := fmt.Sprintf("https://beats-nightlies.s3.amazonaws.com/dashboards/beats-dashboards-%s-SNAPSHOT.zip", lbeat.GetDefaultVersion()) - archive, err = imp.downloadFile(url, target) - if err != nil { - return fmt.Errorf("Failed to download snapshot file: %s", url) - } - } else if imp.cl.opt.URL != "" { - archive, err = imp.downloadFile(imp.cl.opt.URL, target) - if err != nil { - return fmt.Errorf("Failed to download file: %s", imp.cl.opt.URL) - } - } else { - return errors.New("No archive file or URL is set - please use -file or -url option") - } - - err = imp.unzip(archive, target) - if err != nil { - return fmt.Errorf("Failed to unzip the archive: %s", archive) - } - dirs, err := getDirectories(target) - if err != nil { - return err - } - if len(dirs) != 1 { - return fmt.Errorf("Too many directories under %s", target) - } - - dirs, err = getDirectories(dirs[0]) - if err != nil { - return err - } - - for _, dir := range dirs { - imp.statusMsg("Importing Kibana from 
%s", dir) - if imp.cl.opt.Beat == "" || filepath.Base(dir) == imp.cl.opt.Beat { - err = imp.ImportKibana(dir) - if err != nil { - return err - } - } - } - return nil -} - -func (imp Importer) subdirExists(parent string, child string) bool { - if _, err := os.Stat(path.Join(parent, child)); err != nil { - return false - } - return true -} - -// import Kibana dashboards and index-pattern or only one of these -func (imp Importer) ImportKibana(dir string) error { - - var err error - - if _, err := os.Stat(dir); err != nil { - return fmt.Errorf("No directory %s", dir) - } - - check := []string{} - if !imp.cl.opt.OnlyDashboards { - check = append(check, "index-pattern") - } - if !imp.cl.opt.OnlyIndex { - check = append(check, "dashboard") - } - - types := []string{} - for _, c := range check { - if imp.subdirExists(dir, c) { - types = append(types, c) + if len(a) == 0 { + fmt.Println(msg) + } else { + fmt.Println(fmt.Sprintf(msg, a...)) } - } + }) - if len(types) == 0 { - return fmt.Errorf("The directory %s does not contain the %s subdirectory."+ - " There is nothing to import into Kibana.", dir, strings.Join(check, " or ")) - } - - for _, t := range types { - err = imp.ImportDir(t, dir) - if err != nil { - return fmt.Errorf("Failed to import %s: %v", t, err) - } - } - return nil + return dashboards.NewImporter(&cfg, client, &statusMsg) } func main() { @@ -698,25 +218,10 @@ func main() { fmt.Fprintln(os.Stderr, "Exiting") os.Exit(1) } - if err := importer.CreateIndex(); err != nil { + err = importer.Import() + if err != nil { fmt.Fprintln(os.Stderr, err) fmt.Fprintln(os.Stderr, "Exiting") os.Exit(1) } - - if importer.cl.opt.Dir != "" { - if err = importer.ImportKibana(importer.cl.opt.Dir); err != nil { - fmt.Fprintln(os.Stderr, err) - fmt.Fprintln(os.Stderr, "Exiting") - os.Exit(1) - } - } else { - if importer.cl.opt.URL != "" || importer.cl.opt.File != "" { - if err = importer.ImportArchive(); err != nil { - fmt.Fprintln(os.Stderr, err) - fmt.Fprintln(os.Stderr, 
"Exiting") - os.Exit(1) - } - } - } -} diff --git a/libbeat/docs/dashboardsconfig.asciidoc b/libbeat/docs/dashboardsconfig.asciidoc new file mode 100644 index 000000000000..b894683007f5 --- /dev/null +++ b/libbeat/docs/dashboardsconfig.asciidoc @@ -0,0 +1,87 @@ +////////////////////////////////////////////////////////////////////////// +//// This content is shared by all Elastic Beats. Make sure you keep the +//// descriptions here generic enough to work for all Beats that include +//// this file. When using cross references, make sure that the cross +//// references resolve correctly for any files that include this one. +//// Use the appropriate variables defined in the index.asciidoc file to +//// resolve Beat names: beatname_uc and beatname_lc +//// Use the following include to pull this content into a doc file: +//// include::../../libbeat/docs/dashboardsconfig.asciidoc[] +//// Make sure this content appears below a level 2 heading. +////////////////////////////////////////////////////////////////////////// + +[[configuration-dashboards]] +=== Dashboards Configuration + +beta[] + +The `dashboards` section of the +{beatname_lc}.yml+ config file contains options +for the automatic loading of the sample Beats dashboards. The loading of the +dashboards is disabled by default, but can be enabled either from the configuration +file or by using the `-setup` CLI flag. + +If dashboard loading is enabled, {beatname_uc} attempts to configure Kibana by +writing directly to the Elasticsearch index for the Kibana configuration (by +default, `.kibana`). To connect to Elasticsearch, it uses the settings defined +in the Elasticsearch output. If the Elasticsearch output is not configured or +not enabled, {beatname_uc} will stop with an error. Loading the dashboards is +only attempted at the Beat start; if Elasticsearch is not available when the +Beat starts, {beatname_uc} will stop with an error. 
+ +[source,yaml] +------------------------------------------------------------------------------ +dashboards.enabled: true +------------------------------------------------------------------------------ + +==== Dashboards Loading Options + +You can specify the following options in the `dashboards` section of the ++{beatname_lc}.yml+ config file: + +===== enabled + +If enabled, load the sample Kibana dashboards on startup. If no other options +are set, the dashboards archive is downloaded from the elastic.co website. + +===== url + +The URL from where to download the dashboards archive. By default this URL has a +value which is computed based on the Beat name and version. For released +versions, this URL points to the dashboard archive on the artifacts.elastic.co +website. + +===== directory + +The directory from where to read the dashboards. It is used instead of the URL +when it has a value. + +===== file + +The file archive (zip file) from where to read the dashboards. It is used +instead of the URL when it has a value. + +===== snapshot + +If this option is set to true, the snapshot URL is used instead of the default +URL. + +===== snapshot_url + +The URL from where to download the snapshot version of the dashboards. By +default this has a value which is computed based on the Beat name and version. + +===== beat + +In case the archive contains the dashboards from multiple Beats, this lets you +select which one to load. You can load all the dashboards in the archive by +setting this to the empty string. The default is "{beatname_lc}". + +===== kibana_index + +The name of the Kibana index to use for setting the configuration. Default is +".kibana" + +===== index + +The Elasticsearch index name. This overwrites the index name defined in the +dashboards and index pattern. 
Example: "testbeat-*" diff --git a/libbeat/docs/shared-command-line.asciidoc b/libbeat/docs/shared-command-line.asciidoc index 23a1ded6e0e3..84304dab674e 100644 --- a/libbeat/docs/shared-command-line.asciidoc +++ b/libbeat/docs/shared-command-line.asciidoc @@ -60,6 +60,10 @@ Set the default location for miscellaneous files. See the <> s *`-path.logs`*:: Set the default location for log files. See the <> section for details. +*`-setup`*:: +Load the sample Kibana dashboards. By default, this downloads an archive file containing the Beats dashboards +from the elastic.co website. See the <> section for more details and more options. + *`-v`*:: Enable verbose output to show INFO-level messages. diff --git a/metricbeat/docs/reference/configuration.asciidoc b/metricbeat/docs/reference/configuration.asciidoc index 0f4c26fe90bc..56d88fabb337 100644 --- a/metricbeat/docs/reference/configuration.asciidoc +++ b/metricbeat/docs/reference/configuration.asciidoc @@ -18,6 +18,7 @@ configuration settings, you need to restart {beatname_uc} to pick up the changes * <> * <> * <> +* <> * <> * <> diff --git a/metricbeat/docs/reference/configuration/metricbeat-options.asciidoc b/metricbeat/docs/reference/configuration/metricbeat-options.asciidoc index 1bbf6c2e4c54..3dce78cfd824 100644 --- a/metricbeat/docs/reference/configuration/metricbeat-options.asciidoc +++ b/metricbeat/docs/reference/configuration/metricbeat-options.asciidoc @@ -77,6 +77,9 @@ include::../../../../libbeat/docs/outputconfig.asciidoc[] pass::[] include::../../../../libbeat/docs/shared-path-config.asciidoc[] +pass::[] +include::../../../../libbeat/docs/dashboardsconfig.asciidoc[] + pass::[] include::../../../../libbeat/docs/loggingconfig.asciidoc[] diff --git a/metricbeat/metricbeat.full.yml b/metricbeat/metricbeat.full.yml index 38cc00c9cc13..830815037696 100644 --- a/metricbeat/metricbeat.full.yml +++ b/metricbeat/metricbeat.full.yml @@ -841,6 +841,45 @@ output.elasticsearch: # the default for the logs path is a logs 
subdirectory inside the home path. #path.logs: ${path.home}/logs +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag. +#dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#dashboards.url: + +# The directory from where to read the dashboards. It is used instead of the URL +# when it has a value. +#dashboards.directory: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the URL when it has a value. +#dashboards.file: + +# If this option is enabled, the snapshot URL is used instead of the default URL. +#dashboards.snapshot: false + +# The URL from where to download the snapshot version of the dashboards. By default +# this has a value which is computed based on the Beat name and version. +#dashboards.snapshot_url: + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#dashboards.beat: metricbeat + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#dashboards.kibana_index: .kibana + +# The Elasticsearch index name. This overwrites the index name defined in the +# dashboards and index pattern. Example: testbeat-* +#dashboards.index: + +#================================ Logging ====================================== +# There are three options for the log output: syslog, file, stderr. 
# Under Windows systems, the log files are per default sent to the file output, diff --git a/packetbeat/docs/reference/configuration.asciidoc b/packetbeat/docs/reference/configuration.asciidoc index db2c97cadab6..709ed29c7b63 100644 --- a/packetbeat/docs/reference/configuration.asciidoc +++ b/packetbeat/docs/reference/configuration.asciidoc @@ -21,6 +21,7 @@ configuration settings, you need to restart {beatname_uc} to pick up the changes * <> * <> * <> +* <> * <> * <> * <> diff --git a/packetbeat/docs/reference/configuration/packetbeat-options.asciidoc b/packetbeat/docs/reference/configuration/packetbeat-options.asciidoc index 7f90c349b694..f0a49bc61304 100644 --- a/packetbeat/docs/reference/configuration/packetbeat-options.asciidoc +++ b/packetbeat/docs/reference/configuration/packetbeat-options.asciidoc @@ -801,6 +801,9 @@ include::../../../../libbeat/docs/outputconfig.asciidoc[] pass::[] include::../../../../libbeat/docs/shared-path-config.asciidoc[] +pass::[] +include::../../../../libbeat/docs/dashboardsconfig.asciidoc[] + pass::[] include::../../../../libbeat/docs/loggingconfig.asciidoc[] diff --git a/packetbeat/packetbeat.full.yml b/packetbeat/packetbeat.full.yml index 6e7f6e29ae07..35d8aa0960a5 100644 --- a/packetbeat/packetbeat.full.yml +++ b/packetbeat/packetbeat.full.yml @@ -1001,6 +1001,45 @@ output.elasticsearch: # the default for the logs path is a logs subdirectory inside the home path. #path.logs: ${path.home}/logs +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag. +#dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. 
For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#dashboards.url: + +# The directory from where to read the dashboards. It is used instead of the URL +# when it has a value. +#dashboards.directory: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the URL when it has a value. +#dashboards.file: + +# If this option is enabled, the snapshot URL is used instead of the default URL. +#dashboards.snapshot: false + +# The URL from where to download the snapshot version of the dashboards. By default +# this has a value which is computed based on the Beat name and version. +#dashboards.snapshot_url: + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#dashboards.beat: packetbeat + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#dashboards.kibana_index: .kibana + +# The Elasticsearch index name. This overwrites the index name defined in the +# dashboards and index pattern. Example: testbeat-* +#dashboards.index: + +#================================ Logging ====================================== +# There are three options for the log output: syslog, file, stderr. 
# Under Windows systems, the log files are per default sent to the file output, diff --git a/winlogbeat/docs/reference/configuration.asciidoc b/winlogbeat/docs/reference/configuration.asciidoc index e100c0c92618..0335ee93974e 100644 --- a/winlogbeat/docs/reference/configuration.asciidoc +++ b/winlogbeat/docs/reference/configuration.asciidoc @@ -19,6 +19,7 @@ configuration settings, you need to restart {beatname_uc} to pick up the changes * <> * <> * <> +* <> * <> * <> diff --git a/winlogbeat/docs/reference/configuration/winlogbeat-options.asciidoc b/winlogbeat/docs/reference/configuration/winlogbeat-options.asciidoc index fbc7622e31fa..b97b9b560744 100644 --- a/winlogbeat/docs/reference/configuration/winlogbeat-options.asciidoc +++ b/winlogbeat/docs/reference/configuration/winlogbeat-options.asciidoc @@ -358,6 +358,9 @@ include::../../../../libbeat/docs/outputconfig.asciidoc[] pass::[] include::../../../../libbeat/docs/shared-path-config.asciidoc[] +pass::[] +include::../../../../libbeat/docs/dashboardsconfig.asciidoc[] + pass::[] include::../../../../libbeat/docs/loggingconfig.asciidoc[] diff --git a/winlogbeat/winlogbeat.full.yml b/winlogbeat/winlogbeat.full.yml index 38b9bd24e9f2..9b43f04aa09c 100644 --- a/winlogbeat/winlogbeat.full.yml +++ b/winlogbeat/winlogbeat.full.yml @@ -582,6 +582,45 @@ output.elasticsearch: # the default for the logs path is a logs subdirectory inside the home path. #path.logs: ${path.home}/logs +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag. +#dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. 
For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#dashboards.url: + +# The directory from where to read the dashboards. It is used instead of the URL +# when it has a value. +#dashboards.directory: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the URL when it has a value. +#dashboards.file: + +# If this option is enabled, the snapshot URL is used instead of the default URL. +#dashboards.snapshot: false + +# The URL from where to download the snapshot version of the dashboards. By default +# this has a value which is computed based on the Beat name and version. +#dashboards.snapshot_url: + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#dashboards.beat: winlogbeat + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#dashboards.kibana_index: .kibana + +# The Elasticsearch index name. This overwrites the index name defined in the +# dashboards and index pattern. Example: testbeat-* +#dashboards.index: + +#================================ Logging ====================================== +# There are three options for the log output: syslog, file, stderr. # Under Windows systems, the log files are per default sent to the file output,