From 6ffb072f548fdeaec444de7064d76ebff2fe2f67 Mon Sep 17 00:00:00 2001
From: Pavlos Rontidis
Date: Fri, 13 Oct 2023 12:55:07 -0400
Subject: [PATCH] chores(examples): Convert config/examples from TOML to YAML
 (#18832)

* chores(examples): Convert examples from TOML to YAML

* removed trailing whitespaces
---
 config/examples/docs_example.toml             | 44 ------------
 config/examples/docs_example.yaml             | 48 ++++++++++++++
 ...iables.toml => environment_variables.yaml} | 35 +++++-----
 config/examples/es_s3_hybrid.toml             | 37 -----------
 config/examples/es_s3_hybrid.yaml             | 41 ++++++++++++
 .../examples/file_to_cloudwatch_metrics.toml  | 48 --------------
 .../examples/file_to_cloudwatch_metrics.yaml  | 53 +++++++++++++++
 config/examples/file_to_prometheus.toml       | 65 -------------------
 config/examples/file_to_prometheus.yaml       | 63 ++++++++++++++++++
 .../namespacing/sinks/es_cluster.toml         |  5 --
 .../namespacing/sinks/es_cluster.yaml         |  6 ++
 .../namespacing/sinks/s3_archives.toml        | 11 ----
 .../namespacing/sinks/s3_archives.yaml        | 13 ++++
 .../namespacing/sources/apache_logs.toml      |  5 --
 .../namespacing/sources/apache_logs.yaml      |  5 ++
 .../namespacing/transforms/apache_parser.toml |  7 --
 .../namespacing/transforms/apache_parser.yaml |  6 ++
 .../namespacing/transforms/apache_sample.toml |  6 --
 .../namespacing/transforms/apache_sample.yaml |  4 ++
 config/examples/namespacing/vector.toml       |  3 -
 config/examples/namespacing/vector.yaml       |  2 +
 config/examples/prometheus_to_console.toml    | 17 -----
 config/examples/prometheus_to_console.yaml    | 20 ++++++
 config/examples/{stdio.toml => stdio.yaml}    | 15 +++--
 .../{wrapped_json.toml => wrapped_json.yaml}  | 45 +++++------
 25 files changed, 313 insertions(+), 291 deletions(-)
 delete mode 100644 config/examples/docs_example.toml
 create mode 100644 config/examples/docs_example.yaml
 rename config/examples/{environment_variables.toml => environment_variables.yaml} (65%)
 delete mode 100644 config/examples/es_s3_hybrid.toml
 create mode 100644 config/examples/es_s3_hybrid.yaml
 delete mode 100644 config/examples/file_to_cloudwatch_metrics.toml
 create mode 100644 config/examples/file_to_cloudwatch_metrics.yaml
 delete mode 100644 config/examples/file_to_prometheus.toml
 create mode 100644 config/examples/file_to_prometheus.yaml
 delete mode 100644 config/examples/namespacing/sinks/es_cluster.toml
 create mode 100644 config/examples/namespacing/sinks/es_cluster.yaml
 delete mode 100644 config/examples/namespacing/sinks/s3_archives.toml
 create mode 100644 config/examples/namespacing/sinks/s3_archives.yaml
 delete mode 100644 config/examples/namespacing/sources/apache_logs.toml
 create mode 100644 config/examples/namespacing/sources/apache_logs.yaml
 delete mode 100644 config/examples/namespacing/transforms/apache_parser.toml
 create mode 100644 config/examples/namespacing/transforms/apache_parser.yaml
 delete mode 100644 config/examples/namespacing/transforms/apache_sample.toml
 create mode 100644 config/examples/namespacing/transforms/apache_sample.yaml
 delete mode 100644 config/examples/namespacing/vector.toml
 create mode 100644 config/examples/namespacing/vector.yaml
 delete mode 100644 config/examples/prometheus_to_console.toml
 create mode 100644 config/examples/prometheus_to_console.yaml
 rename config/examples/{stdio.toml => stdio.yaml} (65%)
 rename config/examples/{wrapped_json.toml => wrapped_json.yaml} (54%)

diff --git a/config/examples/docs_example.toml b/config/examples/docs_example.toml
deleted file mode 100644
index c321af1aa8427..0000000000000
--- a/config/examples/docs_example.toml
+++ /dev/null
@@ -1,44 +0,0 @@
-# Set global options
-data_dir = "/var/lib/vector"
-
-# Ingest data by tailing one or more files
-[sources.apache_logs]
-type = "file"
-include = ["/var/log/apache2/*.log"] # supports globbing
-ignore_older = 86400 # 1 day
-
-# Structure and parse the data
-[transforms.apache_parser]
-inputs = ["apache_logs"]
-type = "remap"
-drop_on_error = false
-source = '''
-. = parse_apache_log!(.message)
-'''
-
-# Sample the data to save on cost
-[transforms.apache_sample]
-inputs = ["apache_parser"]
-type = "sample"
-rate = 2 # only keep 50% (1/`rate`)
-
-# Send structured data to a short-term storage
-[sinks.es_cluster]
-inputs = ["apache_sample"] # only take sampled data
-type = "elasticsearch"
-endpoint = "http://79.12.221.222:9200" # local or external host
-[sinks.es_cluster.bulk]
-index = "vector-%Y-%m-%d" # daily indices
-
-# Send structured data to a cost-effective long-term storage
-[sinks.s3_archives]
-inputs = ["apache_parser"] # don't sample for S3
-type = "aws_s3"
-region = "us-east-1"
-bucket = "my-log-archives"
-key_prefix = "date=%Y-%m-%d" # daily partitions, hive friendly format
-compression = "gzip" # compress final objects
-framing.method = "newline_delimited" # new line delimited...
-encoding.codec = "json" # ...JSON
-[sinks.s3_archives.batch]
-max_bytes = 10000000 # 10mb uncompressed
diff --git a/config/examples/docs_example.yaml b/config/examples/docs_example.yaml
new file mode 100644
index 0000000000000..bc51168a19e07
--- /dev/null
+++ b/config/examples/docs_example.yaml
@@ -0,0 +1,48 @@
+# Set global options
+data_dir: "/var/lib/vector"
+
+# Ingest data by tailing one or more files
+sources:
+  apache_logs:
+    type: "file"
+    include: [ "/var/log/apache2/*.log" ] # supports globbing
+    ignore_older: 86400 # 1 day
+
+# Structure and parse the data
+transforms:
+  apache_parser:
+    inputs: [ "apache_logs" ]
+    type: "remap"
+    drop_on_error: false
+    source: |
+      . = parse_apache_log!(.message)
+
+  # Sample the data to save on cost
+  apache_sample:
+    inputs: [ "apache_parser" ]
+    type: "sample"
+    rate: 2 # only keep 50% (1/`rate`)
+
+# Send structured data to a short-term storage
+sinks:
+  es_cluster:
+    inputs: [ "apache_sample" ] # only take sampled data
+    type: "elasticsearch"
+    endpoint: "http://79.12.221.222:9200" # local or external host
+    bulk:
+      index: "vector-%Y-%m-%d" # daily indices
+
+  # Send structured data to a cost-effective long-term storage
+  s3_archives:
+    inputs: [ "apache_parser" ] # don't sample for S3
+    type: "aws_s3"
+    region: "us-east-1"
+    bucket: "my-log-archives"
+    key_prefix: "date=%Y-%m-%d" # daily partitions, hive friendly format
+    compression: "gzip" # compress final objects
+    framing:
+      method: "newline_delimited" # new line delimited...
+    encoding:
+      codec: "json" # ...JSON
+    batch:
+      max_bytes: 10000000 # 10mb uncompressed
diff --git a/config/examples/environment_variables.toml b/config/examples/environment_variables.yaml
similarity index 65%
rename from config/examples/environment_variables.toml
rename to config/examples/environment_variables.yaml
index 1a2759bc65077..19324217ae4b4 100644
--- a/config/examples/environment_variables.toml
+++ b/config/examples/environment_variables.yaml
@@ -5,29 +5,32 @@
 # Variables section in our docs:
 #
 # https://vector.dev/docs/setup/configuration#environment-variables
-
-data_dir = "/var/lib/vector"
+data_dir: "/var/lib/vector"
 
 # Ingests Apache 2 log data by tailing one or more log files
 # Example: 194.221.90.140 - - [22/06/2019:11:55:14 -0400] "PUT /integrate" 100 2213
 # Docs: https://vector.dev/docs/reference/sources/file
-[sources.apache_logs]
-  type = "file"
-  include = ["/var/log/apache2/*.log"]
-  ignore_older = 86400 # 1 day
+sources:
+  apache_logs:
+    type: "file"
+    include: [ "/var/log/apache2/*.log" ]
+    # ignore files older than 1 day
+    ignore_older: 86400
 
 # Add a field based on the value of the HOSTNAME env var
 # Docs: https://vector.dev/docs/reference/transforms/remap
-[transforms.add_host]
-  inputs = ["apache_logs"]
-  type = "remap"
-  source = '''
-    .host = get_env_var!("HOSTNAME")
-  '''
+transforms:
+  add_host:
+    inputs: [ "apache_logs" ]
+    type: "remap"
+    source: |
+      .host = get_env_var!("HOSTNAME")
 
 # Print the data to STDOUT for inspection
 # Docs: https://vector.dev/docs/reference/sinks/console
-[sinks.out]
-  inputs = ["add_host"]
-  type = "console"
-  encoding.codec = "json"
+sinks:
+  out:
+    inputs: [ "add_host" ]
+    type: "console"
+    encoding:
+      codec: "json"
diff --git a/config/examples/es_s3_hybrid.toml b/config/examples/es_s3_hybrid.toml
deleted file mode 100644
index 6d2cb6ecf2767..0000000000000
--- a/config/examples/es_s3_hybrid.toml
+++ /dev/null
@@ -1,37 +0,0 @@
-# Elasticsearch / S3 Hybrid Vector Configuration Example
-# ------------------------------------------------------------------------------
-# This demonstrates a hybrid pipeline, writing data to both Elasticsearch and
-# AWS S3. This is advantageous because each storage helps to offset its
-# counterpart's weaknesses. You can provision Elasticsearch for performance
-# and delegate durability to S3.
-
-data_dir = "/var/lib/vector"
-
-# Ingest data by tailing one or more files
-# Docs: https://vector.dev/docs/reference/sources/file
-[sources.apache_logs]
-  type = "file"
-  include = ["/var/log/*.log"]
-  ignore_older = 86400 # 1 day
-
-# Optionally parse, structure and transform data here.
-# Docs: https://vector.dev/docs/reference/transforms
-
-# Send structured data to Elasticsearch for searching of recent data
-[sinks.es_cluster]
-  inputs = ["apache_logs"]
-  type = "elasticsearch"
-  endpoint = "79.12.221.222:9200"
-  doc_type = "_doc"
-
-# Send structured data to S3, a durable long-term storage
-[sinks.s3_archives]
-  inputs = ["apache_logs"] # don't sample
-  type = "aws_s3"
-  region = "us-east-1"
-  bucket = "my_log_archives"
-  framing.method = "newline_delimited"
-  encoding.codec = "json"
-  compression = "gzip"
-  [sinks.s3_archives.batch]
-    max_size = 10000000 # 10mb uncompressed
diff --git a/config/examples/es_s3_hybrid.yaml b/config/examples/es_s3_hybrid.yaml
new file mode 100644
index 0000000000000..ba0719f7c31b3
--- /dev/null
+++ b/config/examples/es_s3_hybrid.yaml
@@ -0,0 +1,41 @@
+# Elasticsearch / S3 Hybrid Vector Configuration Example
+# ------------------------------------------------------------------------------
+# This demonstrates a hybrid pipeline, writing data to both Elasticsearch and
+# AWS S3. This is advantageous because each storage helps to offset its
+# counterpart's weaknesses. You can provision Elasticsearch for performance
+# and delegate durability to S3.
+
+data_dir: "/var/lib/vector"
+
+# Ingest data by tailing one or more files
+# Docs: https://vector.dev/docs/reference/sources/file
+sources:
+  apache_logs:
+    type: "file"
+    include: ["/var/log/*.log"]
+    ignore_older: 86400 # 1 day
+
+# Optionally parse, structure and transform data here.
+# Docs: https://vector.dev/docs/reference/transforms
+
+# Send structured data to Elasticsearch for searching of recent data
+sinks:
+  es_cluster:
+    inputs: ["apache_logs"]
+    type: "elasticsearch"
+    endpoint: "79.12.221.222:9200"
+    doc_type: "_doc"
+
+  # Send structured data to S3, a durable long-term storage
+  s3_archives:
+    inputs: ["apache_logs"] # don't sample
+    type: "aws_s3"
+    region: "us-east-1"
+    bucket: "my_log_archives"
+    framing:
+      method: "newline_delimited"
+    encoding:
+      codec: "json"
+    compression: "gzip"
+    batch:
+      max_size: 10000000 # 10mb uncompressed
diff --git a/config/examples/file_to_cloudwatch_metrics.toml b/config/examples/file_to_cloudwatch_metrics.toml
deleted file mode 100644
index affdc2995388e..0000000000000
--- a/config/examples/file_to_cloudwatch_metrics.toml
+++ /dev/null
@@ -1,48 +0,0 @@
-# Parsing logs as metrics and sending to CloudWatch
-# ------------------------------------------------------------------------------
-# WIP
-
-data_dir = "/var/lib/vector"
-
-# Ingest
-[sources.file]
-type = "file"
-include = ["sample.log"]
-start_at_beginning = true
-
-# Structure and parse the data
-[transforms.remap]
-inputs = ["file"]
-type = "remap"
-drop_on_error = false
-source = '''
-. |= parse_apache_log!(string!(.message), "common")
-'''
-
-# Transform into metrics
-[transforms.log_to_metric]
-inputs = ["remap"]
-type = "log_to_metric"
-
-[[transforms.log_to_metric.metrics]]
-type = "counter"
-increment_by_value = true
-field = "bytes_out"
-tags = {method = "{{method}}", status = "{{status}}"}
-
-# Output data
-[sinks.console_metrics]
-inputs = ["log_to_metric"]
-type = "console"
-encoding.codec = "json"
-
-[sinks.console_logs]
-inputs = ["remap"]
-type = "console"
-encoding.codec = "json"
-
-[sinks.cloudwatch]
-inputs = ["log_to_metric"]
-type = "aws_cloudwatch_metrics"
-namespace = "vector"
-endpoint = "http://localhost:4566"
diff --git a/config/examples/file_to_cloudwatch_metrics.yaml b/config/examples/file_to_cloudwatch_metrics.yaml
new file mode 100644
index 0000000000000..f322ab8f091b0
--- /dev/null
+++ b/config/examples/file_to_cloudwatch_metrics.yaml
@@ -0,0 +1,53 @@
+# Parsing logs as metrics and sending to CloudWatch
+# ------------------------------------------------------------------------------
+# WIP
+
+data_dir: "/var/lib/vector"
+
+# Ingest
+sources:
+  file:
+    type: "file"
+    include: [ "sample.log" ]
+    start_at_beginning: true
+
+# Structure and parse the data
+transforms:
+  remap:
+    inputs: [ "file" ]
+    type: "remap"
+    drop_on_error: false
+    source: |
+      . |= parse_apache_log!(string!(.message), "common")
+
+  # Transform into metrics
+  log_to_metric:
+    inputs: [ "remap" ]
+    type: "log_to_metric"
+    metrics:
+      - type: "counter"
+        increment_by_value: true
+        field: "bytes_out"
+        tags:
+          method: "{{method}}"
+          status: "{{status}}"
+
+# Output data
+sinks:
+  console_metrics:
+    inputs: [ "log_to_metric" ]
+    type: "console"
+    encoding:
+      codec: "json"
+
+  console_logs:
+    inputs: [ "remap" ]
+    type: "console"
+    encoding:
+      codec: "json"
+
+  cloudwatch:
+    inputs: [ "log_to_metric" ]
+    type: "aws_cloudwatch_metrics"
+    namespace: "vector"
+    endpoint: "http://localhost:4566"
diff --git a/config/examples/file_to_prometheus.toml b/config/examples/file_to_prometheus.toml
deleted file mode 100644
index 7219ef760bfdc..0000000000000
--- a/config/examples/file_to_prometheus.toml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Prometheus sink example
-# ------------------------------------------------------------------------------
-# Parsing logs as metrics and exposing into Prometheus
-
-data_dir = "/var/lib/vector"
-
-# Ingest
-[sources.file]
-type = "file"
-include = ["sample.log"]
-start_at_beginning = true
-
-# Structure and parse the data
-[transforms.remap]
-inputs = ["file"]
-type = "remap"
-drop_on_error = false
-source = '''
-. |= parse_apache_log!(string!(.message), "common")
-'''
-
-# Transform into metrics
-[transforms.log_to_metric]
-inputs = ["remap"]
-type = "log_to_metric"
-
-[[transforms.log_to_metric.metrics]]
-type = "counter"
-field = "message"
-
-[[transforms.log_to_metric.metrics]]
-type = "counter"
-increment_by_value = true
-field = "bytes_out"
-name = "bytes_out_total"
-
-[[transforms.log_to_metric.metrics]]
-type = "gauge"
-field = "bytes_out"
-
-[[transforms.log_to_metric.metrics]]
-type = "set"
-field = "user"
-
-[[transforms.log_to_metric.metrics]]
-type = "histogram"
-field = "bytes_out"
-name = "bytes_out_histogram"
-
-# Output data
-[sinks.console_metrics]
-inputs = ["log_to_metric"]
-type = "console"
-encoding.codec = "json"
-
-[sinks.console_logs]
-inputs = ["remap"]
-type = "console"
-encoding.codec = "text"
-
-[sinks.prometheus]
-inputs = ["log_to_metric"]
-type = "prometheus_exporter"
-default_namespace = "vector"
-buckets = [0.0, 10.0, 100.0, 1000.0, 10000.0, 100001.0]
diff --git a/config/examples/file_to_prometheus.yaml b/config/examples/file_to_prometheus.yaml
new file mode 100644
index 0000000000000..b3db53f1082fb
--- /dev/null
+++ b/config/examples/file_to_prometheus.yaml
@@ -0,0 +1,63 @@
+# Prometheus sink example
+# ----------------------------------------------------
+# Parsing logs as metrics and exposing into Prometheus
+data_dir: "/var/lib/vector"
+
+# Ingest
+sources:
+  file:
+    type: "file"
+    include: [ "sample.log" ]
+    start_at_beginning: true
+
+# Structure and parse the data
+transforms:
+  remap:
+    inputs: [ "file" ]
+    type: "remap"
+    drop_on_error: false
+    source: |
+      . |= parse_apache_log!(string!(.message), "common")
+
+  # Transform into metrics
+  log_to_metric:
+    inputs: [ "remap" ]
+    type: "log_to_metric"
+    metrics:
+      - type: "counter"
+        field: "message"
+
+      - type: "counter"
+        increment_by_value: true
+        field: "bytes_out"
+        name: "bytes_out_total"
+
+      - type: "gauge"
+        field: "bytes_out"
+
+      - type: "set"
+        field: "user"
+
+      - type: "histogram"
+        field: "bytes_out"
+        name: "bytes_out_histogram"
+
+# Output data
+sinks:
+  console_metrics:
+    inputs: [ "log_to_metric" ]
+    type: "console"
+    encoding:
+      codec: "json"
+
+  console_logs:
+    inputs: [ "remap" ]
+    type: "console"
+    encoding:
+      codec: "text"
+
+  prometheus:
+    inputs: [ "log_to_metric" ]
+    type: "prometheus_exporter"
+    default_namespace: "vector"
+    buckets: [ 0.0, 10.0, 100.0, 1000.0, 10000.0, 100001.0 ]
diff --git a/config/examples/namespacing/sinks/es_cluster.toml b/config/examples/namespacing/sinks/es_cluster.toml
deleted file mode 100644
index 6b3b5fd4dd73a..0000000000000
--- a/config/examples/namespacing/sinks/es_cluster.toml
+++ /dev/null
@@ -1,5 +0,0 @@
-# Send structured data to a short-term storage
-inputs = ["apache_sample"] # only take sampled data
-type = "elasticsearch"
-endpoint = "http://79.12.221.222:9200" # local or external host
-bulk.index = "vector-%Y-%m-%d" # daily indices
diff --git a/config/examples/namespacing/sinks/es_cluster.yaml b/config/examples/namespacing/sinks/es_cluster.yaml
new file mode 100644
index 0000000000000..820a49080b474
--- /dev/null
+++ b/config/examples/namespacing/sinks/es_cluster.yaml
@@ -0,0 +1,6 @@
+# Send structured data to a short-term storage
+inputs: ["apache_sample"] # only take sampled data
+type: "elasticsearch"
+endpoint: "http://79.12.221.222:9200" # local or external host
+bulk:
+  index: "vector-%Y-%m-%d" # daily indices
diff --git a/config/examples/namespacing/sinks/s3_archives.toml b/config/examples/namespacing/sinks/s3_archives.toml
deleted file mode 100644
index 4f40df9a58212..0000000000000
--- a/config/examples/namespacing/sinks/s3_archives.toml
+++ /dev/null
@@ -1,11 +0,0 @@
-# Send structured data to a cost-effective long-term storage
-inputs = ["apache_parser"] # don't sample for S3
-type = "aws_s3"
-region = "us-east-1"
-bucket = "my-log-archives"
-key_prefix = "date=%Y-%m-%d" # daily partitions, hive friendly format
-compression = "gzip" # compress final objects
-framing.method = "newline_delimited" # new line delimited...
-encoding.codec = "json" # ...JSON
-[batch]
-  max_bytes = 10000000 # 10mb uncompressed
diff --git a/config/examples/namespacing/sinks/s3_archives.yaml b/config/examples/namespacing/sinks/s3_archives.yaml
new file mode 100644
index 0000000000000..a72c27027e8a1
--- /dev/null
+++ b/config/examples/namespacing/sinks/s3_archives.yaml
@@ -0,0 +1,13 @@
+# Send structured data to a cost-effective long-term storage
+inputs: ["apache_parser"] # don't sample for S3
+type: "aws_s3"
+region: "us-east-1"
+bucket: "my-log-archives"
+key_prefix: "date=%Y-%m-%d" # daily partitions, hive friendly format
+compression: "gzip" # compress final objects
+framing:
+  method: "newline_delimited" # new line delimited...
+encoding:
+  codec: "json" # ...JSON
+batch:
+  max_bytes: 10000000 # 10mb uncompressed
diff --git a/config/examples/namespacing/sources/apache_logs.toml b/config/examples/namespacing/sources/apache_logs.toml
deleted file mode 100644
index 4dd423d3f7de2..0000000000000
--- a/config/examples/namespacing/sources/apache_logs.toml
+++ /dev/null
@@ -1,5 +0,0 @@
-# Ingest data by tailing one or more files
-type = "file"
-include = ["/var/log/apache2/*.log"] # supports globbing
-ignore_older = 86400 # 1 day
-
diff --git a/config/examples/namespacing/sources/apache_logs.yaml b/config/examples/namespacing/sources/apache_logs.yaml
new file mode 100644
index 0000000000000..5537f1b6f4710
--- /dev/null
+++ b/config/examples/namespacing/sources/apache_logs.yaml
@@ -0,0 +1,5 @@
+# Ingest data by tailing one or more files
+type: "file"
+include: # supports globbing
+  - "/var/log/apache2/*.log"
+ignore_older: 86400 # 1 day
diff --git a/config/examples/namespacing/transforms/apache_parser.toml b/config/examples/namespacing/transforms/apache_parser.toml
deleted file mode 100644
index 01a19c08d9a8d..0000000000000
--- a/config/examples/namespacing/transforms/apache_parser.toml
+++ /dev/null
@@ -1,7 +0,0 @@
-# Structure and parse the data
-inputs = ["apache_logs"]
-type = "remap"
-drop_on_error = false
-source = '''
-. |= parse_apache_log!(string!(.message), "common")
-'''
diff --git a/config/examples/namespacing/transforms/apache_parser.yaml b/config/examples/namespacing/transforms/apache_parser.yaml
new file mode 100644
index 0000000000000..dec90f4bfebac
--- /dev/null
+++ b/config/examples/namespacing/transforms/apache_parser.yaml
@@ -0,0 +1,6 @@
+# Structure and parse the data
+inputs: ["apache_logs"]
+type: "remap"
+drop_on_error: false
+source: |
+  . |= parse_apache_log!(string!(.message), "common")
diff --git a/config/examples/namespacing/transforms/apache_sample.toml b/config/examples/namespacing/transforms/apache_sample.toml
deleted file mode 100644
index d9bb8e6d3e3eb..0000000000000
--- a/config/examples/namespacing/transforms/apache_sample.toml
+++ /dev/null
@@ -1,6 +0,0 @@
-# Sample the data to save on cost
-inputs = ["apache_parser"]
-type = "sample"
-rate = 2 # only keep 50% (1/`rate`)
-
-
diff --git a/config/examples/namespacing/transforms/apache_sample.yaml b/config/examples/namespacing/transforms/apache_sample.yaml
new file mode 100644
index 0000000000000..91b789e1016f2
--- /dev/null
+++ b/config/examples/namespacing/transforms/apache_sample.yaml
@@ -0,0 +1,4 @@
+# Sample the data to save on cost
+inputs: ["apache_parser"]
+type: "sample"
+rate: 2 # only keep 50% (1/`rate`)
diff --git a/config/examples/namespacing/vector.toml b/config/examples/namespacing/vector.toml
deleted file mode 100644
index 6419d4ea4f413..0000000000000
--- a/config/examples/namespacing/vector.toml
+++ /dev/null
@@ -1,3 +0,0 @@
-# Set global options
-data_dir = "/var/lib/vector"
-
diff --git a/config/examples/namespacing/vector.yaml b/config/examples/namespacing/vector.yaml
new file mode 100644
index 0000000000000..14640214edb4e
--- /dev/null
+++ b/config/examples/namespacing/vector.yaml
@@ -0,0 +1,2 @@
+# Set global options
+data_dir: "/var/lib/vector"
diff --git a/config/examples/prometheus_to_console.toml b/config/examples/prometheus_to_console.toml
deleted file mode 100644
index 8b1e5f22d36c0..0000000000000
--- a/config/examples/prometheus_to_console.toml
+++ /dev/null
@@ -1,17 +0,0 @@
-# Prometheus source example
-# ------------------------------------------------------------------------------
-# Scraping Prometheus metrics and printing them into console
-
-data_dir = "/var/lib/vector"
-
-# Ingest
-[sources.prometheus]
-type = "prometheus_scrape"
-hosts = ["http://127.0.0.1:9090", "http://127.0.0.1:9090"]
-scrape_interval_secs = 2
-
-# Output
-[sinks.console]
-inputs = ["prometheus"]
-type = "console"
-encoding.codec = "json"
diff --git a/config/examples/prometheus_to_console.yaml b/config/examples/prometheus_to_console.yaml
new file mode 100644
index 0000000000000..b8bd75ced04a2
--- /dev/null
+++ b/config/examples/prometheus_to_console.yaml
@@ -0,0 +1,20 @@
+# Prometheus source example
+# ------------------------------------------------------------------------------
+# Scraping Prometheus metrics and printing them into console
+
+data_dir: "/var/lib/vector"
+
+# Ingest
+sources:
+  prometheus:
+    type: "prometheus_scrape"
+    hosts: ["http://127.0.0.1:9090", "http://127.0.0.1:9090"]
+    scrape_interval_secs: 2
+
+# Output
+sinks:
+  console:
+    inputs: ["prometheus"]
+    type: "console"
+    encoding:
+      codec: "json"
diff --git a/config/examples/stdio.toml b/config/examples/stdio.yaml
similarity index 65%
rename from config/examples/stdio.toml
rename to config/examples/stdio.yaml
index 42a839090f817..cbe6429d8afae 100644
--- a/config/examples/stdio.toml
+++ b/config/examples/stdio.yaml
@@ -5,10 +5,13 @@
 #
 # https://vector.dev/guides/getting-started
 
-[sources.in]
-  type = "stdin"
+sources:
+  in:
+    type: "stdin"
 
-[sinks.out]
-  inputs = ["in"]
-  type = "console"
-  encoding.codec = "text"
+sinks:
+  out:
+    inputs: [ "in" ]
+    type: "console"
+    encoding:
+      codec: "text"
diff --git a/config/examples/wrapped_json.toml b/config/examples/wrapped_json.yaml
similarity index 54%
rename from config/examples/wrapped_json.toml
rename to config/examples/wrapped_json.yaml
index c8fa911ddc8bd..5659cb39f4ede 100644
--- a/config/examples/wrapped_json.toml
+++ b/config/examples/wrapped_json.yaml
@@ -4,36 +4,39 @@
 # demonstrated wrapped JSON, showing that you can chain VRL expressions
 # together to quickly traverse nested objects with simple path notation.
 
-data_dir = "/var/lib/vector"
+data_dir: "/var/lib/vector"
 
 # Ingest data
 # Example: {"message": "{\"parent\": \"{\\\"child\\\": \\\"value2\\\"}\"}"}
 # Docs: https://vector.dev/docs/reference/sources/file
-[sources.logs]
-  type = "file"
-  include = ["/var/log/*.log"]
-  ignore_older = 86400 # 1 day
+sources:
+  logs:
+    type: "file"
+    include: [ "/var/log/*.log" ]
+    ignore_older: 86400 # 1 day
 
 # Parse the data as JSON
 # Docs: https://vector.dev/docs/reference/transforms/remap
-[transforms.parse_json]
-  inputs = ["logs"]
-  type = "remap"
-  drop_on_error = false
-  source = '''
-    message = del(.message)
-    . |= parse_json!(string!(message))
-
-    parent = del(.parent)
-    . |= parse_json!(string!(parent))
-
-    child = del(.child)
-    . |= parse_json!(string!(child))
-  '''
+transforms:
+  parse_json:
+    inputs: [ "logs" ]
+    type: "remap"
+    drop_on_error: false
+    source: |
+      message = del(.message)
+      . |= parse_json!(string!(message))
+
+      parent = del(.parent)
+      . |= parse_json!(string!(parent))
+
+      child = del(.child)
+      . |= parse_json!(string!(child))
 
 # Print the data to STDOUT for inspection
 # Docs: https://vector.dev/docs/reference/sinks/console
-[sinks.out]
-  inputs = ["parse_json"]
-  type = "console"
-  encoding.codec = "json"
+sinks:
+  out:
+    inputs: [ "parse_json" ]
+    type: "console"
+    encoding:
+      codec: "json"
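
Editor's note on the migration pattern: the corrections above all come down to one rule — YAML has no TOML-style ''' triple-quoted strings, so multi-line VRL programs must move into a block scalar (source: |). The script below is a hypothetical helper, not part of this PR: it sketches the mechanical part of a TOML-to-YAML conversion under the assumption of Python 3.11+ (for the stdlib tomllib) and an installed PyYAML. A mechanical round-trip drops comments, which is exactly why the examples in this patch were converted by hand.

    # toml2yaml.py -- hypothetical helper, not part of this PR.
    # Mechanically re-serializes a TOML config as YAML; comments are lost.
    import sys
    import tomllib  # stdlib TOML parser, Python 3.11+

    import yaml  # PyYAML, assumed installed


    def toml_to_yaml(path: str) -> str:
        """Parse a TOML file and dump the same data as YAML."""
        with open(path, "rb") as f:  # tomllib requires a binary file handle
            data = tomllib.load(f)
        # sort_keys=False keeps the original section order (data_dir,
        # sources, transforms, sinks) instead of alphabetizing keys;
        # default_flow_style=False produces block-style mappings like
        # the hand-converted files above.
        return yaml.safe_dump(data, sort_keys=False, default_flow_style=False)


    if __name__ == "__main__":
        for toml_path in sys.argv[1:]:
            sys.stdout.write(toml_to_yaml(toml_path))

Whether converted by hand or by script, the result should be checked with Vector itself — something like `vector validate --no-environment config/examples/docs_example.yaml` — which catches stray ''' scalars of the kind fixed above before they ship.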