
Commit

chores(examples): Convert config/examples from TOML to YAML (vectordotdev#18832)

* chores(examples): Convert examples from TOML to YAML

* removed trailing whitespaces
pront authored Oct 13, 2023
1 parent efb0d1a commit 6ffb072
Showing 25 changed files with 317 additions and 291 deletions.
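
Every file in this commit follows the same mechanical conversion: TOML table headers such as [sources.apache_logs] become nested YAML mappings, key = value becomes key: value, and TOML multi-line strings ('''...''') become YAML block scalars (|). A minimal sketch of the mapping, using names taken from the examples below (the TOML form is shown in comments):

# TOML (before):
#   data_dir = "/var/lib/vector"
#
#   [sources.apache_logs]
#   type = "file"
#   include = ["/var/log/apache2/*.log"]

# YAML (after):
data_dir: "/var/lib/vector"

sources:
  apache_logs:
    type: "file"
    include: [ "/var/log/apache2/*.log" ]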
44 changes: 0 additions & 44 deletions config/examples/docs_example.toml

This file was deleted.

48 changes: 48 additions & 0 deletions config/examples/docs_example.yaml
@@ -0,0 +1,48 @@
# Set global options
"data_dir": "/var/lib/vector"

# Ingest data by tailing one or more files
"sources":
  "apache_logs":
    "type": "file"
    "include": [ "/var/log/apache2/*.log" ] # supports globbing
    "ignore_older": 86400 # 1 day

# Structure and parse the data
"transforms":
  "apache_parser":
    "inputs": [ "apache_logs" ]
    "type": "remap"
    "drop_on_error": false
"source": '''
. = parse_apache_log!(.message)
'''

"apache_sample":
"inputs": [ "apache_parser" ]
"type": "sample"
"rate": 2 # only keep 50% (1/`rate`)

# Send structured data to a short-term storage
"sinks":
"es_cluster":
"inputs": [ "apache_sample" ] # only take sampled data
"type": "elasticsearch"
"endpoint": "http://79.12.221.222:9200" # local or external host
"bulk":
"index": "vector-%Y-%m-%d" # daily indices

# Send structured data to a cost-effective long-term storage
"s3_archives":
"inputs": [ "apache_parser" ] # don't sample for S3
"type": "aws_s3"
"region": "us-east-1"
"bucket": "my-log-archives"
"key_prefix": "date=%Y-%m-%d" # daily partitions, hive friendly format
"compression": "gzip" # compress final objects
"framing":
"method": "newline_delimited" # new line delimited...
"encoding":
"codec": "json" # ...JSON
"batch":
"max_bytes": 10000000 # 10mb uncompressed
@@ -5,29 +5,34 @@
# Variables section in our docs:
#
# https://vector.dev/docs/setup/configuration#environment-variables

-data_dir = "/var/lib/vector"
+data_dir: "/var/lib/vector"

# Ingests Apache 2 log data by tailing one or more log files
# Example: 194.221.90.140 - - [22/06/2019:11:55:14 -0400] "PUT /integrate" 100 2213
# Docs: https://vector.dev/docs/reference/sources/file
-[sources.apache_logs]
-type = "file"
-include = ["/var/log/apache2/*.log"]
-ignore_older = 86400 # 1 day
+sources:
+  apache_logs:
+    type: "file"
+    include: [ "/var/log/apache2/*.log" ]
+    # ignore files older than 1 day
+    ignore_older: 86400

# Add a field based on the value of the HOSTNAME env var
# Docs: https://vector.dev/docs/reference/transforms/remap
-[transforms.add_host]
-inputs = ["apache_logs"]
-type = "remap"
-source = '''
-.host = get_env_var!("HOSTNAME")
-'''
+transforms:
+  add_host:
+    inputs: [ "apache_logs" ]
+    type: "remap"
+    source: |
+      .host = get_env_var!("HOSTNAME")

# Print the data to STDOUT for inspection
# Docs: https://vector.dev/docs/reference/sinks/console
-[sinks.out]
-inputs = ["add_host"]
-type = "console"
-encoding.codec = "json"
+sinks:
+  out:
+    inputs: [ "add_host" ]
+    type: "console"
+    encoding:
+      codec: "json"
37 changes: 0 additions & 37 deletions config/examples/es_s3_hybrid.toml

This file was deleted.

41 changes: 41 additions & 0 deletions config/examples/es_s3_hybrid.yaml
@@ -0,0 +1,41 @@
# Elasticsearch / S3 Hybrid Vector Configuration Example
# ------------------------------------------------------------------------------
# This demonstrates a hybrid pipeline, writing data to both Elasticsearch and
# AWS S3. This is advantageous because each storage helps to offset its
# counterpart's weaknesses. You can provision Elasticsearch for performance
# and delegate durability to S3.

data_dir: "/var/lib/vector"

# Ingest data by tailing one or more files
# Docs: https://vector.dev/docs/reference/sources/file
sources:
  apache_logs:
    type: "file"
    include: ["/var/log/*.log"]
    ignore_older: 86400 # 1 day

# Optionally parse, structure and transform data here.
# Docs: https://vector.dev/docs/reference/transforms

# Send structured data to Elasticsearch for searching of recent data
sinks:
  es_cluster:
    inputs: ["apache_logs"]
    type: "elasticsearch"
    endpoint: "79.12.221.222:9200"
    doc_type: "_doc"

  # Send structured data to S3, a durable long-term storage
  s3_archives:
    inputs: ["apache_logs"] # don't sample
    type: "aws_s3"
    region: "us-east-1"
    bucket: "my_log_archives"
    framing:
      method: "newline_delimited"
    encoding:
      codec: "json"
    compression: "gzip"
    batch:
      max_size: 10000000 # 10mb uncompressed
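
Because the header comment positions Elasticsearch as the performance tier and S3 as the durability tier, a common hardening step is to put a disk buffer on the Elasticsearch sink so brief outages back up to disk instead of dropping events. A rough sketch, assuming the generic sink-level buffer options (exact option names and minimum sizes should be verified against the elasticsearch sink reference):

sinks:
  es_cluster:
    inputs: ["apache_logs"]
    type: "elasticsearch"
    endpoint: "79.12.221.222:9200"
    doc_type: "_doc"
    # Assumed buffer options; values are illustrative.
    buffer:
      type: "disk"
      max_size: 268435488 # bytes (~256 MiB)
      when_full: "block"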
48 changes: 0 additions & 48 deletions config/examples/file_to_cloudwatch_metrics.toml

This file was deleted.

53 changes: 53 additions & 0 deletions config/examples/file_to_cloudwatch_metrics.yaml
@@ -0,0 +1,53 @@
# Parsing logs as metrics and sending to CloudWatch
# ------------------------------------------------------------------------------
# WIP

data_dir: "/var/lib/vector"

# Ingest
sources:
  file:
    type: "file"
    include: [ "sample.log" ]
    start_at_beginning: true

# Structure and parse the data
transforms:
  remap:
    inputs: [ "file" ]
    type: "remap"
    drop_on_error: false
    source: |
      . |= parse_apache_log!(string!(.message), "common")

  # Transform into metrics
  log_to_metric:
    inputs: [ "remap" ]
    type: "log_to_metric"
    metrics:
      - type: "counter"
        increment_by_value: true
        field: "bytes_out"
        tags:
          method: "{{method}}"
          status: "{{status}}"

# Output data
sinks:
  console_metrics:
    inputs: [ "log_to_metric" ]
    type: "console"
    encoding:
      codec: "json"

  console_logs:
    inputs: [ "remap" ]
    type: "console"
    encoding:
      codec: "json"

  cloudwatch:
    inputs: [ "log_to_metric" ]
    type: "aws_cloudwatch_metrics"
    namespace: "vector"
    endpoint: "http://localhost:4566"
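
In the log_to_metric transform above, increment_by_value: true makes the counter add the numeric value of bytes_out on each event; without that flag, a counter increments by one per matching event. A hedged sketch of adding a plain request counter alongside it (the name option and the requests_total metric name are assumptions to verify against the log_to_metric reference):

transforms:
  log_to_metric:
    inputs: [ "remap" ]
    type: "log_to_metric"
    metrics:
      - type: "counter"
        increment_by_value: true
        field: "bytes_out"
        tags:
          method: "{{method}}"
          status: "{{status}}"
      # Assumed second metric: increments by one per event instead of summing a field.
      - type: "counter"
        field: "status" # the field only needs to exist on the event
        name: "requests_total" # hypothetical metric name
        tags:
          status: "{{status}}"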
65 changes: 0 additions & 65 deletions config/examples/file_to_prometheus.toml

This file was deleted.

