chores(examples): Convert config/examples from TOML to YAML (vectordotdev#18832)

* chores(examples): Convert examples from TOML to YAML
* Removed trailing whitespace
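For context on what the diff below shows: the conversion is a mechanical mapping from TOML tables to YAML mappings, with values unchanged. A minimal before/after sketch based on the file source used in the examples below (the TOML side is shown as comments for illustration, since the deleted files are not rendered on this page):

# Before (TOML):
#   [sources.apache_logs]
#   type = "file"
#   include = ["/var/log/*.log"]
#   ignore_older = 86400

# After (YAML):
sources:
  apache_logs:
    type: "file"
    include: ["/var/log/*.log"]
    ignore_older: 86400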
Showing 25 changed files with 317 additions and 291 deletions.
@@ -0,0 +1,48 @@
# Set global options
data_dir: "/var/lib/vector"

# Ingest data by tailing one or more files
sources:
  apache_logs:
    type: "file"
    include: ["/var/log/apache2/*.log"] # supports globbing
    ignore_older: 86400 # 1 day

# Structure and parse the data
transforms:
  apache_parser:
    inputs: ["apache_logs"]
    type: "remap"
    drop_on_error: false
    source: |
      # "combined" assumed here; use "common" or "error" to match your log format
      . = parse_apache_log!(string!(.message), "combined")

  apache_sample:
    inputs: ["apache_parser"]
    type: "sample"
    rate: 2 # only keep 50% (1/`rate`)

# Send structured data to a short-term storage
sinks:
  es_cluster:
    inputs: ["apache_sample"] # only take sampled data
    type: "elasticsearch"
    endpoint: "http://79.12.221.222:9200" # local or external host
    bulk:
      index: "vector-%Y-%m-%d" # daily indices

  # Send structured data to a cost-effective long-term storage
  s3_archives:
    inputs: ["apache_parser"] # don't sample for S3
    type: "aws_s3"
    region: "us-east-1"
    bucket: "my-log-archives"
    key_prefix: "date=%Y-%m-%d" # daily partitions, hive friendly format
    compression: "gzip" # compress final objects
    framing:
      method: "newline_delimited" # new line delimited...
    encoding:
      codec: "json" # ...JSON
    batch:
      max_bytes: 10000000 # 10mb uncompressed
@@ -0,0 +1,41 @@
# Elasticsearch / S3 Hybrid Vector Configuration Example
# ------------------------------------------------------------------------------
# This demonstrates a hybrid pipeline, writing data to both Elasticsearch and
# AWS S3. This is advantageous because each storage helps to offset its
# counterpart's weaknesses. You can provision Elasticsearch for performance
# and delegate durability to S3.

data_dir: "/var/lib/vector"

# Ingest data by tailing one or more files
# Docs: https://vector.dev/docs/reference/sources/file
sources:
  apache_logs:
    type: "file"
    include: ["/var/log/*.log"]
    ignore_older: 86400 # 1 day

# Optionally parse, structure and transform data here.
# Docs: https://vector.dev/docs/reference/transforms

# Send structured data to Elasticsearch for searching of recent data
sinks:
  es_cluster:
    inputs: ["apache_logs"]
    type: "elasticsearch"
    endpoint: "http://79.12.221.222:9200" # the URL scheme is required
    doc_type: "_doc"

  # Send structured data to S3, a durable long-term storage
  s3_archives:
    inputs: ["apache_logs"] # don't sample
    type: "aws_s3"
    region: "us-east-1"
    bucket: "my_log_archives"
    framing:
      method: "newline_delimited"
    encoding:
      codec: "json"
    compression: "gzip"
    batch:
      max_bytes: 10000000 # 10mb uncompressed
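Since the header comment delegates durability to S3, a natural hardening step is a disk buffer on the S3 sink, so queued events survive process restarts. A sketch of the extra keys one might add to s3_archives (not part of the original example; max_size is shown at roughly the documented minimum for disk buffers):

  s3_archives:
    # ... options as above, plus:
    buffer:
      type: "disk"
      max_size: 268435488 # bytes (~256 MB)
      when_full: "block" # apply backpressure instead of dropping events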
@@ -0,0 +1,53 @@
# Parsing logs as metrics and sending to CloudWatch
# ------------------------------------------------------------------------------
# WIP

data_dir: "/var/lib/vector"

# Ingest
sources:
  file:
    type: "file"
    include: ["sample.log"]
    start_at_beginning: true

# Structure and parse the data
transforms:
  remap:
    inputs: ["file"]
    type: "remap"
    drop_on_error: false
    source: |
      . |= parse_apache_log!(string!(.message), "common")

  # Transform into metrics
  log_to_metric:
    inputs: ["remap"]
    type: "log_to_metric"
    metrics:
      - type: "counter"
        increment_by_value: true
        field: "bytes_out"
        tags:
          method: "{{method}}"
          status: "{{status}}"

# Output data
sinks:
  console_metrics:
    inputs: ["log_to_metric"]
    type: "console"
    encoding:
      codec: "json"

  console_logs:
    inputs: ["remap"]
    type: "console"
    encoding:
      codec: "json"

  cloudwatch:
    inputs: ["log_to_metric"]
    type: "aws_cloudwatch_metrics"
    namespace: "vector"
    endpoint: "http://localhost:4566"