#######################################################################
# #
# kafkatee configuration file #
# #
# #
#######################################################################
# #
# Syntax: #
# <property-name> = <value> #
# input <type args..> #
# output <type arg..> #
# #
# Boolean property values: #
# >0, "true", "yes", "on", "" - interpreted as true #
# everything else - interpreted as false #
# #
# #
# The configuration file consists of: #
# - Configuration properties (key = value) to control various #
# aspects of kafkatee. #
# - Inputs #
# - Outputs #
# #
#######################################################################
#######################################################################
# #
# Misc configuration #
# #
#######################################################################
# Pid file location
# Default: /run/kafkatee.pid
#pid.file.path = kafkatee.pid
# Daemonize (background)
# Default: true
#daemonize = false
# Logging output level
# 1 = only emergencies .. 6 = info, 7 = debug
# Default: 6 (info)
#log.level = 7
#
# JSON Statistics
#
# Statistics are collected from kafkatee itself(*) as well as from librdkafka.
# Each JSON object has a top level key of either 'kafkatee' or
# 'kafka' to indicate which type of statistics the object contains.
# Each line is a valid JSON object.
#
# *: kafkatee does not currently output any stats of its own, just from rdkafka.
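# For illustration only, an emitted statistics line therefore has roughly
# this shape (the inner fields come from librdkafka and are elided here):
#   { "kafka": { ... } }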
#
# Statistics output interval
# Defaults to 60 seconds, use 0 to disable.
#log.statistics.interval = 60
# Statistics output file
# Defaults to /tmp/kafkatee.stats.json
#log.statistics.file = /tmp/kafkatee.stats.json
# Command to run on startup, before starting IO.
# Default: none
#command.init = ./my-script.sh
# Command to run on termination after all IOs have been stopped.
# Default: none
#command.term = ./my-cleanup-script.sh
# Set an environment variable which will be available for all subsequent
# command executions (command.*, input pipe, output pipe, ..)
#setenv.NMSGS=12
# clear:
#setenv.LD_LIBRARY_PATH=
#######################################################################
# #
# Kafka configuration #
# #
# Kafka configuration properties are prefixed with "kafka." #
# and topic properties are prefixed with "kafka.topic.". #
# #
# For the full range of Kafka handle and topic configuration #
# properties, see: #
# https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md #
# #
# And the Apache Kafka configuration reference: #
# http://kafka.apache.org/08/configuration.html #
# #
#######################################################################
# Initial list of kafka brokers
# Default: none
#kafka.metadata.broker.list = localhost
# Offset file directory.
# Each topic + partition combination has its own offset file.
# Default: current directory
#kafka.topic.offset.store.path = /var/cache/kafkatee/offsets/
# If the request offset was not found on broker, or there is no
# initial offset known (no stored offset), then reset the offset according
# to this configuration.
# Values: smallest (oldest/beginning), largest (newest/end), error (fail)
# Default: largest
#kafka.topic.auto.offset.reset = smallest
# Maximum message size.
# Should be synchronized on all producers, brokers and consumers.
# Default: 4000000
#kafka.message.max.bytes = 10000000
# Kafka debugging
# Default: none
#kafka.debug = msg,topic,broker
#######################################################################
# #
# Message transformation #
# #
# A message read from one of the inputs may be transformed before #
# being enqueued on the output queues. #
# #
# Transformation requires that the input and output encodings differ, #
# i.e., 'input [encoding=json] ..' and 'output.encoding=string' #
# #
# While the input encoding is configured per input, the output #
# encoding is configured globally; all outputs will receive the same #
# message. #
# #
# The currently supported transformation(s) are: #
# JSON input -> string output: #
# JSON data is formatted according to the output.format #
# configuration. The %{field} syntax references the field in the #
# original JSON object by the same name and outputs its value. #
# #
# If the input and output encodings match, then the message remains #
# untouched. #
# #
# The output message delimiter (defaults to newline (\n)) is #
# configurable (output.delimiter) and always appended to all output #
# messages regardless of transformation. #
# The input is always stripped of its delimiter (which is newline #
# for pipe inputs). #
# #
#######################################################################
# Output encoding: string or json
# Default: string
#output.encoding = string
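# A minimal sketch of the JSON -> string transformation described above,
# using the syntax defined elsewhere in this file; the topic name, the
# referenced fields and the output path are illustrative only:
#input [encoding=json] kafka topic mytopic partition 0 from stored
#output.encoding = string
#output.format = %{hostname} %{uri?-}
#output file 1 /tmp/transformed.log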
#######################################################################
# Output formatting #
# #
# The format string is made up of %{..}-tokens and literals. #
# #
# Tokens: #
# #
# %{FIELD} #
# Retrieves the value from the JSON object's field with the #
# same name. #
# #
# %{FIELD?DEFAULT} #
# 'DEFAULT' is the string to use if no field was matched; #
# if no DEFAULT is given, "-" is used. #
# #
# Literals are copied verbatim to the output string. #
# #
# Example JSON: {"task":19, "name":"Mike"} #
# Example format: Got task %{task} for user %{name?unknown} #
# Example output: Got task 19 for user Mike #
# #
# Note: Multi-level JSON objects are flattened: #
# JSON: {"a": {"b": 9}, "c": "hi"} #
# Gives: { "b": 9, "c": "hi" } #
# #
#######################################################################
# Output format for JSON -> string transformation.
# Default: none
#output.format = %{hostname} %{sequence} %{dt} %{time_firstbyte} %{ip} %{handling}/%{http_status} %{bytes_sent} %{request_method} http://%{host}%{uri}%{query} - %{mime_type} %{referer} %{x_forwarded_for} %{user_agent} %{accept_language} %{x_analytics}
# Output delimiter
# The output message ends with this delimiter.
# Supports \n, \r, \t, \0.
# Default: newline (\n)
#output.delimiter = ;END;\n
# Maximum queue size for each output, in number of messages
# Default: 100000
#output.queue.size = 1000000
#######################################################################
# #
# Inputs #
# #
# The following types of inputs are supported: #
# - Kafka consumer #
# - Piped command #
# #
# Any number and mix of inputs can be configured. #
# Each input may be configured with an optional list of #
# input-specific configuration properties, called the key-values. #
# #
# Supported key-values: #
# - encoding=string|json - message encoding from this input. #
# Default: string #
# #
# - stop.eof=true|false - do not continue trying to read from #
# this input when EOF has been reached #
# Default: false #
# #
# - stop.error=true|false - do not reopen/restart input on error. #
# Default: false #
# #
# The key-values are CSV-separated and the list of key-values must be #
# enveloped by brackets: [encoding=string,foo=bar] #
# #
#######################################################################
#######################################################################
# Kafka consumer input syntax: #
# input [key-values] kafka topic <topic> partition <N> from <offset> #
# #
# where: #
# - [key-values] is an optional CSV-separated list of key-values. #
# NOTE: the enveloping brackets are literal. #
# - <topic> is the Kafka topic to consume from. #
# - <N> or <N>-<M> is the partition, or range of partitions, to #
# consume from. #
# - <offset> is the offset to start consuming from. #
# supported values: beginning, end, stored, <number> #
# Where 'stored' means to use a local offset file to store and #
# read the offset from, which allows a later run of kafkatee #
# to pick up from where it left off. #
# #
#######################################################################
#input [encoding=json] kafka topic varnish partition 0-10 from stored
#input [encoding=string] kafka topic test1 partition 0 from end
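# Also valid per the syntax above: several key-values combined and a
# literal numeric start offset (topic name and offset are illustrative):
#input [encoding=json,stop.error=true] kafka topic test1 partition 0 from 1234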
#######################################################################
# Piped command input syntax: #
# input [key-values] pipe <command ...> #
# #
# where: #
# - [key-values] is an optional CSV-separated list of key-values. #
# NOTE: the enveloping brackets are literal. #
# - <command ...> is a command string that will be executed with: #
# /bin/sh -c "<command ...>", thus supporting pipes, etc. #
# #
#######################################################################
#input [encoding=string] pipe tail -f a.log | grep -v ^DEBUG:
#input pipe wget -q -O- http://example.com/api/events
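# A one-shot pipe input that stops at EOF instead of restarting
# (file path is illustrative):
#input [encoding=string,stop.eof=true] pipe cat /var/log/old-requests.log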
#######################################################################
# #
# Outputs #
# #
# The following types of outputs are supported: #
# - Piped command #
# - File #
# #
# Each output has its own queue where messages are enqueued prior to #
# writing to the output; this queue is limited by output.queue.size. #
# If the queue limit is reached, no new messages are added to the #
# queue (tail-drop). #
# #
# Outputs are configured with a sample rate, 1 means every message, #
# 2 means every other message, 1000 means every 1000th message, and #
# so on. #
# #
# If an output process terminates, or an output file fails writing, #
# the output is closed and reopened/restarted. The messages in the #
# output's queue remain in the queue while the output is unavailable. #
# #
#######################################################################
#######################################################################
# Piped command output syntax: #
# output pipe <sample-rate> <command ...> #
# #
# where: #
# - <sample-rate> is the sample-rate: 1 for each message, 100 for #
# every 100th message, and so on. #
# - <command ...> is a command string that will be executed with: #
# /bin/sh -c "<command ...>", thus supporting pipes, etc. #
# #
# Output pipes are stopped and restarted if kafkatee receives a #
# SIGHUP signal. #
#######################################################################
#output pipe 1 grep ^Something >> somefile.log
#output pipe 1000 do-some-stats.sh
#output pipe 1 nc -u syslog.example.com 514
#######################################################################
# File output syntax: #
# output file <sample-rate> <path> #
# #
# where: #
# - <sample-rate> is the sample-rate: 1 for each message, 100 for #
# every 100th message, and so on. #
# - <path> is the file path to write to. Messages are appended to the file. #
# #
# Output files are closed and reopened if kafkatee receives a #
# SIGHUP signal, thus allowing log file rotation. #
#######################################################################
#output file 100 /tmp/sampled-100.txt
#######################################################################
# Include other config files using the include clause. #
# You may use file glob matching to include files, e.g. #
# include /etc/kafkatee.d/*.conf #
# #
# These files will be included in alphabetical order and override #
# any previously set configs. #
#######################################################################
# Include other config file
#include local.conf
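# Include a directory of config snippets via glob matching (path illustrative)
#include /etc/kafkatee.d/*.conf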