Cherry-pick #7997 to 6.x: Add tag "multiline" to "log.flags" if event consists of multiple lines. #8207

Merged · 1 commit · Sep 11, 2018
1 change: 1 addition & 0 deletions CHANGELOG.asciidoc
@@ -93,6 +93,7 @@ https://github.com/elastic/beats/compare/v6.4.0...6.x[Check the HEAD diff]

- Add tag "truncated" to "log.flags" if incoming line is longer than configured limit. {pull}7991[7991]
- Add haproxy module. {pull}8014[8014]
- Add tag "multiline" to "log.flags" if event consists of multiple lines. {pull}7997[7997]

*Heartbeat*

15 changes: 15 additions & 0 deletions filebeat/module/elasticsearch/server/test/test.log-expected.json
@@ -167,6 +167,9 @@
"fileset.module": "elasticsearch",
"fileset.name": "server",
"input.type": "log",
"log.flags": [
"multiline"
],
"log.level": "WARN",
"message": "master left (reason = failed to ping, tried [3] times, each with maximum [30s] timeout), current nodes: nodes:\n {srvmulpvlsk252_md}{uc5xdiQgRhaBIY-sszgjvQ}{X9pC0t1UQQix_NNOM0J6JQ}{srvmulpvlsk252.loganalytics.santanderuk.corp}{180.39.9.93:9300}{ml.max_open_jobs=10, ml.enabled=true}, local\n {srvmulpvlsk258_md}{HgW6EDn5QCmWVmICy4saHw}{o8zku7OJR4CTp0IjY8Ag4Q}{srvmulpvlsk258.loganalytics.santanderuk.corp}{180.39.9.99:9300}{ml.max_open_jobs=10, ml.enabled=true}\n {srvmulpvlsk250_md}{igrwSoPGSJ6u_5b8k26tgQ}{PuRqciBFRbiQvL2_lS7LrQ}{srvmulpvlsk250.loganalytics.santanderuk.corp}{180.39.9.91:9300}{ml.max_open_jobs=10, ml.enabled=true}, master\n {srvmulpvlsk254_id}{wZYeAh2URc2NwBIHZolLWQ}{3nduupo-TzSPaXjQaNu4Sg}{srvmulpvlsk254.loganalytics.santanderuk.corp}{180.39.9.95:9300}{ml.max_open_jobs=10, ml.enabled=true}",
"offset": 2008,
@@ -179,6 +182,9 @@
"fileset.module": "elasticsearch",
"fileset.name": "server",
"input.type": "log",
"log.flags": [
"multiline"
],
"log.level": "WARN",
"message": "path: /_xpack/monitoring/_bulk, params: {system_id=logstash, system_api_version=2, interval=1s}\norg.elasticsearch.cluster.block.ClusterBlockException: blocked by: [SERVICE_UNAVAILABLE/2/no master];\n at org.elasticsearch.cluster.block.ClusterBlocks.globalBlockedException(ClusterBlocks.java:165) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.cluster.block.ClusterBlocks.globalBlockedRaiseException(ClusterBlocks.java:151) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.monitoring.action.TransportMonitoringBulkAction.doExecute(TransportMonitoringBulkAction.java:57) ~[?:?]\n at org.elasticsearch.xpack.monitoring.action.TransportMonitoringBulkAction.doExecute(TransportMonitoringBulkAction.java:40) ~[?:?]\n at org.elasticsearch.action.support.TransportAction.doExecute(TransportAction.java:146) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.TransportAction$RequestFilterChain.proceed(TransportAction.java:170) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.lambda$apply$1(SecurityActionFilter.java:133) ~[?:?]\n at org.elasticsearch.action.ActionListener$1.onResponse(ActionListener.java:59) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.lambda$authorizeRequest$4(SecurityActionFilter.java:208) ~[?:?]\n at org.elasticsearch.xpack.security.authz.AuthorizationUtils$AsyncAuthorizer.maybeRun(AuthorizationUtils.java:127) ~[?:?]\n at org.elasticsearch.xpack.security.authz.AuthorizationUtils$AsyncAuthorizer.setRunAsRoles(AuthorizationUtils.java:121) ~[?:?]\n at org.elasticsearch.xpack.security.authz.AuthorizationUtils$AsyncAuthorizer.authorize(AuthorizationUtils.java:109) ~[?:?]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.authorizeRequest(SecurityActionFilter.java:210) ~[?:?]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.lambda$applyInternal$3(SecurityActionFilter.java:186) ~[?:?]\n at org.elasticsearch.action.ActionListener$1.onResponse(ActionListener.java:59) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.lambda$authenticateAsync$2(AuthenticationService.java:212) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.lambda$lookForExistingAuthentication$4(AuthenticationService.java:246) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.lookForExistingAuthentication(AuthenticationService.java:257) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.authenticateAsync(AuthenticationService.java:210) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.access$000(AuthenticationService.java:159) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService.authenticate(AuthenticationService.java:122) ~[?:?]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.applyInternal(SecurityActionFilter.java:185) ~[?:?]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.apply(SecurityActionFilter.java:145) ~[?:?]\n at org.elasticsearch.action.support.TransportAction$RequestFilterChain.proceed(TransportAction.java:168) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.TransportAction.execute(TransportAction.java:142) ~[elasticsearch-5.6.3.jar:5.6.3]\n at 
org.elasticsearch.action.support.TransportAction.execute(TransportAction.java:84) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.node.NodeClient.executeLocally(NodeClient.java:83) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.node.NodeClient.doExecute(NodeClient.java:72) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.support.AbstractClient.execute(AbstractClient.java:408) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:80) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.monitoring.rest.action.RestMonitoringBulkAction.lambda$doPrepareRequest$0(RestMonitoringBulkAction.java:77) ~[?:?]\n at org.elasticsearch.rest.BaseRestHandler.handleReques",
"offset": 2907,
@@ -191,6 +197,9 @@
"fileset.module": "elasticsearch",
"fileset.name": "server",
"input.type": "log",
"log.flags": [
"multiline"
],
"log.level": "WARN",
"message": "path: /_xpack/license, params: {}\norg.elasticsearch.discovery.MasterNotDiscoveredException: NodeDisconnectedException[[srvmulpvlsk250_md][180.39.9.91:9300][cluster:monitor/xpack/license/get] disconnected]\n at org.elasticsearch.action.support.master.TransportMasterNodeAction$AsyncSingleAction$4.onTimeout(TransportMasterNodeAction.java:209) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.cluster.ClusterStateObserver$ContextPreservingListener.onTimeout(ClusterStateObserver.java:311) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.cluster.ClusterStateObserver.waitForNextChange(ClusterStateObserver.java:139) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.cluster.ClusterStateObserver.waitForNextChange(ClusterStateObserver.java:111) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.master.TransportMasterNodeAction$AsyncSingleAction.retry(TransportMasterNodeAction.java:194) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.master.TransportMasterNodeAction$AsyncSingleAction.access$500(TransportMasterNodeAction.java:107) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.master.TransportMasterNodeAction$AsyncSingleAction$3.handleException(TransportMasterNodeAction.java:183) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.transport.TransportService$ContextRestoreResponseHandler.handleException(TransportService.java:1067) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.transport.TransportService$ContextRestoreResponseHandler.handleException(TransportService.java:1067) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.transport.TransportService$Adapter.lambda$onConnectionClosed$6(TransportService.java:893) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingRunnable.run(ThreadContext.java:569) [elasticsearch-5.6.3.jar:5.6.3]\n at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_161]\n at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_161]\n at java.lang.Thread.run(Thread.java:748) [?:1.8.0_161]\nCaused by: org.elasticsearch.transport.NodeDisconnectedException: [srvmulpvlsk250_md][180.39.9.91:9300][cluster:monitor/xpack/license/get] disconnected",
"offset": 7412,
@@ -206,6 +215,9 @@
"fileset.module": "elasticsearch",
"fileset.name": "server",
"input.type": "log",
"log.flags": [
"multiline"
],
"log.level": "WARN",
"message": "duration [3.8s], collections [1]/[4.3s], total [3.8s]/[8.8h], memory [16.5gb]->[15.7gb]/[30.8gb], all_po\nols {[young] [1.2gb]->[24mb]/[1.4gb]}{[survivor] [191.3mb]->[191.3mb]/[191.3mb]}{[old] [15.1gb]->[15.5gb]/[29.1gb]}",
"offset": 9873,
@@ -246,6 +258,9 @@
"fileset.module": "elasticsearch",
"fileset.name": "server",
"input.type": "log",
"log.flags": [
"multiline"
],
"log.level": "WARN",
"message": "monitoring execution failed\norg.elasticsearch.xpack.monitoring.exporter.ExportException: Exception when closing export bulk\n at org.elasticsearch.xpack.monitoring.exporter.ExportBulk$1$1.<init>(ExportBulk.java:106) ~[?:?]\n at org.elasticsearch.xpack.monitoring.exporter.ExportBulk$1.onFailure(ExportBulk.java:104) ~[?:?]\n at org.elasticsearch.xpack.monitoring.exporter.ExportBulk$Compound$1.onResponse(ExportBulk.java:217) ~[?:?]\n at org.elasticsearch.xpack.monitoring.exporter.ExportBulk$Compound$1.onResponse(ExportBulk.java:211) ~[?:?]\n at org.elasticsearch.xpack.common.IteratingActionListener.onResponse(IteratingActionListener.java:108) ~[?:?]\n at org.elasticsearch.action.ActionListener$1.onResponse(ActionListener.java:59) [elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.monitoring.exporter.http.HttpExportBulk$1.onSuccess(HttpExportBulk.java:115) [x-pack-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.RestClient$FailureTrackingResponseListener.onSuccess(RestClient.java:597) [elasticsearch-rest-client-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.RestClient$1.completed(RestClient.java:352) [elasticsearch-rest-client-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.RestClient$1.completed(RestClient.java:343) [elasticsearch-rest-client-5.6.3.jar:5.6.3]\n at org.apache.http.concurrent.BasicFuture.completed(BasicFuture.java:119) [httpcore-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.client.DefaultClientExchangeHandlerImpl.responseCompleted(DefaultClientExchangeHandlerImpl.java:177) [httpasyncclient-4.1.2.jar:4.1.2]\n at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.processResponse(HttpAsyncRequestExecutor.java:436) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.inputReady(HttpAsyncRequestExecutor.java:326) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.DefaultNHttpClientConnection.consumeInput(DefaultNHttpClientConnection.java:265) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:81) [httpasyncclient-4.1.2.jar:4.1.2]\n at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:39) [httpasyncclient-4.1.2.jar:4.1.2]\n at org.apache.http.impl.nio.reactor.AbstractIODispatch.inputReady(AbstractIODispatch.java:114) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.BaseIOReactor.readable(BaseIOReactor.java:162) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvent(AbstractIOReactor.java:337) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvents(AbstractIOReactor.java:315) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:276) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.BaseIOReactor.execute(BaseIOReactor.java:104) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.AbstractMultiworkerIOReactor$Worker.run(AbstractMultiworkerIOReactor.java:588) [httpcore-nio-4.4.5.jar:4.4.5]\n at java.lang.Thread.run(Thread.java:748) [?:1.8.0_161]\n",
"offset": 10648,
@@ -127,6 +127,9 @@
"fileset.module": "elasticsearch",
"fileset.name": "slowlog",
"input.type": "log",
"log.flags": [
"multiline"
],
"log.level": "INFO",
"message": "[2018-07-04T21:51:30,411][INFO ][index.indexing.slowlog.index] [v_VJhjV] [metricbeat-6.3.0-2018.07.04/VLKxBLvUSYuIMKzpacGjRg] took[1.7ms], took_millis[1], type[doc], id[s01HZ2QBk9jw4gtgaFtn], routing[], source[\n{\n \"@timestamp\":\"2018-07-04T21:27:30.730Z\",\n \"metricset\":{\n \"name\":\"network\",\n \"module\":\"system\",\n \"rtt\":7264},\n \"system\":{\n \"network\":{\n \"name\":\"lo0\",\n \"in\":{\n \"errors\":0,\n \"dropped\":0,\n \"bytes\":77666873,\n \"packets\":244595},\n \"out\":{\n \"packets\":244595,\n \"bytes\":77666873,\n \"errors\":0,\n \"dropped\":0\n }\n }\n },\n \"beat\":{\n \"name\":\"Rados-MacBook-Pro.local\",\n \"hostname\":\"Rados-MacBook-Pro.local\",\n \"version\":\"6.3.0\"\n },\n \"host\":{\n \"name\":\"Rados-MacBook-Pro.local\"\n }\n }]",
"offset": 4753,
3 changes: 3 additions & 0 deletions filebeat/module/icinga/main/test/test.log-expected.json
@@ -18,6 +18,9 @@
"icinga.main.message": "Notification command for object 'demo!load' (PID: 19401, arguments: '/etc/icinga2/scripts/mail-service-notification.sh') terminated with exit code 127, output: /etc/icinga2/scripts/mail-service-notification.sh: 20: /etc/icinga2/scripts/mail-service-notification.sh: mail: not found\n/usr/bin/printf: write error: Broken pipe\n",
"icinga.main.severity": "warning",
"input.type": "log",
"log.flags": [
"multiline"
],
"offset": 133,
"prospector.type": "log"
},
@@ -76,6 +76,9 @@
"fileset.module": "postgresql",
"fileset.name": "log",
"input.type": "log",
"log.flags": [
"multiline"
],
"message": "2017-07-31 13:36:43.557 CEST [4983] postgres@postgres LOG: duration: 37.118 ms statement: SELECT d.datname as \"Name\",\n\t pg_catalog.pg_get_userbyid(d.datdba) as \"Owner\",\n\t pg_catalog.pg_encoding_to_char(d.encoding) as \"Encoding\",\n\t d.datcollate as \"Collate\",\n\t d.datctype as \"Ctype\",\n\t pg_catalog.array_to_string(d.datacl, E'\\n') AS \"Access privileges\"\n\tFROM pg_catalog.pg_database d\n\tORDER BY 1;",
"offset": 445,
"postgresql.log.database": "postgres",
@@ -93,6 +96,9 @@
"fileset.module": "postgresql",
"fileset.name": "log",
"input.type": "log",
"log.flags": [
"multiline"
],
"message": "2017-07-31 13:36:44.104 CEST [4986] postgres@postgres LOG: duration: 2.895 ms statement: SELECT d.datname as \"Name\",\n\t pg_catalog.pg_get_userbyid(d.datdba) as \"Owner\",\n\t pg_catalog.pg_encoding_to_char(d.encoding) as \"Encoding\",\n\t d.datcollate as \"Collate\",\n\t d.datctype as \"Ctype\",\n\t pg_catalog.array_to_string(d.datacl, E'\\n') AS \"Access privileges\"\n\tFROM pg_catalog.pg_database d\n\tORDER BY 1;",
"offset": 873,
"postgresql.log.database": "postgres",
@@ -110,6 +116,9 @@
"fileset.module": "postgresql",
"fileset.name": "log",
"input.type": "log",
"log.flags": [
"multiline"
],
"message": "2017-07-31 13:36:44.642 CEST [4989] postgres@postgres LOG: duration: 2.809 ms statement: SELECT d.datname as \"Name\",\n\t pg_catalog.pg_get_userbyid(d.datdba) as \"Owner\",\n\t pg_catalog.pg_encoding_to_char(d.encoding) as \"Encoding\",\n\t d.datcollate as \"Collate\",\n\t d.datctype as \"Ctype\",\n\t pg_catalog.array_to_string(d.datacl, E'\\n') AS \"Access privileges\"\n\tFROM pg_catalog.pg_database d\n\tORDER BY 1;",
"offset": 1300,
"postgresql.log.database": "postgres",
@@ -159,6 +168,9 @@
"fileset.module": "postgresql",
"fileset.name": "log",
"input.type": "log",
"log.flags": [
"multiline"
],
"message": "2017-07-31 13:39:21.025 CEST [5404] postgres@postgres LOG: duration: 37.598 ms statement: SELECT n.nspname as \"Schema\",\n\t c.relname as \"Name\",\n\t CASE c.relkind WHEN 'r' THEN 'table' WHEN 'v' THEN 'view' WHEN 'm' THEN 'materialized view' WHEN 'i' THEN 'index' WHEN 'S' THEN 'sequence' WHEN 's' THEN 'special' WHEN 'f' THEN 'foreign table' END as \"Type\",\n\t pg_catalog.pg_get_userbyid(c.relowner) as \"Owner\"\n\tFROM pg_catalog.pg_class c\n\t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace\n\tWHERE c.relkind IN ('r','')\n\t AND n.nspname <> 'pg_catalog'\n\t AND n.nspname <> 'information_schema'\n\t AND n.nspname !~ '^pg_toast'\n\t AND pg_catalog.pg_table_is_visible(c.oid)\n\tORDER BY 1,2;",
"offset": 1907,
"postgresql.log.database": "postgres",
@@ -210,6 +222,9 @@
"fileset.module": "postgresql",
"fileset.name": "log",
"input.type": "log",
"log.flags": [
"multiline"
],
"message": "2017-07-31 13:40:54.310 CEST [5502] postgres@clients LOG: duration: 26.082 ms statement: SELECT n.nspname as \"Schema\",\n\t c.relname as \"Name\",\n\t CASE c.relkind WHEN 'r' THEN 'table' WHEN 'v' THEN 'view' WHEN 'm' THEN 'materialized view' WHEN 'i' THEN 'index' WHEN 'S' THEN 'sequence' WHEN 's' THEN 'special' WHEN 'f' THEN 'foreign table' END as \"Type\",\n\t pg_catalog.pg_get_userbyid(c.relowner) as \"Owner\"\n\tFROM pg_catalog.pg_class c\n\t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace\n\tWHERE c.relkind IN ('r','')\n\t AND n.nspname <> 'pg_catalog'\n\t AND n.nspname <> 'information_schema'\n\t AND n.nspname !~ '^pg_toast'\n\t AND pg_catalog.pg_table_is_visible(c.oid)\n\tORDER BY 1,2;",
"offset": 2847,
"postgresql.log.database": "clients",
@@ -4,6 +4,9 @@
"fileset.module": "system",
"fileset.name": "syslog",
"input.type": "log",
"log.flags": [
"multiline"
],
"offset": 0,
"prospector.type": "log",
"system.syslog.hostname": "a-mac-with-esc-key",
4 changes: 4 additions & 0 deletions filebeat/reader/multiline/multiline.go
@@ -273,6 +273,10 @@ func (mlr *Reader) finalize() reader.Message {
mlr.message.AddFlagsWithKey("log.flags", "truncated")
}

if mlr.numLines > 1 {
mlr.message.AddFlagsWithKey("log.flags", "multiline")
}

// Copy message from existing content
msg := mlr.message

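For downstream consumers, the new flag can be inspected the same way the updated test below reads `log.flags`. A minimal, self-contained sketch (the `hasLogFlag` helper and the plain `map[string]interface{}` event are illustrative assumptions, not part of the Beats codebase):

```go
package main

import "fmt"

// hasLogFlag reports whether a "log.flags" value contains the given flag.
// It accepts either a single string or a []string, mirroring the type switch
// used in testMultilineTruncated below.
func hasLogFlag(flags interface{}, flag string) bool {
	switch v := flags.(type) {
	case string:
		return v == flag
	case []string:
		for _, f := range v {
			if f == flag {
				return true
			}
		}
	}
	return false
}

func main() {
	// Hypothetical event fields after the multiline reader has merged two lines.
	fields := map[string]interface{}{
		"message":   "first line\nsecond line",
		"log.flags": []string{"multiline"},
	}
	fmt.Println(hasLogFlag(fields["log.flags"], "multiline")) // true
}
```

Because `AddFlagsWithKey` appends to any flags already present, an event that was both merged and truncated can carry "truncated" and "multiline" at the same time, which the updated test checks for independently.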
5 changes: 5 additions & 0 deletions filebeat/reader/multiline/multiline_test.go
@@ -232,6 +232,7 @@ func testMultilineTruncated(t *testing.T, cfg Config, events int, truncated bool

	for _, message := range messages {
		found := false
		multiline := false
		statusFlags, err := message.Fields.GetValue("log.flags")
		if err != nil {
			if !truncated {
@@ -247,6 +248,9 @@
if f == "truncated" {
found = true
}
if f == "multiline" {
multiline = true
}
}
default:
t.Fatalf("incorrect type for log.flags")
@@ -257,6 +261,7 @@
		} else {
			assert.False(t, found)
		}
		assert.True(t, multiline)
	}
}
