From 2b6e52d45733abfe27815523402bf60cda510cfe Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Thu, 8 Nov 2018 21:23:57 -0500 Subject: [PATCH 1/3] [Rollup] Add more diagnostic stats to job To help debug future performance issues, this adds the min/max/avg/count/total latencies (in milliseconds) for search and bulk phase. This latency is the total service time including transfer between nodes, not just the `took` time. It also adds the count of search/bulk failures encountered during runtime. This information is also in the log, but a runtime counter will help expose problems faster --- .../client/rollup/GetRollupJobResponse.java | 166 +++++++++++++++++- .../rollup/GetRollupJobResponseTests.java | 21 ++- docs/reference/rollup/apis/get-job.asciidoc | 54 +++++- .../core/indexing/AsyncTwoPhaseIndexer.java | 17 +- .../xpack/core/indexing/IndexerJobStats.java | 78 +++++++- .../xpack/core/indexing/StatsAccumulator.java | 138 +++++++++++++++ .../rollup/job/RollupIndexerJobStats.java | 30 +++- .../job/JobWrapperSerializingTests.java | 2 + .../job/RollupIndexerJobStatsTests.java | 6 + .../xpack/rollup/job/IndexerUtilsTests.java | 11 +- .../rollup/job/RollupIndexerStateTests.java | 24 +++ .../rest-api-spec/test/rollup/delete_job.yml | 42 +++++ .../rest-api-spec/test/rollup/get_jobs.yml | 28 +++ .../rest-api-spec/test/rollup/put_job.yml | 14 ++ 14 files changed, 604 insertions(+), 27 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/StatsAccumulator.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java index 131e6ec0edabb..2d45c0c945905 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java @@ -26,15 +26,15 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; -import java.util.Objects; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static java.util.Collections.unmodifiableList; import static java.util.stream.Collectors.joining; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Response from rollup's get jobs api. 
@@ -51,6 +51,15 @@ public class GetRollupJobResponse { static final ParseField STATE = new ParseField("job_state"); static final ParseField CURRENT_POSITION = new ParseField("current_position"); static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id"); + static final ParseField BULK_LATENCY = new ParseField("bulk_latency_in_ms"); + static final ParseField SEARCH_LATENCY = new ParseField("search_latency_in_ms"); + static final ParseField SEARCH_FAILURES = new ParseField("search_failures"); + static final ParseField BULK_FAILURES = new ParseField("bulk_failures"); + static final ParseField MIN = new ParseField("min"); + static final ParseField MAX = new ParseField("max"); + static final ParseField AVG = new ParseField("avg"); + static final ParseField COUNT = new ParseField("count"); + static final ParseField TOTAL = new ParseField("total"); private List jobs; @@ -181,12 +190,22 @@ public static class RollupIndexerJobStats { private final long numInputDocuments; private final long numOuputDocuments; private final long numInvocations; - - RollupIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations) { + private StatsAccumulator bulkLatency; + private StatsAccumulator searchLatency; + private long bulkFailures; + private long searchFailures; + + RollupIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations, + StatsAccumulator bulkLatency, StatsAccumulator searchLatency, long bulkFailures, + long searchFailures) { this.numPages = numPages; this.numInputDocuments = numInputDocuments; this.numOuputDocuments = numOuputDocuments; this.numInvocations = numInvocations; + this.bulkLatency = bulkLatency; + this.searchLatency = searchLatency; + this.bulkFailures = bulkFailures; + this.searchFailures = searchFailures; } /** @@ -217,15 +236,50 @@ public long getOutputDocuments() { return numOuputDocuments; } + /** + * Number of failures that have occurred during the bulk indexing phase of Rollup + */ + public long getBulkFailures() { + return bulkFailures; + } + + /** + * Number of failures that have occurred during the search phase of Rollup + */ + public long getSearchFailures() { + return searchFailures; + } + + /** + * Returns an object which contains latency stats (min/max/avg/count) for the bulk + * indexing phase of Rollup + */ + public StatsAccumulator getBulkLatency() { + return bulkLatency; + } + + /** + * Returns an object which contains latency stats (min/max/avg/count) for the + * search phase of Rollup + */ + public StatsAccumulator getSearchLatency() { + return searchLatency; + } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( STATS.getPreferredName(), true, - args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3])); + args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3], + (StatsAccumulator) args[4], (StatsAccumulator) args[5], (long) args[6], (long) args[7])); static { PARSER.declareLong(constructorArg(), NUM_PAGES); PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); + PARSER.declareObject(constructorArg(), StatsAccumulator.PARSER::apply, BULK_LATENCY); + PARSER.declareObject(constructorArg(), StatsAccumulator.PARSER::apply, SEARCH_LATENCY); + PARSER.declareLong(constructorArg(), BULK_FAILURES); + PARSER.declareLong(constructorArg(), 
SEARCH_FAILURES); } @Override @@ -236,12 +290,17 @@ public boolean equals(Object other) { return Objects.equals(this.numPages, that.numPages) && Objects.equals(this.numInputDocuments, that.numInputDocuments) && Objects.equals(this.numOuputDocuments, that.numOuputDocuments) - && Objects.equals(this.numInvocations, that.numInvocations); + && Objects.equals(this.numInvocations, that.numInvocations) + && Objects.equals(this.bulkLatency, that.bulkLatency) + && Objects.equals(this.searchLatency, that.searchLatency) + && Objects.equals(this.bulkFailures, that.bulkFailures) + && Objects.equals(this.searchFailures, that.searchFailures); } @Override public int hashCode() { - return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations); + return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations, + bulkLatency, searchLatency, bulkFailures, searchFailures); } @Override @@ -249,7 +308,11 @@ public final String toString() { return "{pages=" + numPages + ", input_docs=" + numInputDocuments + ", output_docs=" + numOuputDocuments - + ", invocations=" + numInvocations + "}"; + + ", invocations=" + numInvocations + + ", bulk_failures=" + bulkFailures + + ", search_failures=" + searchFailures + + ", bulk_latency=" + bulkLatency + + ", search_latency=" + searchLatency + "}"; } } @@ -371,4 +434,89 @@ String value() { return name().toLowerCase(Locale.ROOT); } } + + public static class StatsAccumulator { + + private static final String NAME = "stats_accumulator"; + private static final ParseField MIN = new ParseField("min"); + private static final ParseField MAX = new ParseField("max"); + private static final ParseField AVG = new ParseField("avg"); + private static final ParseField COUNT = new ParseField("count"); + private static final ParseField TOTAL = new ParseField("total"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>(NAME, true, + args -> new StatsAccumulator((long) args[0], (long) args[1], (long) args[2], (long) args[3])); + + static { + PARSER.declareLong(constructorArg(), COUNT); + PARSER.declareLong(constructorArg(), TOTAL); + PARSER.declareLong(constructorArg(), MIN); + PARSER.declareLong(constructorArg(), MAX); + PARSER.declareLong(constructorArg(), AVG); // We parse but don't actually use the avg + } + + private long count; + private long total; + private long min; + private long max; + + StatsAccumulator(long count, long total, long min, long max) { + this.count = count; + this.total = total; + this.min = min; + this.max = max; + } + + public long getCount() { + return count; + } + + public long getMin() { + return count == 0 ? 0 : min; + } + + public long getMax() { + return count == 0 ? 0 : max; + } + + public double getAvg() { + return count == 0 ? 
0.0 : (double) total / (double) count; + } + + public long getTotal() { + return total; + } + + @Override + public int hashCode() { + return Objects.hash(count, total, min, max); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + + if (getClass() != obj.getClass()) { + return false; + } + + StatsAccumulator other = (StatsAccumulator) obj; + return Objects.equals(count, other.count) + && Objects.equals(total, other.total) + && Objects.equals(min, other.min) + && Objects.equals(max, other.max); + } + + @Override + public final String toString() { + return "{count=" + getCount() + + ", total=" + getTotal() + + ", min=" + getMin() + + ", max=" + getMax() + + ", avg=" + getAvg() + "}"; + } + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java index 41979a4b92d4c..6f58b4825f7c5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java @@ -62,7 +62,9 @@ private GetRollupJobResponse createTestInstance() { } private RollupIndexerJobStats randomStats() { - return new RollupIndexerJobStats(randomLong(), randomLong(), randomLong(), randomLong()); + return new RollupIndexerJobStats(randomLong(), randomLong(), randomLong(), randomLong(), + new GetRollupJobResponse.StatsAccumulator(0, 0, 0, 0), + new GetRollupJobResponse.StatsAccumulator(0, 0, 0, 0), 0, 0); } private RollupJobStatus randomStatus() { @@ -115,6 +117,23 @@ public void toXContent(RollupIndexerJobStats stats, XContentBuilder builder, ToX builder.field(GetRollupJobResponse.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments()); builder.field(GetRollupJobResponse.NUM_OUTPUT_DOCUMENTS.getPreferredName(), stats.getOutputDocuments()); builder.field(GetRollupJobResponse.NUM_INVOCATIONS.getPreferredName(), stats.getNumInvocations()); + builder.field(GetRollupJobResponse.BULK_LATENCY.getPreferredName()); + toXContent(stats.getBulkLatency(), builder, params); + builder.field(GetRollupJobResponse.SEARCH_LATENCY.getPreferredName()); + toXContent(stats.getSearchLatency(), builder, params); + builder.field(GetRollupJobResponse.BULK_FAILURES.getPreferredName(), stats.getBulkFailures()); + builder.field(GetRollupJobResponse.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures()); + builder.endObject(); + } + + public void toXContent(GetRollupJobResponse.StatsAccumulator stats, XContentBuilder builder, + ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(GetRollupJobResponse.MIN.getPreferredName(), stats.getMin()); + builder.field(GetRollupJobResponse.MAX.getPreferredName(), stats.getMax()); + builder.field(GetRollupJobResponse.AVG.getPreferredName(), stats.getAvg()); + builder.field(GetRollupJobResponse.TOTAL.getPreferredName(), stats.getTotal()); + builder.field(GetRollupJobResponse.COUNT.getPreferredName(), stats.getCount()); builder.endObject(); } } diff --git a/docs/reference/rollup/apis/get-job.asciidoc b/docs/reference/rollup/apis/get-job.asciidoc index 794d72480121b..9c1da9cc3b6de 100644 --- a/docs/reference/rollup/apis/get-job.asciidoc +++ b/docs/reference/rollup/apis/get-job.asciidoc @@ -101,7 +101,23 @@ Which will yield the following response: "pages_processed" : 0, "documents_processed" : 0, "rollups_indexed" : 0, - 
"trigger_count" : 0 + "trigger_count" : 0, + "bulk_failures": 0, + "bulk_latency_in_ms": { + "count": 0, + "total": 0, + "min": 0, + "max": 0, + "avg": 0.0 + }, + "search_failures": 0, + "search_latency_in_ms": { + "count": 0, + "total": 0, + "min": 0, + "max": 0, + "avg": 0.0 + } } } ] @@ -221,7 +237,23 @@ Which will yield the following response: "pages_processed" : 0, "documents_processed" : 0, "rollups_indexed" : 0, - "trigger_count" : 0 + "trigger_count" : 0, + "bulk_failures": 0, + "bulk_latency": { + "count": 0, + "total": 0.0, + "min": 0.0, + "max": 0.0, + "avg": 0.0 + }, + "search_failures": 0, + "search_latency": { + "count": 0, + "total": 0.0, + "min": 0.0, + "max": 0.0, + "avg": 0.0 + } } }, { @@ -270,7 +302,23 @@ Which will yield the following response: "pages_processed" : 0, "documents_processed" : 0, "rollups_indexed" : 0, - "trigger_count" : 0 + "trigger_count" : 0, + "bulk_failures": 0, + "bulk_latency": { + "count": 0, + "total": 0.0, + "min": 0.0, + "max": 0.0, + "avg": 0.0 + }, + "search_failures": 0, + "search_latency": { + "count": 0, + "total": 0.0, + "min": 0.0, + "max": 0.0, + "avg": 0.0 + } } } ] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java index c121b263ad67c..636b9cfd355a5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java @@ -153,6 +153,7 @@ public synchronized boolean maybeTriggerAsyncJob(long now) { // fire off the search. Note this is async, the method will return from here executor.execute(() -> { try { + stats.markStartSearch(); doNextSearch(buildSearchRequest(), ActionListener.wrap(this::onSearchResponse, exc -> finishWithFailure(exc))); } catch (Exception e) { finishWithFailure(e); @@ -291,6 +292,7 @@ private IndexerState finishAndSetState() { } private void onSearchResponse(SearchResponse searchResponse) { + stats.markEndSearch(); try { if (checkState(getState()) == false) { return; @@ -320,6 +322,7 @@ private void onSearchResponse(SearchResponse searchResponse) { // TODO this might be a valid case, e.g. if implementation filters assert bulkRequest.requests().size() > 0; + stats.markStartBulk(); doNextBulk(bulkRequest, ActionListener.wrap(bulkResponse -> { // TODO we should check items in the response and move after accordingly to // resume the failing buckets ? 
@@ -335,16 +338,23 @@ private void onSearchResponse(SearchResponse searchResponse) { position.set(newPosition); onBulkResponse(bulkResponse, newPosition); - }, exc -> finishWithFailure(exc))); + }, exc -> { + stats.incrementBulkFailures(); + finishWithFailure(exc); + })); } catch (Exception e) { + stats.incrementSearchFailures(); finishWithFailure(e); } } private void onBulkResponse(BulkResponse response, JobPosition position) { + stats.markEndBulk(); try { - - ActionListener listener = ActionListener.wrap(this::onSearchResponse, this::finishWithFailure); + ActionListener listener = ActionListener.wrap(this::onSearchResponse, e -> { + stats.incrementSearchFailures(); + finishWithFailure(e); + }); // TODO probably something more intelligent than every-50 is needed if (stats.getNumPages() > 0 && stats.getNumPages() % 50 == 0) { doSaveState(IndexerState.INDEXING, position, () -> doNextSearch(buildSearchRequest(), listener)); @@ -352,6 +362,7 @@ private void onBulkResponse(BulkResponse response, JobPosition position) { doNextSearch(buildSearchRequest(), listener); } } catch (Exception e) { + stats.incrementBulkFailures(); finishWithFailure(e); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java index 2453504a5ba77..5567c6ca8c3ae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.indexing; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -27,15 +28,27 @@ public abstract class IndexerJobStats implements ToXContentObject, Writeable { protected long numInputDocuments = 0; protected long numOuputDocuments = 0; protected long numInvocations = 0; + protected StatsAccumulator bulkLatency = new StatsAccumulator(); + protected StatsAccumulator searchLatency = new StatsAccumulator(); + protected long bulkFailures = 0; + protected long searchFailures = 0; + + private long startBulkTime; + private long startSearchTime; public IndexerJobStats() { } - public IndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations) { + public IndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations, + StatsAccumulator bulkLatency, StatsAccumulator searchLatency, long bulkFailures, long searchFailures) { this.numPages = numPages; this.numInputDocuments = numInputDocuments; this.numOuputDocuments = numOuputDocuments; this.numInvocations = numInvocations; + this.bulkLatency = bulkLatency; + this.searchLatency = searchLatency; + this.bulkFailures = bulkFailures; + this.searchFailures = searchFailures; } public IndexerJobStats(StreamInput in) throws IOException { @@ -43,6 +56,13 @@ public IndexerJobStats(StreamInput in) throws IOException { this.numInputDocuments = in.readVLong(); this.numOuputDocuments = in.readVLong(); this.numInvocations = in.readVLong(); + // TODO change this after backport + if (in.getVersion().onOrAfter(Version.CURRENT)) { + this.bulkLatency = new StatsAccumulator(in); + this.searchLatency = new StatsAccumulator(in); + this.bulkFailures = in.readVLong(); + this.searchFailures = in.readVLong(); + } } public long getNumPages() { @@ 
-61,6 +81,22 @@ public long getOutputDocuments() { return numOuputDocuments; } + public long getBulkFailures() { + return bulkFailures; + } + + public long getSearchFailures() { + return searchFailures; + } + + public StatsAccumulator getBulkLatency() { + return bulkLatency; + } + + public StatsAccumulator getSearchLatency() { + return searchLatency; + } + public void incrementNumPages(long n) { assert(n >= 0); numPages += n; @@ -81,12 +117,43 @@ public void incrementNumOutputDocuments(long n) { numOuputDocuments += n; } + public void incrementBulkFailures() { + this.bulkFailures += 1; + } + + public void incrementSearchFailures() { + this.searchFailures += 1; + } + + public void markStartBulk() { + this.startBulkTime = System.nanoTime(); + } + + public void markEndBulk() { + bulkLatency.add((System.nanoTime() - startBulkTime) / 1000000); + } + + public void markStartSearch() { + this.startSearchTime = System.nanoTime(); + } + + public void markEndSearch() { + searchLatency.add((System.nanoTime() - startSearchTime) / 1000000); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(numPages); out.writeVLong(numInputDocuments); out.writeVLong(numOuputDocuments); out.writeVLong(numInvocations); + // TODO change after backport + if (out.getVersion().onOrAfter(Version.CURRENT)) { + bulkLatency.writeTo(out); + searchLatency.writeTo(out); + out.writeVLong(bulkFailures); + out.writeVLong(searchFailures); + } } @Override @@ -104,11 +171,16 @@ public boolean equals(Object other) { return Objects.equals(this.numPages, that.numPages) && Objects.equals(this.numInputDocuments, that.numInputDocuments) && Objects.equals(this.numOuputDocuments, that.numOuputDocuments) - && Objects.equals(this.numInvocations, that.numInvocations); + && Objects.equals(this.numInvocations, that.numInvocations) + && Objects.equals(this.bulkLatency, that.bulkLatency) + && Objects.equals(this.searchLatency, that.searchLatency) + && Objects.equals(this.bulkFailures, that.bulkFailures) + && Objects.equals(this.searchFailures, that.searchFailures); } @Override public int hashCode() { - return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations); + return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations, + bulkLatency, searchLatency, bulkFailures, searchFailures); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/StatsAccumulator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/StatsAccumulator.java new file mode 100644 index 0000000000000..363faa5168ca4 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/StatsAccumulator.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.indexing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Helper class to collect min, max, avg and total statistics for a quantity. + * + * Derived from ML's version, tweaked to be xcontent/writeable and + * to handle the "empty" scenario differently. + */ +public class StatsAccumulator implements Writeable, ToXContentFragment { + + private static final String NAME = "stats_accumulator"; + private static final ParseField MIN = new ParseField("min"); + private static final ParseField MAX = new ParseField("max"); + private static final ParseField AVG = new ParseField("avg"); + private static final ParseField COUNT = new ParseField("count"); + private static final ParseField TOTAL = new ParseField("total"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>(NAME, true, + args -> new StatsAccumulator((long) args[0], (long) args[1], (long) args[2], (long) args[3])); + + static { + PARSER.declareLong(constructorArg(), COUNT); + PARSER.declareLong(constructorArg(), TOTAL); + PARSER.declareLong(constructorArg(), MIN); + PARSER.declareLong(constructorArg(), MAX); + PARSER.declareLong(constructorArg(), AVG); // We parse but don't actually use the avg + } + + private long count = 0; + private long total = 0; + private long min = 0; + private long max = 0; + + public StatsAccumulator() { + } + + public StatsAccumulator(StreamInput in) throws IOException { + count = in.readVLong(); + total = in.readVLong(); + min = in.readVLong(); + max = in.readVLong(); + } + + private StatsAccumulator(long count, long total, long min, long max) { + this.count = count; + this.total = total; + this.min = min; + this.max = max; + } + + public void add(long value) { + min = count == 0 ? value : Math.min(min, value); + max = count == 0 ? value : Math.max(max, value); + count += 1; + total += value; + } + + public long getCount() { + return count; + } + + public long getMin() { + return count == 0 ? 0 : min; + } + + public long getMax() { + return count == 0 ? 0 : max; + } + + public double getAvg() { + return count == 0 ? 
0.0 : (double) total / (double) count; + } + + public long getTotal() { + return total; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(COUNT.getPreferredName(), count); + builder.field(TOTAL.getPreferredName(), total); + builder.field(MIN.getPreferredName(), getMin()); + builder.field(MAX.getPreferredName(), getMax()); + builder.field(AVG.getPreferredName(), getAvg()); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(count); + out.writeVLong(total); + out.writeVLong(min); + out.writeVLong(max); + } + + @Override + public int hashCode() { + return Objects.hash(count, total, min, max); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + + if (getClass() != obj.getClass()) { + return false; + } + + StatsAccumulator other = (StatsAccumulator) obj; + return Objects.equals(count, other.count) + && Objects.equals(total, other.total) + && Objects.equals(min, other.min) + && Objects.equals(max, other.max); + } +} + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStats.java index 87915671b79a2..2d6a4860f601c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStats.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.indexing.IndexerJobStats; +import org.elasticsearch.xpack.core.indexing.StatsAccumulator; import java.io.IOException; @@ -25,24 +26,35 @@ public class RollupIndexerJobStats extends IndexerJobStats { private static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); private static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("rollups_indexed"); private static ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); + private static ParseField BULK_LATENCY = new ParseField("bulk_latency_in_ms"); + private static ParseField SEARCH_LATENCY = new ParseField("search_latency_in_ms"); + private static ParseField SEARCH_FAILURES = new ParseField("search_failures"); + private static ParseField BULK_FAILURES = new ParseField("bulk_failures"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME.getPreferredName(), - args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3])); + args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3], + (StatsAccumulator) args[4], (StatsAccumulator) args[5], (long) args[6], (long) args[7])); static { PARSER.declareLong(constructorArg(), NUM_PAGES); PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); + PARSER.declareObject(constructorArg(), StatsAccumulator.PARSER::apply, BULK_LATENCY); + PARSER.declareObject(constructorArg(), StatsAccumulator.PARSER::apply, SEARCH_LATENCY); + PARSER.declareLong(constructorArg(), BULK_FAILURES); + PARSER.declareLong(constructorArg(), SEARCH_FAILURES); } public RollupIndexerJobStats() { super(); } - public RollupIndexerJobStats(long numPages, long 
numInputDocuments, long numOuputDocuments, long numInvocations) { - super(numPages, numInputDocuments, numOuputDocuments, numInvocations); + public RollupIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations, + StatsAccumulator bulkLatency, StatsAccumulator searchLatency, long bulkFailures, + long searchFailures) { + super(numPages, numInputDocuments, numOuputDocuments, numInvocations, bulkLatency, searchLatency, bulkFailures, searchFailures); } public RollupIndexerJobStats(StreamInput in) throws IOException { @@ -56,6 +68,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(NUM_INPUT_DOCUMENTS.getPreferredName(), numInputDocuments); builder.field(NUM_OUTPUT_DOCUMENTS.getPreferredName(), numOuputDocuments); builder.field(NUM_INVOCATIONS.getPreferredName(), numInvocations); + if (bulkLatency != null) { + builder.startObject(BULK_LATENCY.getPreferredName()); + bulkLatency.toXContent(builder, params); + builder.endObject(); + } + if (searchLatency != null) { + builder.startObject(SEARCH_LATENCY.getPreferredName()); + searchLatency.toXContent(builder, params); + builder.endObject(); + } + builder.field(BULK_FAILURES.getPreferredName(), bulkFailures); + builder.field(SEARCH_FAILURES.getPreferredName(), searchFailures); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java index 1ab6e6a55d495..bba472eafe197 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.core.indexing.StatsAccumulator; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction; @@ -42,6 +43,7 @@ protected GetRollupJobsAction.JobWrapper createTestInstance() { return new GetRollupJobsAction.JobWrapper(ConfigTestHelpers.randomRollupJobConfig(random()), new RollupIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong(), new StatsAccumulator(), new StatsAccumulator(), randomNonNegativeLong(), randomNonNegativeLong()), new RollupJobStatus(state, Collections.emptyMap(), randomBoolean())); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStatsTests.java index 81f31e2e5c4eb..5d064fe869cb6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStatsTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.indexing.StatsAccumulator; public class RollupIndexerJobStatsTests extends AbstractSerializingTestCase { @@ -28,7 +29,12 @@ 
protected RollupIndexerJobStats doParseInstance(XContentParser parser) { public static RollupIndexerJobStats randomStats() { return new RollupIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong(), new StatsAccumulator(), new StatsAccumulator(), randomNonNegativeLong(), randomNonNegativeLong()); } + @Override + protected boolean supportsUnknownFields() { + return false; + } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index bee43bce47112..0f4cfd43b1f95 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.xpack.core.indexing.StatsAccumulator; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; @@ -66,7 +67,7 @@ public class IndexerUtilsTests extends AggregatorTestCase { public void testMissingFields() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0); + RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); String timestampField = "the_histo"; String valueField = "the_avg"; @@ -130,7 +131,7 @@ public void testMissingFields() throws IOException { public void testCorrectFields() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0); + RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); String timestampField = "the_histo"; String valueField = "the_avg"; @@ -198,7 +199,7 @@ public void testCorrectFields() throws IOException { public void testNumericTerms() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats= new RollupIndexerJobStats(0, 0, 0, 0); + RollupIndexerJobStats stats= new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); String valueField = "the_avg"; @@ -254,7 +255,7 @@ public void testNumericTerms() throws IOException { public void testEmptyCounts() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0); + RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); String timestampField = "ts"; String valueField = "the_avg"; @@ -490,7 +491,7 @@ public void testNullKeys() { public void testMissingBuckets() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0); + RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); String metricField = "metric_field"; String valueField = "value_field"; diff --git 
a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java index c74ecbadf4fbe..fbec9100a22dd 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java @@ -42,6 +42,7 @@ import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.startsWith; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -228,6 +229,11 @@ public void testStarted() throws Exception { ESTestCase.awaitBusy(() -> indexer.getState() == IndexerState.STARTED); assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumPages(), equalTo(1L)); + assertThat(indexer.getStats().getBulkFailures(), equalTo(0L)); + assertThat(indexer.getStats().getSearchFailures(), equalTo(0L)); + // We can't check the actual min/max/avg stats since they may be too fast to register + // but we can check the count having incremented + assertThat(indexer.getStats().getSearchLatency().getCount(), greaterThan(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -257,6 +263,11 @@ protected void onFinish() { assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumPages(), equalTo(1L)); + assertThat(indexer.getStats().getBulkFailures(), equalTo(0L)); + assertThat(indexer.getStats().getSearchFailures(), equalTo(0L)); + // We can't check the actual min/max/avg stats since they may be too fast to register + // but we can check the count having incremented + assertThat(indexer.getStats().getSearchLatency().getCount(), greaterThan(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -339,6 +350,7 @@ protected void onAbort() { assertThat(indexer.getState(), equalTo(IndexerState.ABORTING)); assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumPages(), equalTo(0L)); + assertThat(indexer.getStats().getSearchFailures(), equalTo(0L)); } finally { executor.shutdownNow(); } @@ -638,6 +650,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumPages(), equalTo(1L)); + // There should be one recorded failure + assertThat(indexer.getStats().getSearchFailures(), equalTo(1L)); + // Note: no docs were indexed assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); assertTrue(indexer.abort()); @@ -742,6 +757,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumPages(), equalTo(1L)); + // There should be one recorded failure + assertThat(indexer.getStats().getSearchFailures(), equalTo(1L)); + // Note: no docs were indexed assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); assertTrue(indexer.abort()); @@ -784,6 +802,9 @@ public void testSearchShardFailure() throws Exception { assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); + // There should be one 
recorded failure + assertThat(indexer.getStats().getSearchFailures(), equalTo(1L)); + // Note: no pages processed, no docs were indexed assertThat(indexer.getStats().getNumPages(), equalTo(0L)); assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); @@ -895,6 +916,9 @@ protected void doNextBulk(BulkRequest request, ActionListener next assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumPages(), equalTo(1L)); + // There should be one recorded failure + assertThat(indexer.getStats().getBulkFailures(), equalTo(1L)); + // Note: no docs were indexed assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); assertTrue(indexer.abort()); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml index db485279b2bf2..9769e7efb2305 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml @@ -70,6 +70,20 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 + bulk_failures: 0 + search_failures: 0 + bulk_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 + search_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 status: job_state: "stopped" upgraded_doc_id: true @@ -117,6 +131,20 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 + bulk_failures: 0 + search_failures: 0 + bulk_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 + search_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 status: job_state: "stopped" upgraded_doc_id: true @@ -164,6 +192,20 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 + bulk_failures: 0 + search_failures: 0 + bulk_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 + search_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 status: job_state: "stopped" upgraded_doc_id: true diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml index 4db805ae2f9f8..2e929ac19cb4f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -71,6 +71,20 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 + bulk_failures: 0 + search_failures: 0 + bulk_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 + search_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 status: job_state: "stopped" upgraded_doc_id: true @@ -208,6 +222,20 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 + bulk_failures: 0 + search_failures: 0 + bulk_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 + search_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 status: job_state: "stopped" upgraded_doc_id: true diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml index e0371cf5f0949..45c247b1d2fb3 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml @@ -71,6 +71,20 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 + bulk_failures: 0 + search_failures: 0 + bulk_latency_in_ms: 
+ count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 + search_latency_in_ms: + count: 0 + min: 0 + max: 0 + avg: 0.0 + total: 0 status: job_state: "stopped" upgraded_doc_id: true From 6505b73e6c93a8f87c615fa13cc7f835d8068918 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Wed, 21 Nov 2018 15:08:51 -0500 Subject: [PATCH 2/3] review cleanup --- .../client/rollup/GetRollupJobResponse.java | 181 ++++++------------ .../rollup/GetRollupJobResponseTests.java | 26 +-- docs/reference/rollup/apis/get-job.asciidoc | 60 ++---- .../core/indexing/AsyncTwoPhaseIndexer.java | 32 ++-- .../xpack/core/indexing/IndexerJobStats.java | 91 +++++---- .../xpack/core/indexing/StatsAccumulator.java | 138 ------------- .../rollup/job/RollupIndexerJobStats.java | 40 ++-- .../job/JobWrapperSerializingTests.java | 5 +- .../job/RollupIndexerJobStatsTests.java | 5 +- .../xpack/rollup/job/IndexerUtilsTests.java | 11 +- .../rollup/job/RollupIndexerStateTests.java | 18 +- .../rest-api-spec/test/rollup/delete_job.yml | 54 ++---- .../rest-api-spec/test/rollup/get_jobs.yml | 36 +--- .../rest-api-spec/test/rollup/put_job.yml | 18 +- 14 files changed, 219 insertions(+), 496 deletions(-) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/StatsAccumulator.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java index 2d45c0c945905..3f48c9a36d9b3 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java @@ -51,10 +51,12 @@ public class GetRollupJobResponse { static final ParseField STATE = new ParseField("job_state"); static final ParseField CURRENT_POSITION = new ParseField("current_position"); static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id"); - static final ParseField BULK_LATENCY = new ParseField("bulk_latency_in_ms"); - static final ParseField SEARCH_LATENCY = new ParseField("search_latency_in_ms"); + static final ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms"); + static final ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms"); + static final ParseField INDEX_TOTAL = new ParseField("index_total"); + static final ParseField SEARCH_TOTAL = new ParseField("search_total"); static final ParseField SEARCH_FAILURES = new ParseField("search_failures"); - static final ParseField BULK_FAILURES = new ParseField("bulk_failures"); + static final ParseField INDEX_FAILURES = new ParseField("index_failures"); static final ParseField MIN = new ParseField("min"); static final ParseField MAX = new ParseField("max"); static final ParseField AVG = new ParseField("avg"); @@ -190,21 +192,24 @@ public static class RollupIndexerJobStats { private final long numInputDocuments; private final long numOuputDocuments; private final long numInvocations; - private StatsAccumulator bulkLatency; - private StatsAccumulator searchLatency; - private long bulkFailures; + private long indexTime; + private long indexTotal; + private long searchTime; + private long searchTotal; + private long indexFailures; private long searchFailures; RollupIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations, - StatsAccumulator bulkLatency, StatsAccumulator searchLatency, long bulkFailures, - long searchFailures) { + long indexTime, long 
indexTotal, long searchTime, long searchTotal, long indexFailures, long searchFailures) { this.numPages = numPages; this.numInputDocuments = numInputDocuments; this.numOuputDocuments = numOuputDocuments; this.numInvocations = numInvocations; - this.bulkLatency = bulkLatency; - this.searchLatency = searchLatency; - this.bulkFailures = bulkFailures; + this.indexTime = indexTime; + this.indexTotal = indexTotal; + this.searchTime = searchTime; + this.searchTotal = searchTotal; + this.indexFailures = indexFailures; this.searchFailures = searchFailures; } @@ -239,8 +244,8 @@ public long getOutputDocuments() { /** * Number of failures that have occurred during the bulk indexing phase of Rollup */ - public long getBulkFailures() { - return bulkFailures; + public long getIndexFailures() { + return indexFailures; } /** @@ -251,34 +256,49 @@ public long getSearchFailures() { } /** - * Returns an object which contains latency stats (min/max/avg/count) for the bulk - * indexing phase of Rollup + * Returns the time spent indexing (cumulative) in milliseconds */ - public StatsAccumulator getBulkLatency() { - return bulkLatency; + public long getIndexTime() { + return indexTime; } /** - * Returns an object which contains latency stats (min/max/avg/count) for the - * search phase of Rollup + * Returns the time spent searching (cumulative) in milliseconds */ - public StatsAccumulator getSearchLatency() { - return searchLatency; + public long getSearchTime() { + return searchTime; + } + + /** + * Returns the total number of indexing requests that have been sent by the rollup job + * (Note: this is not the number of _documents_ that have been indexed) + */ + public long getIndexTotal() { + return indexTotal; + } + + /** + * Returns the total number of search requests that have been sent by the rollup job + */ + public long getSearchTotal() { + return searchTotal; } private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( STATS.getPreferredName(), true, args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3], - (StatsAccumulator) args[4], (StatsAccumulator) args[5], (long) args[6], (long) args[7])); + (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9])); static { PARSER.declareLong(constructorArg(), NUM_PAGES); PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); - PARSER.declareObject(constructorArg(), StatsAccumulator.PARSER::apply, BULK_LATENCY); - PARSER.declareObject(constructorArg(), StatsAccumulator.PARSER::apply, SEARCH_LATENCY); - PARSER.declareLong(constructorArg(), BULK_FAILURES); + PARSER.declareLong(constructorArg(), INDEX_TIME_IN_MS); + PARSER.declareLong(constructorArg(), INDEX_TOTAL); + PARSER.declareLong(constructorArg(), SEARCH_TIME_IN_MS); + PARSER.declareLong(constructorArg(), SEARCH_TOTAL); + PARSER.declareLong(constructorArg(), INDEX_FAILURES); PARSER.declareLong(constructorArg(), SEARCH_FAILURES); } @@ -288,19 +308,21 @@ public boolean equals(Object other) { if (other == null || getClass() != other.getClass()) return false; RollupIndexerJobStats that = (RollupIndexerJobStats) other; return Objects.equals(this.numPages, that.numPages) - && Objects.equals(this.numInputDocuments, that.numInputDocuments) - && Objects.equals(this.numOuputDocuments, that.numOuputDocuments) - && Objects.equals(this.numInvocations, that.numInvocations) - && 
Objects.equals(this.bulkLatency, that.bulkLatency) - && Objects.equals(this.searchLatency, that.searchLatency) - && Objects.equals(this.bulkFailures, that.bulkFailures) - && Objects.equals(this.searchFailures, that.searchFailures); + && Objects.equals(this.numInputDocuments, that.numInputDocuments) + && Objects.equals(this.numOuputDocuments, that.numOuputDocuments) + && Objects.equals(this.numInvocations, that.numInvocations) + && Objects.equals(this.indexTime, that.indexTime) + && Objects.equals(this.searchTime, that.searchTime) + && Objects.equals(this.indexFailures, that.indexFailures) + && Objects.equals(this.searchFailures, that.searchFailures) + && Objects.equals(this.searchTotal, that.searchTotal) + && Objects.equals(this.indexTotal, that.indexTotal); } @Override public int hashCode() { return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations, - bulkLatency, searchLatency, bulkFailures, searchFailures); + indexTime, searchTime, indexFailures, searchFailures, searchTotal, indexTotal); } @Override @@ -309,10 +331,12 @@ public final String toString() { + ", input_docs=" + numInputDocuments + ", output_docs=" + numOuputDocuments + ", invocations=" + numInvocations - + ", bulk_failures=" + bulkFailures + + ", index_failures=" + indexFailures + ", search_failures=" + searchFailures - + ", bulk_latency=" + bulkLatency - + ", search_latency=" + searchLatency + "}"; + + ", index_time_in_ms=" + indexTime + + ", index_total=" + indexTotal + + ", search_time_in_ms=" + searchTime + + ", search_total=" + searchTotal+ "}"; } } @@ -434,89 +458,4 @@ String value() { return name().toLowerCase(Locale.ROOT); } } - - public static class StatsAccumulator { - - private static final String NAME = "stats_accumulator"; - private static final ParseField MIN = new ParseField("min"); - private static final ParseField MAX = new ParseField("max"); - private static final ParseField AVG = new ParseField("avg"); - private static final ParseField COUNT = new ParseField("count"); - private static final ParseField TOTAL = new ParseField("total"); - - public static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(NAME, true, - args -> new StatsAccumulator((long) args[0], (long) args[1], (long) args[2], (long) args[3])); - - static { - PARSER.declareLong(constructorArg(), COUNT); - PARSER.declareLong(constructorArg(), TOTAL); - PARSER.declareLong(constructorArg(), MIN); - PARSER.declareLong(constructorArg(), MAX); - PARSER.declareLong(constructorArg(), AVG); // We parse but don't actually use the avg - } - - private long count; - private long total; - private long min; - private long max; - - StatsAccumulator(long count, long total, long min, long max) { - this.count = count; - this.total = total; - this.min = min; - this.max = max; - } - - public long getCount() { - return count; - } - - public long getMin() { - return count == 0 ? 0 : min; - } - - public long getMax() { - return count == 0 ? 0 : max; - } - - public double getAvg() { - return count == 0 ? 
0.0 : (double) total / (double) count; - } - - public long getTotal() { - return total; - } - - @Override - public int hashCode() { - return Objects.hash(count, total, min, max); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - StatsAccumulator other = (StatsAccumulator) obj; - return Objects.equals(count, other.count) - && Objects.equals(total, other.total) - && Objects.equals(min, other.min) - && Objects.equals(max, other.max); - } - - @Override - public final String toString() { - return "{count=" + getCount() - + ", total=" + getTotal() - + ", min=" + getMin() - + ", max=" + getMax() - + ", avg=" + getAvg() + "}"; - } - } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java index 6f58b4825f7c5..3b3c94c31f59c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java @@ -62,9 +62,9 @@ private GetRollupJobResponse createTestInstance() { } private RollupIndexerJobStats randomStats() { - return new RollupIndexerJobStats(randomLong(), randomLong(), randomLong(), randomLong(), - new GetRollupJobResponse.StatsAccumulator(0, 0, 0, 0), - new GetRollupJobResponse.StatsAccumulator(0, 0, 0, 0), 0, 0); + return new RollupIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); } private RollupJobStatus randomStatus() { @@ -117,23 +117,13 @@ public void toXContent(RollupIndexerJobStats stats, XContentBuilder builder, ToX builder.field(GetRollupJobResponse.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments()); builder.field(GetRollupJobResponse.NUM_OUTPUT_DOCUMENTS.getPreferredName(), stats.getOutputDocuments()); builder.field(GetRollupJobResponse.NUM_INVOCATIONS.getPreferredName(), stats.getNumInvocations()); - builder.field(GetRollupJobResponse.BULK_LATENCY.getPreferredName()); - toXContent(stats.getBulkLatency(), builder, params); - builder.field(GetRollupJobResponse.SEARCH_LATENCY.getPreferredName()); - toXContent(stats.getSearchLatency(), builder, params); - builder.field(GetRollupJobResponse.BULK_FAILURES.getPreferredName(), stats.getBulkFailures()); + builder.field(GetRollupJobResponse.INDEX_TIME_IN_MS.getPreferredName(), stats.getIndexTime()); + builder.field(GetRollupJobResponse.INDEX_TOTAL.getPreferredName(), stats.getIndexTotal()); + builder.field(GetRollupJobResponse.INDEX_FAILURES.getPreferredName(), stats.getIndexFailures()); + builder.field(GetRollupJobResponse.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime()); + builder.field(GetRollupJobResponse.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal()); builder.field(GetRollupJobResponse.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures()); builder.endObject(); } - public void toXContent(GetRollupJobResponse.StatsAccumulator stats, XContentBuilder builder, - ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(GetRollupJobResponse.MIN.getPreferredName(), stats.getMin()); - builder.field(GetRollupJobResponse.MAX.getPreferredName(), 
stats.getMax()); - builder.field(GetRollupJobResponse.AVG.getPreferredName(), stats.getAvg()); - builder.field(GetRollupJobResponse.TOTAL.getPreferredName(), stats.getTotal()); - builder.field(GetRollupJobResponse.COUNT.getPreferredName(), stats.getCount()); - builder.endObject(); - } } diff --git a/docs/reference/rollup/apis/get-job.asciidoc b/docs/reference/rollup/apis/get-job.asciidoc index 9c1da9cc3b6de..deb369907d8ad 100644 --- a/docs/reference/rollup/apis/get-job.asciidoc +++ b/docs/reference/rollup/apis/get-job.asciidoc @@ -102,22 +102,12 @@ Which will yield the following response: "documents_processed" : 0, "rollups_indexed" : 0, "trigger_count" : 0, - "bulk_failures": 0, - "bulk_latency_in_ms": { - "count": 0, - "total": 0, - "min": 0, - "max": 0, - "avg": 0.0 - }, + "index_failures": 0, + "index_time_in_ms": 0, + "index_total": 0, "search_failures": 0, - "search_latency_in_ms": { - "count": 0, - "total": 0, - "min": 0, - "max": 0, - "avg": 0.0 - } + "search_time_in_ms": 0, + "search_total": 0 } } ] @@ -238,22 +228,12 @@ Which will yield the following response: "documents_processed" : 0, "rollups_indexed" : 0, "trigger_count" : 0, - "bulk_failures": 0, - "bulk_latency": { - "count": 0, - "total": 0.0, - "min": 0.0, - "max": 0.0, - "avg": 0.0 - }, + "index_failures": 0, + "index_time_in_ms": 0, + "index_total": 0, "search_failures": 0, - "search_latency": { - "count": 0, - "total": 0.0, - "min": 0.0, - "max": 0.0, - "avg": 0.0 - } + "search_time_in_ms": 0, + "search_total": 0 } }, { @@ -303,22 +283,12 @@ Which will yield the following response: "documents_processed" : 0, "rollups_indexed" : 0, "trigger_count" : 0, - "bulk_failures": 0, - "bulk_latency": { - "count": 0, - "total": 0.0, - "min": 0.0, - "max": 0.0, - "avg": 0.0 - }, + "index_failures": 0, + "index_time_in_ms": 0, + "index_total": 0, "search_failures": 0, - "search_latency": { - "count": 0, - "total": 0.0, - "min": 0.0, - "max": 0.0, - "avg": 0.0 - } + "search_time_in_ms": 0, + "search_total": 0 } } ] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java index 636b9cfd355a5..4384144d235f3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java @@ -154,9 +154,9 @@ public synchronized boolean maybeTriggerAsyncJob(long now) { executor.execute(() -> { try { stats.markStartSearch(); - doNextSearch(buildSearchRequest(), ActionListener.wrap(this::onSearchResponse, exc -> finishWithFailure(exc))); + doNextSearch(buildSearchRequest(), ActionListener.wrap(this::onSearchResponse, this::finishWithSearchFailure)); } catch (Exception e) { - finishWithFailure(e); + finishWithSearchFailure(e); } }); logger.debug("Beginning to index [" + getJobId() + "], state: [" + currentState + "]"); @@ -257,7 +257,13 @@ public synchronized boolean maybeTriggerAsyncJob(long now) { */ protected abstract void onAbort(); - private void finishWithFailure(Exception exc) { + private void finishWithSearchFailure(Exception exc) { + stats.incrementSearchFailures(); + doSaveState(finishAndSetState(), position.get(), () -> onFailure(exc)); + } + + private void finishWithIndexingFailure(Exception exc) { + stats.incrementIndexingFailures(); doSaveState(finishAndSetState(), position.get(), () -> onFailure(exc)); } @@ -322,7 +328,7 @@ private void 
onSearchResponse(SearchResponse searchResponse) { // TODO this might be a valid case, e.g. if implementation filters assert bulkRequest.requests().size() > 0; - stats.markStartBulk(); + stats.markStartIndexing(); doNextBulk(bulkRequest, ActionListener.wrap(bulkResponse -> { // TODO we should check items in the response and move after accordingly to // resume the failing buckets ? @@ -338,23 +344,16 @@ private void onSearchResponse(SearchResponse searchResponse) { position.set(newPosition); onBulkResponse(bulkResponse, newPosition); - }, exc -> { - stats.incrementBulkFailures(); - finishWithFailure(exc); - })); + }, this::finishWithIndexingFailure)); } catch (Exception e) { - stats.incrementSearchFailures(); - finishWithFailure(e); + finishWithSearchFailure(e); } } private void onBulkResponse(BulkResponse response, JobPosition position) { - stats.markEndBulk(); + stats.markEndIndexing(); try { - ActionListener listener = ActionListener.wrap(this::onSearchResponse, e -> { - stats.incrementSearchFailures(); - finishWithFailure(e); - }); + ActionListener listener = ActionListener.wrap(this::onSearchResponse, this::finishWithSearchFailure); // TODO probably something more intelligent than every-50 is needed if (stats.getNumPages() > 0 && stats.getNumPages() % 50 == 0) { doSaveState(IndexerState.INDEXING, position, () -> doNextSearch(buildSearchRequest(), listener)); @@ -362,8 +361,7 @@ private void onBulkResponse(BulkResponse response, JobPosition position) { doNextSearch(buildSearchRequest(), listener); } } catch (Exception e) { - stats.incrementBulkFailures(); - finishWithFailure(e); + finishWithIndexingFailure(e); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java index 5567c6ca8c3ae..414e9ecdfc6c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java @@ -28,26 +28,31 @@ public abstract class IndexerJobStats implements ToXContentObject, Writeable { protected long numInputDocuments = 0; protected long numOuputDocuments = 0; protected long numInvocations = 0; - protected StatsAccumulator bulkLatency = new StatsAccumulator(); - protected StatsAccumulator searchLatency = new StatsAccumulator(); - protected long bulkFailures = 0; + protected long indexTime = 0; + protected long searchTime = 0; + protected long indexTotal = 0; + protected long searchTotal = 0; + protected long indexFailures = 0; protected long searchFailures = 0; - private long startBulkTime; + private long startIndexTime; private long startSearchTime; public IndexerJobStats() { } public IndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations, - StatsAccumulator bulkLatency, StatsAccumulator searchLatency, long bulkFailures, long searchFailures) { + long indexTime, long searchTime, long indexTotal, long searchTotal, + long indexFailures, long searchFailures) { this.numPages = numPages; this.numInputDocuments = numInputDocuments; this.numOuputDocuments = numOuputDocuments; this.numInvocations = numInvocations; - this.bulkLatency = bulkLatency; - this.searchLatency = searchLatency; - this.bulkFailures = bulkFailures; + this.indexTime = indexTime; + this.searchTime = searchTime; + this.indexTotal = indexTotal; + this.searchTotal = searchTotal; + this.indexFailures = indexFailures; 
this.searchFailures = searchFailures; } @@ -58,9 +63,11 @@ public IndexerJobStats(StreamInput in) throws IOException { this.numInvocations = in.readVLong(); // TODO change this after backport if (in.getVersion().onOrAfter(Version.CURRENT)) { - this.bulkLatency = new StatsAccumulator(in); - this.searchLatency = new StatsAccumulator(in); - this.bulkFailures = in.readVLong(); + this.indexTime = in.readVLong(); + this.searchTime = in.readVLong(); + this.indexTotal = in.readVLong(); + this.searchTotal = in.readVLong(); + this.indexFailures = in.readVLong(); this.searchFailures = in.readVLong(); } } @@ -81,20 +88,28 @@ public long getOutputDocuments() { return numOuputDocuments; } - public long getBulkFailures() { - return bulkFailures; + public long getIndexFailures() { + return indexFailures; } public long getSearchFailures() { return searchFailures; } - public StatsAccumulator getBulkLatency() { - return bulkLatency; + public long getIndexTime() { + return indexTime; } - public StatsAccumulator getSearchLatency() { - return searchLatency; + public long getSearchTime() { + return searchTime; + } + + public long getIndexTotal() { + return indexTotal; + } + + public long getSearchTotal() { + return searchTotal; } public void incrementNumPages(long n) { @@ -117,20 +132,21 @@ public void incrementNumOutputDocuments(long n) { numOuputDocuments += n; } - public void incrementBulkFailures() { - this.bulkFailures += 1; + public void incrementIndexingFailures() { + this.indexFailures += 1; } public void incrementSearchFailures() { this.searchFailures += 1; } - public void markStartBulk() { - this.startBulkTime = System.nanoTime(); + public void markStartIndexing() { + this.startIndexTime = System.nanoTime(); } - public void markEndBulk() { - bulkLatency.add((System.nanoTime() - startBulkTime) / 1000000); + public void markEndIndexing() { + indexTime += ((System.nanoTime() - startIndexTime) / 1000000); + indexTotal += 1; } public void markStartSearch() { @@ -138,7 +154,8 @@ public void markStartSearch() { } public void markEndSearch() { - searchLatency.add((System.nanoTime() - startSearchTime) / 1000000); + searchTime += ((System.nanoTime() - startSearchTime) / 1000000); + searchTotal += 1; } @Override @@ -149,9 +166,11 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(numInvocations); // TODO change after backport if (out.getVersion().onOrAfter(Version.CURRENT)) { - bulkLatency.writeTo(out); - searchLatency.writeTo(out); - out.writeVLong(bulkFailures); + out.writeVLong(indexTime); + out.writeVLong(searchTime); + out.writeVLong(indexTotal); + out.writeVLong(searchTotal); + out.writeVLong(indexFailures); out.writeVLong(searchFailures); } } @@ -169,18 +188,20 @@ public boolean equals(Object other) { IndexerJobStats that = (IndexerJobStats) other; return Objects.equals(this.numPages, that.numPages) - && Objects.equals(this.numInputDocuments, that.numInputDocuments) - && Objects.equals(this.numOuputDocuments, that.numOuputDocuments) - && Objects.equals(this.numInvocations, that.numInvocations) - && Objects.equals(this.bulkLatency, that.bulkLatency) - && Objects.equals(this.searchLatency, that.searchLatency) - && Objects.equals(this.bulkFailures, that.bulkFailures) - && Objects.equals(this.searchFailures, that.searchFailures); + && Objects.equals(this.numInputDocuments, that.numInputDocuments) + && Objects.equals(this.numOuputDocuments, that.numOuputDocuments) + && Objects.equals(this.numInvocations, that.numInvocations) + && Objects.equals(this.indexTime, that.indexTime) + && 
Objects.equals(this.searchTime, that.searchTime) + && Objects.equals(this.indexFailures, that.indexFailures) + && Objects.equals(this.searchFailures, that.searchFailures) + && Objects.equals(this.indexTotal, that.indexTotal) + && Objects.equals(this.searchTotal, that.searchTotal); } @Override public int hashCode() { return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations, - bulkLatency, searchLatency, bulkFailures, searchFailures); + indexTime, searchTime, indexFailures, searchFailures, indexTotal, searchTotal); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/StatsAccumulator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/StatsAccumulator.java deleted file mode 100644 index 363faa5168ca4..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/StatsAccumulator.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.core.indexing; - -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentFragment; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Helper class to collect min, max, avg and total statistics for a quantity. - * - * Derived from ML's version. Ttweaked to be xcontent/writeable and - * handle "empty" scenario differently - */ -public class StatsAccumulator implements Writeable, ToXContentFragment { - - private static final String NAME = "stats_accumulator"; - private static final ParseField MIN = new ParseField("min"); - private static final ParseField MAX = new ParseField("max"); - private static final ParseField AVG = new ParseField("avg"); - private static final ParseField COUNT = new ParseField("count"); - private static final ParseField TOTAL = new ParseField("total"); - - public static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(NAME, true, - args -> new StatsAccumulator((long) args[0], (long) args[1], (long) args[2], (long) args[3])); - - static { - PARSER.declareLong(constructorArg(), COUNT); - PARSER.declareLong(constructorArg(), TOTAL); - PARSER.declareLong(constructorArg(), MIN); - PARSER.declareLong(constructorArg(), MAX); - PARSER.declareLong(constructorArg(), AVG); // We parse but don't actually use the avg - } - - private long count = 0; - private long total = 0; - private long min = 0; - private long max = 0; - - public StatsAccumulator() { - } - - public StatsAccumulator(StreamInput in) throws IOException { - count = in.readVLong(); - total = in.readVLong(); - min = in.readVLong(); - max = in.readVLong(); - } - - private StatsAccumulator(long count, long total, long min, long max) { - this.count = count; - this.total = total; - this.min = min; - this.max = max; - } - - public void add(long value) { - min = count == 0 ? value : Math.min(min, value); - max = count == 0 ? 
value : Math.max(max, value); - count += 1; - total += value; - } - - public long getCount() { - return count; - } - - public long getMin() { - return count == 0 ? 0 : min; - } - - public long getMax() { - return count == 0 ? 0 : max; - } - - public double getAvg() { - return count == 0 ? 0.0 : (double) total / (double) count; - } - - public long getTotal() { - return total; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(COUNT.getPreferredName(), count); - builder.field(TOTAL.getPreferredName(), total); - builder.field(MIN.getPreferredName(), getMin()); - builder.field(MAX.getPreferredName(), getMax()); - builder.field(AVG.getPreferredName(), getAvg()); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVLong(count); - out.writeVLong(total); - out.writeVLong(min); - out.writeVLong(max); - } - - @Override - public int hashCode() { - return Objects.hash(count, total, min, max); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - StatsAccumulator other = (StatsAccumulator) obj; - return Objects.equals(count, other.count) - && Objects.equals(total, other.total) - && Objects.equals(min, other.min) - && Objects.equals(max, other.max); - } -} - diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStats.java index 2d6a4860f601c..f038228ae76a9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStats.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.indexing.IndexerJobStats; -import org.elasticsearch.xpack.core.indexing.StatsAccumulator; import java.io.IOException; @@ -26,24 +25,28 @@ public class RollupIndexerJobStats extends IndexerJobStats { private static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); private static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("rollups_indexed"); private static ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); - private static ParseField BULK_LATENCY = new ParseField("bulk_latency_in_ms"); - private static ParseField SEARCH_LATENCY = new ParseField("search_latency_in_ms"); + private static ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms"); + private static ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms"); + private static ParseField INDEX_TOTAL = new ParseField("index_total"); + private static ParseField SEARCH_TOTAL = new ParseField("search_total"); private static ParseField SEARCH_FAILURES = new ParseField("search_failures"); - private static ParseField BULK_FAILURES = new ParseField("bulk_failures"); + private static ParseField INDEX_FAILURES = new ParseField("index_failures"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME.getPreferredName(), args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3], - (StatsAccumulator) args[4], (StatsAccumulator) args[5], (long) args[6], (long) args[7])); + (long) args[4], (long) args[5], (long) 
args[6], (long) args[7], (long) args[8], (long) args[9])); static { PARSER.declareLong(constructorArg(), NUM_PAGES); PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); - PARSER.declareObject(constructorArg(), StatsAccumulator.PARSER::apply, BULK_LATENCY); - PARSER.declareObject(constructorArg(), StatsAccumulator.PARSER::apply, SEARCH_LATENCY); - PARSER.declareLong(constructorArg(), BULK_FAILURES); + PARSER.declareLong(constructorArg(), INDEX_TIME_IN_MS); + PARSER.declareLong(constructorArg(), SEARCH_TIME_IN_MS); + PARSER.declareLong(constructorArg(), INDEX_TOTAL); + PARSER.declareLong(constructorArg(), SEARCH_TOTAL); + PARSER.declareLong(constructorArg(), INDEX_FAILURES); PARSER.declareLong(constructorArg(), SEARCH_FAILURES); } @@ -52,9 +55,10 @@ public RollupIndexerJobStats() { } public RollupIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations, - StatsAccumulator bulkLatency, StatsAccumulator searchLatency, long bulkFailures, + long indexTime, long searchTime, long indexTotal, long searchTotal, long indexFailures, long searchFailures) { - super(numPages, numInputDocuments, numOuputDocuments, numInvocations, bulkLatency, searchLatency, bulkFailures, searchFailures); + super(numPages, numInputDocuments, numOuputDocuments, numInvocations, indexTime, searchTime, + indexTotal, searchTotal, indexFailures, searchFailures); } public RollupIndexerJobStats(StreamInput in) throws IOException { @@ -68,17 +72,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(NUM_INPUT_DOCUMENTS.getPreferredName(), numInputDocuments); builder.field(NUM_OUTPUT_DOCUMENTS.getPreferredName(), numOuputDocuments); builder.field(NUM_INVOCATIONS.getPreferredName(), numInvocations); - if (bulkLatency != null) { - builder.startObject(BULK_LATENCY.getPreferredName()); - bulkLatency.toXContent(builder, params); - builder.endObject(); - } - if (searchLatency != null) { - builder.startObject(SEARCH_LATENCY.getPreferredName()); - searchLatency.toXContent(builder, params); - builder.endObject(); - } - builder.field(BULK_FAILURES.getPreferredName(), bulkFailures); + builder.field(INDEX_TIME_IN_MS.getPreferredName(), indexTime); + builder.field(INDEX_TOTAL.getPreferredName(), indexTotal); + builder.field(INDEX_FAILURES.getPreferredName(), indexFailures); + builder.field(SEARCH_TIME_IN_MS.getPreferredName(), searchTime); + builder.field(SEARCH_TOTAL.getPreferredName(), searchTotal); builder.field(SEARCH_FAILURES.getPreferredName(), searchFailures); builder.endObject(); return builder; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java index bba472eafe197..cca60bbf33eec 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.indexing.IndexerState; -import org.elasticsearch.xpack.core.indexing.StatsAccumulator; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import 
org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction; @@ -43,8 +42,8 @@ protected GetRollupJobsAction.JobWrapper createTestInstance() { return new GetRollupJobsAction.JobWrapper(ConfigTestHelpers.randomRollupJobConfig(random()), new RollupIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), - randomNonNegativeLong(), randomNonNegativeLong(), new StatsAccumulator(), new StatsAccumulator(), - randomNonNegativeLong(), randomNonNegativeLong()), + randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), new RollupJobStatus(state, Collections.emptyMap(), randomBoolean())); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStatsTests.java index 5d064fe869cb6..9cb57c3ecdc60 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupIndexerJobStatsTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; -import org.elasticsearch.xpack.core.indexing.StatsAccumulator; public class RollupIndexerJobStatsTests extends AbstractSerializingTestCase { @@ -29,8 +28,8 @@ protected RollupIndexerJobStats doParseInstance(XContentParser parser) { public static RollupIndexerJobStats randomStats() { return new RollupIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), - randomNonNegativeLong(), randomNonNegativeLong(), new StatsAccumulator(), new StatsAccumulator(), - randomNonNegativeLong(), randomNonNegativeLong()); + randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); } @Override diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index 0f4cfd43b1f95..cbf85e84b16c3 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; -import org.elasticsearch.xpack.core.indexing.StatsAccumulator; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; @@ -67,7 +66,7 @@ public class IndexerUtilsTests extends AggregatorTestCase { public void testMissingFields() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); + RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0); String timestampField = "the_histo"; String valueField = 
"the_avg"; @@ -131,7 +130,7 @@ public void testMissingFields() throws IOException { public void testCorrectFields() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); + RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0); String timestampField = "the_histo"; String valueField = "the_avg"; @@ -199,7 +198,7 @@ public void testCorrectFields() throws IOException { public void testNumericTerms() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats= new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); + RollupIndexerJobStats stats= new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0); String valueField = "the_avg"; @@ -255,7 +254,7 @@ public void testNumericTerms() throws IOException { public void testEmptyCounts() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); + RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0); String timestampField = "ts"; String valueField = "the_avg"; @@ -491,7 +490,7 @@ public void testNullKeys() { public void testMissingBuckets() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, new StatsAccumulator(), new StatsAccumulator(), 0, 0); + RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0); String metricField = "metric_field"; String valueField = "value_field"; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java index fbec9100a22dd..baa35ff2fca79 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java @@ -20,11 +20,11 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJob; -import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; import org.mockito.stubbing.Answer; @@ -42,7 +42,6 @@ import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.startsWith; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -229,11 +228,10 @@ public void testStarted() throws Exception { ESTestCase.awaitBusy(() -> indexer.getState() == IndexerState.STARTED); assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumPages(), equalTo(1L)); - assertThat(indexer.getStats().getBulkFailures(), equalTo(0L)); + 
assertThat(indexer.getStats().getIndexFailures(), equalTo(0L)); assertThat(indexer.getStats().getSearchFailures(), equalTo(0L)); - // We can't check the actual min/max/avg stats since they may be too fast to register - // but we can check the count having incremented - assertThat(indexer.getStats().getSearchLatency().getCount(), greaterThan(0L)); + assertThat(indexer.getStats().getSearchTotal(), equalTo(1L)); + assertThat(indexer.getStats().getIndexTotal(), equalTo(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -263,11 +261,9 @@ protected void onFinish() { assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumPages(), equalTo(1L)); - assertThat(indexer.getStats().getBulkFailures(), equalTo(0L)); + assertThat(indexer.getStats().getIndexFailures(), equalTo(0L)); assertThat(indexer.getStats().getSearchFailures(), equalTo(0L)); - // We can't check the actual min/max/avg stats since they may be too fast to register - // but we can check the count having incremented - assertThat(indexer.getStats().getSearchLatency().getCount(), greaterThan(0L)); + assertThat(indexer.getStats().getSearchTotal(), equalTo(1L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -917,7 +913,7 @@ protected void doNextBulk(BulkRequest request, ActionListener next assertThat(indexer.getStats().getNumPages(), equalTo(1L)); // There should be one recorded failure - assertThat(indexer.getStats().getBulkFailures(), equalTo(1L)); + assertThat(indexer.getStats().getIndexFailures(), equalTo(1L)); // Note: no docs were indexed assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml index 9769e7efb2305..95ea6a57b37aa 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml @@ -70,20 +70,12 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 - bulk_failures: 0 search_failures: 0 - bulk_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 - search_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 + index_failures: 0 + index_time_in_ms: 0 + index_total: 0 + search_time_in_ms: 0 + search_total: 0 status: job_state: "stopped" upgraded_doc_id: true @@ -131,20 +123,12 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 - bulk_failures: 0 search_failures: 0 - bulk_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 - search_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 + index_failures: 0 + index_time_in_ms: 0 + index_total: 0 + search_time_in_ms: 0 + search_total: 0 status: job_state: "stopped" upgraded_doc_id: true @@ -192,20 +176,12 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 - bulk_failures: 0 search_failures: 0 - bulk_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 - search_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 + index_failures: 0 + index_time_in_ms: 0 + index_total: 0 + search_time_in_ms: 0 + search_total: 0 status: job_state: "stopped" upgraded_doc_id: true diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml index 
2e929ac19cb4f..3e03ac924ec89 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -71,20 +71,12 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 - bulk_failures: 0 search_failures: 0 - bulk_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 - search_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 + index_failures: 0 + index_time_in_ms: 0 + index_total: 0 + search_time_in_ms: 0 + search_total: 0 status: job_state: "stopped" upgraded_doc_id: true @@ -222,20 +214,12 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 - bulk_failures: 0 search_failures: 0 - bulk_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 - search_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 + index_failures: 0 + index_time_in_ms: 0 + index_total: 0 + search_time_in_ms: 0 + search_total: 0 status: job_state: "stopped" upgraded_doc_id: true diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml index 45c247b1d2fb3..7f3f0347ec0df 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml @@ -71,20 +71,12 @@ setup: documents_processed: 0 rollups_indexed: 0 trigger_count: 0 - bulk_failures: 0 search_failures: 0 - bulk_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 - search_latency_in_ms: - count: 0 - min: 0 - max: 0 - avg: 0.0 - total: 0 + index_failures: 0 + index_time_in_ms: 0 + index_total: 0 + search_time_in_ms: 0 + search_total: 0 status: job_state: "stopped" upgraded_doc_id: true From 1856fbe1f69501539189e7bed808f77ae56ed606 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Mon, 26 Nov 2018 12:05:30 -0500 Subject: [PATCH 3/3] Remove dead ParseFields --- .../elasticsearch/client/rollup/GetRollupJobResponse.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java index 3f48c9a36d9b3..c01914ed2fd94 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java @@ -57,11 +57,6 @@ public class GetRollupJobResponse { static final ParseField SEARCH_TOTAL = new ParseField("search_total"); static final ParseField SEARCH_FAILURES = new ParseField("search_failures"); static final ParseField INDEX_FAILURES = new ParseField("index_failures"); - static final ParseField MIN = new ParseField("min"); - static final ParseField MAX = new ParseField("max"); - static final ParseField AVG = new ParseField("avg"); - static final ParseField COUNT = new ParseField("count"); - static final ParseField TOTAL = new ParseField("total"); private List jobs;
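For reference, the counters introduced in this series follow an accumulate-on-completion pattern: each phase records a start timestamp, and when the phase's success callback runs, the elapsed wall-clock time (converted to milliseconds) is added to the corresponding *_time_in_ms field while the matching *_total field is bumped by one; the failure paths increment *_failures instead. Below is a minimal, self-contained Java sketch of that pattern for the search phase only. The class name is hypothetical and it is an illustration of the pattern, not the patched IndexerJobStats file itself, though the method names mirror it.

import java.util.concurrent.TimeUnit;

/**
 * Sketch of one per-phase counter pair: searchTimeMs is reported as
 * search_time_in_ms and searchTotal as search_total.
 */
class PhaseTimingSketch {
    private long searchTimeMs = 0;   // summed elapsed time across completed searches
    private long searchTotal = 0;    // number of completed searches
    private long startSearchNanos;

    void markStartSearch() {
        // Record the start of the phase; called just before the async search is issued.
        startSearchNanos = System.nanoTime();
    }

    void markEndSearch() {
        // Elapsed wall-clock nanoseconds converted to milliseconds and accumulated,
        // matching IndexerJobStats#markEndSearch in this patch.
        searchTimeMs += TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startSearchNanos);
        searchTotal += 1;
    }

    long getSearchTime() {
        return searchTimeMs;
    }

    long getSearchTotal() {
        return searchTotal;
    }
}

Because the markEnd* calls sit on the success callbacks, a job that repeatedly fails shows growing search_failures or index_failures without a matching increase in the *_total counters, which is consistent with the updated RollupIndexerStateTests assertions.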