From dbc10699e45e8b17f0db56e0cf771d6cef7d180c Mon Sep 17 00:00:00 2001 From: Dave Li Date: Thu, 30 Jun 2016 12:11:29 -0700 Subject: [PATCH 01/10] add first and last aggregator --- .../aggregator/LongMaxBenchmark.java | 38 +++ .../benchmark/query/GroupByBenchmark.java | 6 +- .../io/druid/jackson/AggregatorsModule.java | 6 +- .../aggregation/DoubleFirstAggregator.java | 91 ++++++ .../DoubleFirstBufferAggregator.java | 79 +++++ .../aggregation/DoubleLastAggregator.java | 88 ++++++ .../DoubleLastBufferAggregator.java | 77 +++++ .../aggregation/FirstAggregatorFactory.java | 284 ++++++++++++++++++ .../aggregation/LastAggregatorFactory.java | 277 +++++++++++++++++ .../aggregation/LongFirstAggregator.java | 89 ++++++ .../LongFirstBufferAggregator.java | 78 +++++ .../query/aggregation/LongLastAggregator.java | 86 ++++++ .../aggregation/LongLastBufferAggregator.java | 76 +++++ .../io/druid/query/QueryRunnerTestHelper.java | 1 + .../aggregation/FirstAggregationTest.java | 206 +++++++++++++ .../aggregation/LastAggregationTest.java | 227 ++++++++++++++ .../query/groupby/GroupByQueryRunnerTest.java | 45 +++ 17 files changed, 1751 insertions(+), 3 deletions(-) create mode 100644 benchmarks/src/main/java/io/druid/benchmark/aggregator/LongMaxBenchmark.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java create mode 100644 
processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java create mode 100644 processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java create mode 100644 processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java diff --git a/benchmarks/src/main/java/io/druid/benchmark/aggregator/LongMaxBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/aggregator/LongMaxBenchmark.java new file mode 100644 index 000000000000..025bbd566725 --- /dev/null +++ b/benchmarks/src/main/java/io/druid/benchmark/aggregator/LongMaxBenchmark.java @@ -0,0 +1,38 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.benchmark.aggregator; + +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +@State(Scope.Benchmark) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +public class LongMaxBenchmark +{ + @Setup + public void setup() + { + + } +} diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java index a520ff338d17..cc3bedceb39a 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java @@ -51,6 +51,8 @@ import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryToolChest; import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.FirstAggregatorFactory; +import io.druid.query.aggregation.LastAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.query.dimension.DefaultDimensionSpec; @@ -105,8 +107,8 @@ @State(Scope.Benchmark) @Fork(jvmArgsPrepend = "-server", value = 1) -@Warmup(iterations = 10) -@Measurement(iterations = 25) +@Warmup(iterations = 0) +@Measurement(iterations = 1) public class GroupByBenchmark { @Param({"4"}) diff --git a/processing/src/main/java/io/druid/jackson/AggregatorsModule.java b/processing/src/main/java/io/druid/jackson/AggregatorsModule.java index 9d18ba252fa1..cd48b623dfb9 100644 --- a/processing/src/main/java/io/druid/jackson/AggregatorsModule.java +++ b/processing/src/main/java/io/druid/jackson/AggregatorsModule.java @@ -29,8 +29,10 @@ import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import 
io.druid.query.aggregation.FilteredAggregatorFactory; +import io.druid.query.aggregation.FirstAggregatorFactory; import io.druid.query.aggregation.HistogramAggregatorFactory; import io.druid.query.aggregation.JavaScriptAggregatorFactory; +import io.druid.query.aggregation.LastAggregatorFactory; import io.druid.query.aggregation.LongMaxAggregatorFactory; import io.druid.query.aggregation.LongMinAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -75,7 +77,9 @@ public AggregatorsModule() @JsonSubTypes.Type(name = "histogram", value = HistogramAggregatorFactory.class), @JsonSubTypes.Type(name = "hyperUnique", value = HyperUniquesAggregatorFactory.class), @JsonSubTypes.Type(name = "cardinality", value = CardinalityAggregatorFactory.class), - @JsonSubTypes.Type(name = "filtered", value = FilteredAggregatorFactory.class) + @JsonSubTypes.Type(name = "filtered", value = FilteredAggregatorFactory.class), + @JsonSubTypes.Type(name = "first", value = FirstAggregatorFactory.class), + @JsonSubTypes.Type(name = "last", value = LastAggregatorFactory.class) }) public static interface AggregatorFactoryMixin { diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java new file mode 100644 index 000000000000..0dc0cf06ed5d --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java @@ -0,0 +1,91 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import com.metamx.common.Pair; +import io.druid.segment.FloatColumnSelector; +import io.druid.segment.LongColumnSelector; + +public class DoubleFirstAggregator implements Aggregator +{ + + private final FloatColumnSelector valueSelector; + private final LongColumnSelector timeSelector; + private final String name; + + long firstTime; + double firstValue; + + public DoubleFirstAggregator(String name, FloatColumnSelector valueSelector, LongColumnSelector timeSelector) + { + this.name = name; + this.valueSelector = valueSelector; + this.timeSelector = timeSelector; + + reset(); + } + + @Override + public void aggregate() + { + if (firstTime == -1) { + firstTime = timeSelector.get(); + firstValue = valueSelector.get(); + } + } + + @Override + public void reset() + { + firstTime = -1; + firstValue = 0; + } + + @Override + public Object get() + { + return new Pair<>(firstTime, firstValue); + } + + @Override + public float getFloat() + { + return (float) firstValue; + } + + @Override + public String getName() + { + return name; + } + + @Override + public void close() + { + + } + + @Override + public long getLong() + { + return (long) firstValue; + } +} + diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java new file mode 100644 index 000000000000..3c504960a6e3 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java @@ -0,0 +1,79 @@ 
+/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import com.google.common.primitives.Longs; +import com.metamx.common.Pair; +import io.druid.segment.FloatColumnSelector; +import io.druid.segment.LongColumnSelector; + +import java.nio.ByteBuffer; + +public class DoubleFirstBufferAggregator implements BufferAggregator +{ + private final LongColumnSelector timeSelector; + private final FloatColumnSelector valueSelector; + + public DoubleFirstBufferAggregator(LongColumnSelector timeSelector, FloatColumnSelector valueSelector) + { + this.timeSelector = timeSelector; + this.valueSelector = valueSelector; + } + + @Override + public void init(ByteBuffer buf, int position) + { + buf.putLong(position, -1); + buf.putLong(position + Longs.BYTES, 0); + } + + @Override + public void aggregate(ByteBuffer buf, int position) + { + if (buf.getLong(position) == -1) { + buf.putLong(position, timeSelector.get()); + buf.putDouble(position + Longs.BYTES, valueSelector.get()); + } + } + + @Override + public Object get(ByteBuffer buf, int position) + { + return new Pair<>(buf.getLong(position), buf.getDouble(position + Longs.BYTES)); + } + + @Override + public float getFloat(ByteBuffer 
buf, int position) + { + return (float) buf.getDouble(position + Longs.BYTES); + } + + @Override + public long getLong(ByteBuffer buf, int position) + { + return (long) buf.getDouble(position + Longs.BYTES); + } + + @Override + public void close() + { + // no resources to cleanup + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java new file mode 100644 index 000000000000..d92249d2a3b5 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java @@ -0,0 +1,88 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation; + +import com.metamx.common.Pair; +import io.druid.segment.FloatColumnSelector; +import io.druid.segment.LongColumnSelector; + +public class DoubleLastAggregator implements Aggregator +{ + + private final FloatColumnSelector valueSelector; + private final LongColumnSelector timeSelector; + private final String name; + + long lastTime; + double lastValue; + + public DoubleLastAggregator(String name, FloatColumnSelector valueSelector, LongColumnSelector timeSelector) + { + this.name = name; + this.valueSelector = valueSelector; + this.timeSelector = timeSelector; + + reset(); + } + + @Override + public void aggregate() + { + lastTime = timeSelector.get(); + lastValue = valueSelector.get(); + } + + @Override + public void reset() + { + lastTime = -1; + lastValue = 0; + } + + @Override + public Object get() + { + return new Pair<>(lastTime, lastValue); + } + + @Override + public float getFloat() + { + return (float) lastValue; + } + + @Override + public String getName() + { + return name; + } + + @Override + public void close() + { + + } + + @Override + public long getLong() + { + return (long) lastValue; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java new file mode 100644 index 000000000000..2d5431ef2b7b --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java @@ -0,0 +1,77 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import com.google.common.primitives.Longs; +import com.metamx.common.Pair; +import io.druid.segment.FloatColumnSelector; +import io.druid.segment.LongColumnSelector; + +import java.nio.ByteBuffer; + +public class DoubleLastBufferAggregator implements BufferAggregator +{ + private final LongColumnSelector timeSelector; + private final FloatColumnSelector valueSelector; + + public DoubleLastBufferAggregator(LongColumnSelector timeSelector, FloatColumnSelector valueSelector) + { + this.timeSelector = timeSelector; + this.valueSelector = valueSelector; + } + + @Override + public void init(ByteBuffer buf, int position) + { + buf.putLong(position, -1); + buf.putLong(position + Longs.BYTES, 0); + } + + @Override + public void aggregate(ByteBuffer buf, int position) + { + buf.putLong(position, timeSelector.get()); + buf.putDouble(position + Longs.BYTES, valueSelector.get()); + } + + @Override + public Object get(ByteBuffer buf, int position) + { + return new Pair<>(buf.getLong(position), buf.getDouble(position + Longs.BYTES)); + } + + @Override + public float getFloat(ByteBuffer buf, int position) + { + return (float) buf.getDouble(position + Longs.BYTES); + } + + @Override + public long getLong(ByteBuffer buf, int position) + { + return (long) buf.getDouble(position + Longs.BYTES); + } + + @Override + public void close() + { + // no resources to cleanup + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java 
b/processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java new file mode 100644 index 000000000000..1176086deb00 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java @@ -0,0 +1,284 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import com.google.common.primitives.Doubles; +import com.google.common.primitives.Longs; +import com.metamx.common.IAE; +import com.metamx.common.Pair; +import com.metamx.common.StringUtils; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ObjectColumnSelector; +import io.druid.segment.column.Column; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; + +public class FirstAggregatorFactory extends AggregatorFactory +{ + private final String fieldName; + private final String name; + private final String value; + + @JsonCreator + public FirstAggregatorFactory( + @JsonProperty("name") String name, + @JsonProperty("fieldName") final String fieldName, + @JsonProperty("value") String value + ) + { + Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); + Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); + Preconditions.checkArgument(value.equals("long") || value.equals("double"), "Must have a valid, non-null type"); + + this.name = name; + this.fieldName = fieldName; + this.value = value; + } + + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + if (value.equals("long")) { + return new LongFirstAggregator( + name, metricFactory.makeLongColumnSelector(fieldName), + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + ); + } else if (value.equals("double")) { + return new DoubleFirstAggregator( + name, metricFactory.makeFloatColumnSelector(fieldName), + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + ); + } + throw new IAE("undefined type"); + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + if (value.equals("long")) { + 
return new LongFirstBufferAggregator( + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeLongColumnSelector(fieldName) + ); + } else if (value.equals("double")) { + return new DoubleFirstBufferAggregator( + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeFloatColumnSelector(fieldName) + ); + } + throw new IAE("undefined type"); + } + + @Override + public Comparator getComparator() + { + return new Comparator() + { + @Override + public int compare(Object o1, Object o2) + { + return Longs.compare(((Pair) o1).lhs, ((Pair) o2).lhs); + } + }; + } + + @Override + public Object combine(Object lhs, Object rhs) + { + return (((Pair) lhs).lhs <= ((Pair) rhs).lhs) ? lhs : rhs; + } + + @Override + public AggregatorFactory getCombiningFactory() + { + return new FirstAggregatorFactory(name, name, value) { + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + if (value.equals("long")) { + return new LongFirstAggregator(name, null, null) { + @Override + public void aggregate() + { + if (firstTime == -1) { + Pair pair = (Pair)selector.get(); + firstTime = pair.lhs; + firstValue = pair.rhs; + } + } + }; + } else if (value.equals("double")) { + return new DoubleFirstAggregator(name, null, null) { + @Override + public void aggregate() + { + if (firstTime == -1) { + Pair pair = (Pair)selector.get(); + firstTime = pair.lhs; + firstValue = pair.rhs; + } + } + }; + } + throw new IAE("undefined type"); + } + }; + } + + @Override + public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException + { + if (other.getName().equals(this.getName()) && this.getClass() == other.getClass() && + other.getTypeName().equals(this.getTypeName())) { + return getCombiningFactory(); + } else { + throw new AggregatorFactoryNotMergeableException(this, other); + } + } + + 
@Override + public List getRequiredColumns() + { + return Arrays.asList(new FirstAggregatorFactory(fieldName, fieldName, value)); + } + + @Override + public Object deserialize(Object object) + { + return object; + } + + @Override + public Object finalizeComputation(Object object) + { + return ((Pair) object).rhs; + } + + @Override + @JsonProperty + public String getName() + { + return name; + } + + @JsonProperty + public String getFieldName() + { + return fieldName; + } + + @JsonProperty + public String getValue() + { + return value; + } + + @Override + public List requiredFields() + { + return Arrays.asList(Column.TIME_COLUMN_NAME, fieldName); + } + + @Override + public byte[] getCacheKey() + { + byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); + return ByteBuffer.allocate(1 + fieldNameBytes.length).put((byte) 0x10).put(fieldNameBytes).array(); + } + + @Override + public String getTypeName() + { + return value.equals("double") ? "float" : value; + } + + @Override + public int getMaxIntermediateSize() + { + if (value.equals("long")) { + return Longs.BYTES * 2; + } else if (value.equals("double")) { + return Longs.BYTES + Doubles.BYTES; + } + throw new IAE("undefined type"); + } + + @Override + public Object getAggregatorStartValue() + { + if (value.equals("long")) { + return new Pair<>(-1L, 0L); + } else if (value.equals("double")) { + return new Pair<>(-1L, 0D); + } + throw new IAE("undefined type"); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + FirstAggregatorFactory that = (FirstAggregatorFactory) o; + + if (!fieldName.equals(that.fieldName)) { + return false; + } + if (!name.equals(that.name)) { + return false; + } + if (!value.equals(that.value)) { + return false; + } + + return true; + } + + @Override + public int hashCode() + { + int result = name.hashCode(); + result = 31 * result + fieldName.hashCode(); + result = 31 * result + 
value.hashCode(); + return result; + } + + @Override + public String toString() + { + return "FirstAggregatorFactory{" + + "name='" + name + '\'' + + ", fieldName='" + fieldName + '\'' + + ", value='" + value + '\'' + + '}'; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java new file mode 100644 index 000000000000..493215598679 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java @@ -0,0 +1,277 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import com.google.common.primitives.Doubles; +import com.google.common.primitives.Longs; +import com.metamx.common.IAE; +import com.metamx.common.Pair; +import com.metamx.common.StringUtils; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ObjectColumnSelector; +import io.druid.segment.column.Column; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; + +public class LastAggregatorFactory extends AggregatorFactory +{ + private final String fieldName; + private final String name; + private final String value; + + @JsonCreator + public LastAggregatorFactory( + @JsonProperty("name") String name, + @JsonProperty("fieldName") final String fieldName, + @JsonProperty("value") String value + ) + { + Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); + Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); + Preconditions.checkArgument(value.equals("long") || value.equals("double"), "Must have a valid, non-null type"); + + this.name = name; + this.fieldName = fieldName; + this.value = value; + } + + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + if (value.equals("long")) { + return new LongLastAggregator( + name, metricFactory.makeLongColumnSelector(fieldName), + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + ); + } else if (value.equals("double")) { + return new DoubleLastAggregator( + name, metricFactory.makeFloatColumnSelector(fieldName), + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + ); + } + throw new IAE("undefined type"); + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + if (value.equals("long")) { + return 
new LongLastBufferAggregator( + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeLongColumnSelector(fieldName) + ); + } else if (value.equals("double")) { + return new DoubleLastBufferAggregator( + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeFloatColumnSelector(fieldName) + ); + } + throw new IAE("undefined type"); + } + + @Override + public Comparator getComparator() + { + return new Comparator() + { + @Override + public int compare(Object o1, Object o2) + { + return Longs.compare(((Pair) o1).lhs, ((Pair) o2).lhs); + } + }; + } + + @Override + public Object combine(Object lhs, Object rhs) + { + return (((Pair) lhs).lhs > ((Pair) rhs).lhs) ? lhs : rhs; + } + + @Override + public AggregatorFactory getCombiningFactory() + { + return new LastAggregatorFactory(name, name, value) { + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + if (value.equals("long")) { + return new LongLastAggregator(name, null, null) { + @Override + public void aggregate() + { + Pair pair = (Pair)selector.get(); + lastTime = pair.lhs; + lastValue = pair.rhs; + } + }; + } else if (value.equals("double")) { + return new DoubleLastAggregator(name, null, null) { + @Override + public void aggregate() + { + Pair pair = (Pair)selector.get(); + lastTime = pair.lhs; + lastValue = pair.rhs; + } + }; + } + throw new IAE("undefined type"); + } + }; + } + + @Override + public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException + { + if (other.getName().equals(this.getName()) && this.getClass() == other.getClass() && + other.getTypeName().equals(this.getTypeName())) { + return getCombiningFactory(); + } else { + throw new AggregatorFactoryNotMergeableException(this, other); + } + } + + @Override + public List getRequiredColumns() + { + return Arrays.asList(new 
LastAggregatorFactory(fieldName, fieldName, value)); + } + + @Override + public Object deserialize(Object object) + { + return object; + } + + @Override + public Object finalizeComputation(Object object) + { + return ((Pair) object).rhs; + } + + @Override + @JsonProperty + public String getName() + { + return name; + } + + @JsonProperty + public String getFieldName() + { + return fieldName; + } + + @JsonProperty + public String getValue() + { + return value; + } + + @Override + public List requiredFields() + { + return Arrays.asList(Column.TIME_COLUMN_NAME, fieldName); + } + + @Override + public byte[] getCacheKey() + { + byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); + return ByteBuffer.allocate(1 + fieldNameBytes.length).put((byte) 0x11).put(fieldNameBytes).array(); + } + + @Override + public String getTypeName() + { + return value.equals("double") ? "float" : value; + } + + @Override + public int getMaxIntermediateSize() + { + if (value.equals("long")) { + return Longs.BYTES * 2; + } else if (value.equals("double")) { + return Longs.BYTES + Doubles.BYTES; + } + throw new IAE("undefined type"); + } + + @Override + public Object getAggregatorStartValue() + { + if (value.equals("long")) { + return new Pair<>(-1L, 0L); + } else if (value.equals("double")) { + return new Pair<>(-1L, 0D); + } + throw new IAE("undefined type"); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + LastAggregatorFactory that = (LastAggregatorFactory) o; + + if (!fieldName.equals(that.fieldName)) { + return false; + } + if (!name.equals(that.name)) { + return false; + } + return value.equals(that.value); + + } + + @Override + public int hashCode() + { + int result = name.hashCode(); + result = 31 * result + fieldName.hashCode(); + result = 31 * result + value.hashCode(); + return result; + } + + @Override + public String toString() + { + return "LastAggregatorFactory{" 
+ + "name='" + name + '\'' + + ", fieldName='" + fieldName + '\'' + + ", value='" + value + '\'' + + '}'; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java b/processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java new file mode 100644 index 000000000000..fd98a12d8417 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java @@ -0,0 +1,89 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation; + +import com.metamx.common.Pair; +import io.druid.segment.LongColumnSelector; + +public class LongFirstAggregator implements Aggregator +{ + + private final LongColumnSelector valueSelector; + private final LongColumnSelector timeSelector; + private final String name; + + long firstTime; + long firstValue; + + public LongFirstAggregator(String name, LongColumnSelector valueSelector, LongColumnSelector timeSelector) + { + this.name = name; + this.valueSelector = valueSelector; + this.timeSelector = timeSelector; + + reset(); + } + + @Override + public void aggregate() + { + if (firstTime == -1) { + firstTime = timeSelector.get(); + firstValue = valueSelector.get(); + } + } + + @Override + public void reset() + { + firstTime = -1; + firstValue = 0; + } + + @Override + public Object get() + { + return new Pair<>(firstTime, firstValue); + } + + @Override + public float getFloat() + { + return (float) firstValue; + } + + @Override + public String getName() + { + return name; + } + + @Override + public void close() + { + + } + + @Override + public long getLong() + { + return firstValue; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java new file mode 100644 index 000000000000..fa6e4eedfc71 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java @@ -0,0 +1,78 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import com.google.common.primitives.Longs; +import com.metamx.common.Pair; +import io.druid.segment.LongColumnSelector; + +import java.nio.ByteBuffer; + +public class LongFirstBufferAggregator implements BufferAggregator +{ + private final LongColumnSelector timeSelector; + private final LongColumnSelector valueSelector; + + public LongFirstBufferAggregator(LongColumnSelector timeSelector, LongColumnSelector valueSelector) + { + this.timeSelector = timeSelector; + this.valueSelector = valueSelector; + } + + @Override + public void init(ByteBuffer buf, int position) + { + buf.putLong(position, -1); + buf.putLong(position + Longs.BYTES, 0); + } + + @Override + public void aggregate(ByteBuffer buf, int position) + { + if (buf.getLong(position) == -1) { + buf.putLong(position, timeSelector.get()); + buf.putLong(position + Longs.BYTES, valueSelector.get()); + } + } + + @Override + public Object get(ByteBuffer buf, int position) + { + return new Pair<>(buf.getLong(position), buf.getLong(position + Longs.BYTES)); + } + + @Override + public float getFloat(ByteBuffer buf, int position) + { + return (float) buf.getLong(position + Longs.BYTES); + } + + @Override + public long getLong(ByteBuffer buf, int position) + { + return buf.getLong(position + Longs.BYTES); + } + + @Override + public void close() + { + // no resources to cleanup + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java b/processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java new file 
mode 100644 index 000000000000..63926af68f39 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java @@ -0,0 +1,86 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import com.metamx.common.Pair; +import io.druid.segment.LongColumnSelector; + +public class LongLastAggregator implements Aggregator +{ + private final LongColumnSelector valueSelector; + private final LongColumnSelector timeSelector; + private final String name; + + long lastTime; + long lastValue; + + public LongLastAggregator(String name, LongColumnSelector valueSelector, LongColumnSelector timeSelector) + { + this.name = name; + this.valueSelector = valueSelector; + this.timeSelector = timeSelector; + + reset(); + } + + @Override + public void aggregate() + { + lastTime = timeSelector.get(); + lastValue = valueSelector.get(); + } + + @Override + public void reset() + { + lastTime = -1; + lastValue = 0; + } + + @Override + public Object get() + { + return new Pair<>(lastTime, lastValue); + } + + @Override + public float getFloat() + { + return (float) lastValue; + } + + @Override + public String getName() + { + return name; + } + + @Override + public void 
close() + { + + } + + @Override + public long getLong() + { + return lastValue; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java new file mode 100644 index 000000000000..1967d1e61bf9 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java @@ -0,0 +1,76 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation; + +import com.google.common.primitives.Longs; +import com.metamx.common.Pair; +import io.druid.segment.LongColumnSelector; + +import java.nio.ByteBuffer; + +public class LongLastBufferAggregator implements BufferAggregator +{ + private final LongColumnSelector timeSelector; + private final LongColumnSelector valueSelector; + + public LongLastBufferAggregator(LongColumnSelector timeSelector, LongColumnSelector valueSelector) + { + this.timeSelector = timeSelector; + this.valueSelector = valueSelector; + } + + @Override + public void init(ByteBuffer buf, int position) + { + buf.putLong(position, -1); + buf.putLong(position + Longs.BYTES, 0); + } + + @Override + public void aggregate(ByteBuffer buf, int position) + { + buf.putLong(position, timeSelector.get()); + buf.putLong(position + Longs.BYTES, valueSelector.get()); + } + + @Override + public Object get(ByteBuffer buf, int position) + { + return new Pair<>(buf.getLong(position), buf.getLong(position + Longs.BYTES)); + } + + @Override + public float getFloat(ByteBuffer buf, int position) + { + return (float) buf.getLong(position + Longs.BYTES); + } + + @Override + public long getLong(ByteBuffer buf, int position) + { + return buf.getLong(position + Longs.BYTES); + } + + @Override + public void close() + { + // no resources to cleanup + } +} diff --git a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java index 8cfe775c897e..86c033357bc2 100644 --- a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java @@ -106,6 +106,7 @@ public TableDataSource apply(@Nullable String input) public static final DateTime minTime = new DateTime("2011-01-12T00:00:00.000Z"); public static final QueryGranularity dayGran = QueryGranularities.DAY; + public static final QueryGranularity monthGran = QueryGranularities.MONTH; public 
static final QueryGranularity allGran = QueryGranularities.ALL; public static final String timeDimension = "__time"; public static final String marketDimension = "market"; diff --git a/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java new file mode 100644 index 000000000000..822e97e920ce --- /dev/null +++ b/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java @@ -0,0 +1,206 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation; + +import com.metamx.common.Pair; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.column.Column; +import org.easymock.EasyMock; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.nio.ByteBuffer; + +public class FirstAggregationTest +{ + private FirstAggregatorFactory doubleFirstAggFactory; + private FirstAggregatorFactory longFirstAggFactory; + private ColumnSelectorFactory colSelectorFactory; + private TestLongColumnSelector timeSelector; + private TestFloatColumnSelector floatSelector; + private TestLongColumnSelector longSelector; + + private long[] longValues = {62, 8, 54, 2}; + private float[] floatValues = {1.1f, 2.7f, 3.5f, 1.3f}; + private long[] times = {1467225096, 146722598, 1467225099, 1467225111}; + + public FirstAggregationTest() throws Exception + { + String doubleSpecJson = "{\"type\": \"first\", \"name\": \"billy\", \"fieldName\": \"nilly\", \"value\": \"double\"}"; + String longSpecJson = "{\"type\": \"first\", \"name\": \"bill\", \"fieldName\": \"nnn\", \"value\": \"long\"}"; + doubleFirstAggFactory = new DefaultObjectMapper().readValue(doubleSpecJson , FirstAggregatorFactory.class); + longFirstAggFactory = new DefaultObjectMapper().readValue(longSpecJson , FirstAggregatorFactory.class); + } + + @Before + public void setup() + { + timeSelector = new TestLongColumnSelector(times); + floatSelector = new TestFloatColumnSelector(floatValues); + longSelector = new TestLongColumnSelector(longValues); + colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME)).andReturn(timeSelector); + EasyMock.expect(colSelectorFactory.makeFloatColumnSelector("nilly")).andReturn(floatSelector); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector("nnn")).andReturn(longSelector); + 
EasyMock.replay(colSelectorFactory); + } + + @Test + public void testDoubleFirstAggregator() + { + DoubleFirstAggregator agg = (DoubleFirstAggregator) doubleFirstAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", agg.getName()); + + aggregate(timeSelector, floatSelector, agg); + aggregate(timeSelector, floatSelector, agg); + aggregate(timeSelector, floatSelector, agg); + aggregate(timeSelector, floatSelector, agg); + + Pair result = (Pair)agg.get(); + + Assert.assertEquals(times[0], result.lhs.longValue()); + Assert.assertEquals(floatValues[0], result.rhs, 0.0001); + Assert.assertEquals((long)floatValues[0], agg.getLong()); + Assert.assertEquals(floatValues[0], agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair)agg.get()).rhs, 0.0001); + } + + @Test + public void testDoubleFirstBufferAggregator() + { + DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) doubleFirstAggFactory.factorizeBuffered(colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(timeSelector, floatSelector, agg, buffer, 0); + aggregate(timeSelector, floatSelector, agg, buffer, 0); + aggregate(timeSelector, floatSelector, agg, buffer, 0); + aggregate(timeSelector, floatSelector, agg, buffer, 0); + + Pair result = (Pair)agg.get(buffer, 0); + + Assert.assertEquals(times[0], result.lhs.longValue()); + Assert.assertEquals(floatValues[0], result.rhs, 0.0001); + Assert.assertEquals((long) floatValues[0], agg.getLong(buffer, 0)); + Assert.assertEquals(floatValues[0], agg.getFloat(buffer, 0), 0.0001); + } + + @Test + public void testLongFirstAggregator() + { + LongFirstAggregator agg = (LongFirstAggregator) longFirstAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("bill", agg.getName()); + + aggregate(timeSelector, longSelector, agg); + aggregate(timeSelector, longSelector, agg); + aggregate(timeSelector, longSelector, agg); + 
aggregate(timeSelector, longSelector, agg); + + Pair result = (Pair)agg.get(); + + Assert.assertEquals(times[0], result.lhs.longValue()); + Assert.assertEquals(longValues[0], result.rhs.longValue()); + Assert.assertEquals(longValues[0], agg.getLong()); + Assert.assertEquals(longValues[0], agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair)agg.get()).rhs.longValue()); + } + + @Test + public void testLongFirstBufferAggregator() + { + LongFirstBufferAggregator agg = (LongFirstBufferAggregator) longFirstAggFactory.factorizeBuffered(colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(timeSelector, longSelector, agg, buffer, 0); + aggregate(timeSelector, longSelector, agg, buffer, 0); + aggregate(timeSelector, longSelector, agg, buffer, 0); + aggregate(timeSelector, longSelector, agg, buffer, 0); + + Pair result = (Pair)agg.get(buffer, 0); + + Assert.assertEquals(times[0], result.lhs.longValue()); + Assert.assertEquals(longValues[0], result.rhs.longValue()); + Assert.assertEquals(longValues[0], agg.getLong(buffer, 0)); + Assert.assertEquals(longValues[0], agg.getFloat(buffer, 0), 0.0001); + } + + @Test + public void testCombine() + { + Pair pair1 = new Pair<>(1467225000L, 3.621); + Pair pair2 = new Pair<>(1467240000L, 785.4); + Assert.assertEquals(pair1, doubleFirstAggFactory.combine(pair1, pair2)); + } + + + @Test + public void testEqualsAndHashCode() throws Exception + { + FirstAggregatorFactory one = new FirstAggregatorFactory("name1", "fieldName1", "double"); + FirstAggregatorFactory oneAgain = new FirstAggregatorFactory("name1", "fieldName1", "double"); + FirstAggregatorFactory two = new FirstAggregatorFactory("name1", "fieldName1", "long"); + FirstAggregatorFactory three = new FirstAggregatorFactory("name2", "fieldName2", "double"); + + Assert.assertEquals(one.hashCode(), oneAgain.hashCode()); + + Assert.assertTrue(one.equals(oneAgain)); + 
Assert.assertFalse(one.equals(two)); + Assert.assertFalse(one.equals(three)); + } + + private void aggregate(TestLongColumnSelector timeSelector, TestFloatColumnSelector selector, DoubleFirstAggregator agg) + { + agg.aggregate(); + timeSelector.increment(); + selector.increment(); + } + + private void aggregate(TestLongColumnSelector timeSelector, TestFloatColumnSelector selector, DoubleFirstBufferAggregator agg, ByteBuffer buff, int position) + { + agg.aggregate(buff, position); + timeSelector.increment(); + selector.increment(); + } + + private void aggregate(TestLongColumnSelector timeSelector, TestLongColumnSelector selector, LongFirstAggregator agg) + { + agg.aggregate(); + timeSelector.increment(); + selector.increment(); + } + + private void aggregate(TestLongColumnSelector timeSelector, TestLongColumnSelector selector, LongFirstBufferAggregator agg, ByteBuffer buff, int position) + { + agg.aggregate(buff, position); + timeSelector.increment(); + selector.increment(); + } +} diff --git a/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java new file mode 100644 index 000000000000..9bef23d347bd --- /dev/null +++ b/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java @@ -0,0 +1,227 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import com.metamx.common.Pair; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.column.Column; +import org.easymock.EasyMock; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.nio.ByteBuffer; + +public class LastAggregationTest +{ + private LastAggregatorFactory doubleLastAggFactory; + private LastAggregatorFactory longLastAggFactory; + private ColumnSelectorFactory colSelectorFactory; + private TestLongColumnSelector timeSelector; + private TestFloatColumnSelector floatSelector; + private TestLongColumnSelector longSelector; + + private long[] longValues = {62, 8, 54, 2}; + private float[] floatValues = {1.1f, 2.7f, 3.5f, 1.3f}; + private long[] times = {1467225096, 146722598, 1467225099, 1467225111}; + + public LastAggregationTest() throws Exception + { + String doubleSpecJson = "{\"type\": \"last\", \"name\": \"billy\", \"fieldName\": \"nilly\", \"value\": \"double\"}"; + String longSpecJson = "{\"type\": \"last\", \"name\": \"bill\", \"fieldName\": \"nnn\", \"value\": \"long\"}"; + doubleLastAggFactory = new DefaultObjectMapper().readValue(doubleSpecJson, LastAggregatorFactory.class); + longLastAggFactory = new DefaultObjectMapper().readValue(longSpecJson, LastAggregatorFactory.class); + } + + @Before + public void setup() + { + timeSelector = new TestLongColumnSelector(times); + floatSelector = new TestFloatColumnSelector(floatValues); + longSelector = new 
TestLongColumnSelector(longValues); + colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME)).andReturn(timeSelector); + EasyMock.expect(colSelectorFactory.makeFloatColumnSelector("nilly")).andReturn(floatSelector); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector("nnn")).andReturn(longSelector); + EasyMock.replay(colSelectorFactory); + } + + @Test + public void testDoubleLastAggregator() + { + DoubleLastAggregator agg = (DoubleLastAggregator) doubleLastAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", agg.getName()); + + aggregate(timeSelector, floatSelector, agg); + aggregate(timeSelector, floatSelector, agg); + aggregate(timeSelector, floatSelector, agg); + aggregate(timeSelector, floatSelector, agg); + + Pair result = (Pair) agg.get(); + + Assert.assertEquals(times[3], result.lhs.longValue()); + Assert.assertEquals(floatValues[3], result.rhs, 0.0001); + Assert.assertEquals((long) floatValues[3], agg.getLong()); + Assert.assertEquals(floatValues[3], agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); + } + + @Test + public void testDoubleLastBufferAggregator() + { + DoubleLastBufferAggregator agg = (DoubleLastBufferAggregator) doubleLastAggFactory.factorizeBuffered( + colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(timeSelector, floatSelector, agg, buffer, 0); + aggregate(timeSelector, floatSelector, agg, buffer, 0); + aggregate(timeSelector, floatSelector, agg, buffer, 0); + aggregate(timeSelector, floatSelector, agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + + Assert.assertEquals(times[3], result.lhs.longValue()); + Assert.assertEquals(floatValues[3], result.rhs, 0.0001); + Assert.assertEquals((long) floatValues[3], agg.getLong(buffer, 0)); + 
Assert.assertEquals(floatValues[3], agg.getFloat(buffer, 0), 0.0001); + } + + @Test + public void testLongLastAggregator() + { + LongLastAggregator agg = (LongLastAggregator) longLastAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("bill", agg.getName()); + + aggregate(timeSelector, longSelector, agg); + aggregate(timeSelector, longSelector, agg); + aggregate(timeSelector, longSelector, agg); + aggregate(timeSelector, longSelector, agg); + + Pair result = (Pair) agg.get(); + + Assert.assertEquals(times[3], result.lhs.longValue()); + Assert.assertEquals(longValues[3], result.rhs.longValue()); + Assert.assertEquals(longValues[3], agg.getLong()); + Assert.assertEquals(longValues[3], agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs.longValue()); + } + + @Test + public void testLongLastBufferAggregator() + { + LongLastBufferAggregator agg = (LongLastBufferAggregator) longLastAggFactory.factorizeBuffered(colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(timeSelector, longSelector, agg, buffer, 0); + aggregate(timeSelector, longSelector, agg, buffer, 0); + aggregate(timeSelector, longSelector, agg, buffer, 0); + aggregate(timeSelector, longSelector, agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + + Assert.assertEquals(times[3], result.lhs.longValue()); + Assert.assertEquals(longValues[3], result.rhs.longValue()); + Assert.assertEquals(longValues[3], agg.getLong(buffer, 0)); + Assert.assertEquals(longValues[3], agg.getFloat(buffer, 0), 0.0001); + } + + @Test + public void testCombine() + { + Pair pair1 = new Pair<>(1467225000L, 3.621); + Pair pair2 = new Pair<>(1467240000L, 785.4); + Assert.assertEquals(pair2, doubleLastAggFactory.combine(pair1, pair2)); + } + + + @Test + public void testEqualsAndHashCode() throws Exception + { + LastAggregatorFactory one = new LastAggregatorFactory("name1", 
"fieldName1", "double"); + LastAggregatorFactory oneAgain = new LastAggregatorFactory("name1", "fieldName1", "double"); + LastAggregatorFactory two = new LastAggregatorFactory("name1", "fieldName1", "long"); + LastAggregatorFactory three = new LastAggregatorFactory("name2", "fieldName2", "double"); + + Assert.assertEquals(one.hashCode(), oneAgain.hashCode()); + + Assert.assertTrue(one.equals(oneAgain)); + Assert.assertFalse(one.equals(two)); + Assert.assertFalse(one.equals(three)); + } + + private void aggregate( + TestLongColumnSelector timeSelector, + TestFloatColumnSelector selector, + DoubleLastAggregator agg + ) + { + agg.aggregate(); + timeSelector.increment(); + selector.increment(); + } + + private void aggregate( + TestLongColumnSelector timeSelector, + TestFloatColumnSelector selector, + DoubleLastBufferAggregator agg, + ByteBuffer buff, + int position + ) + { + agg.aggregate(buff, position); + timeSelector.increment(); + selector.increment(); + } + + private void aggregate( + TestLongColumnSelector timeSelector, + TestLongColumnSelector selector, + LongLastAggregator agg + ) + { + agg.aggregate(); + timeSelector.increment(); + selector.increment(); + } + + private void aggregate( + TestLongColumnSelector timeSelector, + TestLongColumnSelector selector, + LongLastBufferAggregator agg, + ByteBuffer buff, + int position + ) + { + agg.aggregate(buff, position); + timeSelector.increment(); + selector.increment(); + } +} diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index a7fc939e67e5..64ea3295fed8 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -59,7 +59,9 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import 
io.druid.query.aggregation.FilteredAggregatorFactory; +import io.druid.query.aggregation.FirstAggregatorFactory; import io.druid.query.aggregation.JavaScriptAggregatorFactory; +import io.druid.query.aggregation.LastAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; @@ -1615,6 +1617,49 @@ public void testGroupByWithCardinality() TestHelper.assertExpectedObjects(expectedResults, results, ""); } + @Test + public void testGroupByWithFirstLast() + { + GroupByQuery query = GroupByQuery + .builder() + .setDataSource(QueryRunnerTestHelper.dataSource) + .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) + .setDimensions( + Arrays.asList( + new DefaultDimensionSpec( + QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.marketDimension + ) + ) + ) + .setAggregatorSpecs( + Arrays.asList( + new FirstAggregatorFactory("first", "index", "long"), + new LastAggregatorFactory("last", "index", "long") + ) + ) + .setGranularity(QueryRunnerTestHelper.monthGran) + .build(); + + List expectedResults = Arrays.asList( + GroupByQueryRunnerTestHelper.createExpectedRow("2011-01-01", "market", "spot", "first", 100L, "last", 155L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-01-01", "market", "total_market", "first", 1000L, "last", 1127L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-01-01", "market", "upfront", "first", 800L, "last", 943L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-02-01", "market", "spot", "first", 132L, "last", 114L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-02-01", "market", "total_market", "first", 1203L, "last", 1292L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-02-01", "market", "upfront", "first", 1667L, "last", 1101L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-01", "market", "spot", "first", 153L, "last", 125L), + 
GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-01", "market", "total_market", "first", 1124L, "last", 1366L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-01", "market", "upfront", "first", 1166L, "last", 1063L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "market", "spot", "first", 135L, "last", 120L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "market", "total_market", "first", 1314L, "last", 1029L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "market", "upfront", "first", 1447L, "last", 780L) + ); + + Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + TestHelper.assertExpectedObjects(expectedResults, results, ""); + } + @Test public void testGroupByWithNullProducingDimExtractionFn() { From 759d902152fe358f47eb8b47e3d8698d335ac419 Mon Sep 17 00:00:00 2001 From: Dave Li Date: Wed, 6 Jul 2016 17:13:56 -0700 Subject: [PATCH 02/10] add test and fix --- .../benchmark/query/GroupByBenchmark.java | 6 +- .../druid/collections/SerializablePair.java | 29 +++-- .../aggregation/DoubleFirstAggregator.java | 11 +- .../DoubleFirstBufferAggregator.java | 14 ++- .../aggregation/DoubleLastAggregator.java | 13 +- .../DoubleLastBufferAggregator.java | 16 ++- .../aggregation/FirstAggregatorFactory.java | 108 ++++++++++++---- .../aggregation/LastAggregatorFactory.java | 116 +++++++++++++----- .../aggregation/LongFirstAggregator.java | 11 +- .../LongFirstBufferAggregator.java | 12 +- .../query/aggregation/LongLastAggregator.java | 13 +- .../aggregation/LongLastBufferAggregator.java | 14 ++- .../aggregation/FirstAggregationTest.java | 50 +++++--- .../aggregation/LastAggregationTest.java | 7 +- .../query/groupby/GroupByQueryRunnerTest.java | 52 ++++++-- .../timeseries/TimeseriesQueryRunnerTest.java | 110 +++++++++++++++++ .../druid/query/topn/TopNQueryRunnerTest.java | 47 +++++++ 17 files changed, 488 insertions(+), 141 deletions(-) rename 
benchmarks/src/main/java/io/druid/benchmark/aggregator/LongMaxBenchmark.java => common/src/main/java/io/druid/collections/SerializablePair.java (67%) diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java index cc3bedceb39a..a520ff338d17 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java @@ -51,8 +51,6 @@ import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryToolChest; import io.druid.query.aggregation.AggregatorFactory; -import io.druid.query.aggregation.FirstAggregatorFactory; -import io.druid.query.aggregation.LastAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.query.dimension.DefaultDimensionSpec; @@ -107,8 +105,8 @@ @State(Scope.Benchmark) @Fork(jvmArgsPrepend = "-server", value = 1) -@Warmup(iterations = 0) -@Measurement(iterations = 1) +@Warmup(iterations = 10) +@Measurement(iterations = 25) public class GroupByBenchmark { @Param({"4"}) diff --git a/benchmarks/src/main/java/io/druid/benchmark/aggregator/LongMaxBenchmark.java b/common/src/main/java/io/druid/collections/SerializablePair.java similarity index 67% rename from benchmarks/src/main/java/io/druid/benchmark/aggregator/LongMaxBenchmark.java rename to common/src/main/java/io/druid/collections/SerializablePair.java index 025bbd566725..8f3f574be8a2 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/aggregator/LongMaxBenchmark.java +++ b/common/src/main/java/io/druid/collections/SerializablePair.java @@ -17,22 +17,27 @@ * under the License. 
*/ -package io.druid.benchmark.aggregator; +package io.druid.collections; -import org.openjdk.jmh.annotations.Measurement; -import org.openjdk.jmh.annotations.Scope; -import org.openjdk.jmh.annotations.Setup; -import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Warmup; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.metamx.common.Pair; -@State(Scope.Benchmark) -@Warmup(iterations = 10) -@Measurement(iterations = 10) -public class LongMaxBenchmark +public class SerializablePair extends Pair { - @Setup - public void setup() + public SerializablePair(@JsonProperty T1 lhs, @JsonProperty T2 rhs) { + super(lhs, rhs); + } + @JsonProperty + public T1 getLhs() + { + return lhs; + } + + @JsonProperty + public T2 getRhs() + { + return rhs; } } diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java index 0dc0cf06ed5d..cf590b747502 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java @@ -19,7 +19,7 @@ package io.druid.query.aggregation; -import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; @@ -45,8 +45,9 @@ public DoubleFirstAggregator(String name, FloatColumnSelector valueSelector, Lon @Override public void aggregate() { - if (firstTime == -1) { - firstTime = timeSelector.get(); + long time = timeSelector.get(); + if (time < firstTime) { + firstTime = time; firstValue = valueSelector.get(); } } @@ -54,14 +55,14 @@ public void aggregate() @Override public void reset() { - firstTime = -1; + firstTime = Long.MAX_VALUE; firstValue = 0; } @Override public Object get() { - return new Pair<>(firstTime, firstValue); + return new SerializablePair<>(firstTime, firstValue); } @Override diff 
--git a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java index 3c504960a6e3..e47b870595f4 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java @@ -20,7 +20,7 @@ package io.druid.query.aggregation; import com.google.common.primitives.Longs; -import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; @@ -40,15 +40,17 @@ public DoubleFirstBufferAggregator(LongColumnSelector timeSelector, FloatColumnS @Override public void init(ByteBuffer buf, int position) { - buf.putLong(position, -1); - buf.putLong(position + Longs.BYTES, 0); + buf.putLong(position, Long.MAX_VALUE); + buf.putDouble(position + Longs.BYTES, 0); } @Override public void aggregate(ByteBuffer buf, int position) { - if (buf.getLong(position) == -1) { - buf.putLong(position, timeSelector.get()); + long time = timeSelector.get(); + long firstTime = buf.getLong(position); + if (time < firstTime) { + buf.putLong(position, time); buf.putDouble(position + Longs.BYTES, valueSelector.get()); } } @@ -56,7 +58,7 @@ public void aggregate(ByteBuffer buf, int position) @Override public Object get(ByteBuffer buf, int position) { - return new Pair<>(buf.getLong(position), buf.getDouble(position + Longs.BYTES)); + return new SerializablePair<>(buf.getLong(position), buf.getDouble(position + Longs.BYTES)); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java index d92249d2a3b5..a0b7ecf30288 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java +++ 
b/processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java @@ -19,7 +19,7 @@ package io.druid.query.aggregation; -import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; @@ -45,21 +45,24 @@ public DoubleLastAggregator(String name, FloatColumnSelector valueSelector, Long @Override public void aggregate() { - lastTime = timeSelector.get(); - lastValue = valueSelector.get(); + long time = timeSelector.get(); + if (time >= lastTime) { + lastTime = timeSelector.get(); + lastValue = valueSelector.get(); + } } @Override public void reset() { - lastTime = -1; + lastTime = Long.MIN_VALUE; lastValue = 0; } @Override public Object get() { - return new Pair<>(lastTime, lastValue); + return new SerializablePair<>(lastTime, lastValue); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java index 2d5431ef2b7b..96c95df83cf4 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java @@ -20,7 +20,7 @@ package io.druid.query.aggregation; import com.google.common.primitives.Longs; -import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; @@ -40,21 +40,25 @@ public DoubleLastBufferAggregator(LongColumnSelector timeSelector, FloatColumnSe @Override public void init(ByteBuffer buf, int position) { - buf.putLong(position, -1); - buf.putLong(position + Longs.BYTES, 0); + buf.putLong(position, Long.MIN_VALUE); + buf.putDouble(position + Longs.BYTES, 0); } @Override public void aggregate(ByteBuffer buf, int position) { - buf.putLong(position, timeSelector.get()); - buf.putDouble(position + 
Longs.BYTES, valueSelector.get()); + long time = timeSelector.get(); + long lastTime = buf.getLong(position); + if (time >= lastTime) { + buf.putLong(position, time); + buf.putDouble(position + Longs.BYTES, valueSelector.get()); + } } @Override public Object get(ByteBuffer buf, int position) { - return new Pair<>(buf.getLong(position), buf.getDouble(position + Longs.BYTES)); + return new SerializablePair<>(buf.getLong(position), buf.getDouble(position + Longs.BYTES)); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java index 1176086deb00..1fd32578136b 100644 --- a/processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java @@ -25,8 +25,8 @@ import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; import com.metamx.common.IAE; -import com.metamx.common.Pair; import com.metamx.common.StringUtils; +import io.druid.collections.SerializablePair; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; import io.druid.segment.column.Column; @@ -35,9 +35,14 @@ import java.util.Arrays; import java.util.Comparator; import java.util.List; +import java.util.Map; public class FirstAggregatorFactory extends AggregatorFactory { + private static final byte CACHE_TYPE_ID = 0x10; + private static final String TYPE_LONG = "long"; + private static final String TYPE_DOUBLE = "double"; + private final String fieldName; private final String name; private final String value; @@ -51,7 +56,10 @@ public FirstAggregatorFactory( { Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); - Preconditions.checkArgument(value.equals("long") || value.equals("double"), "Must have a valid, 
non-null type"); + Preconditions.checkArgument( + value.equals(TYPE_LONG) || value.equals(TYPE_DOUBLE), + "Must have a valid, non-null type" + ); this.name = name; this.fieldName = fieldName; @@ -61,12 +69,12 @@ public FirstAggregatorFactory( @Override public Aggregator factorize(ColumnSelectorFactory metricFactory) { - if (value.equals("long")) { + if (value.equals(TYPE_LONG)) { return new LongFirstAggregator( name, metricFactory.makeLongColumnSelector(fieldName), metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) ); - } else if (value.equals("double")) { + } else if (value.equals(TYPE_DOUBLE)) { return new DoubleFirstAggregator( name, metricFactory.makeFloatColumnSelector(fieldName), metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) @@ -78,12 +86,12 @@ public Aggregator factorize(ColumnSelectorFactory metricFactory) @Override public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) { - if (value.equals("long")) { + if (value.equals(TYPE_LONG)) { return new LongFirstBufferAggregator( metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), metricFactory.makeLongColumnSelector(fieldName) ); - } else if (value.equals("double")) { + } else if (value.equals(TYPE_DOUBLE)) { return new DoubleFirstBufferAggregator( metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), metricFactory.makeFloatColumnSelector(fieldName) @@ -100,7 +108,7 @@ public Comparator getComparator() @Override public int compare(Object o1, Object o2) { - return Longs.compare(((Pair) o1).lhs, ((Pair) o2).lhs); + return Longs.compare(((SerializablePair) o1).lhs, ((SerializablePair) o2).lhs); } }; } @@ -108,36 +116,39 @@ public int compare(Object o1, Object o2) @Override public Object combine(Object lhs, Object rhs) { - return (((Pair) lhs).lhs <= ((Pair) rhs).lhs) ? lhs : rhs; + return (((SerializablePair) lhs).lhs <= ((SerializablePair) rhs).lhs) ? 
lhs : rhs; } @Override public AggregatorFactory getCombiningFactory() { - return new FirstAggregatorFactory(name, name, value) { + return new FirstAggregatorFactory(name, name, value) + { @Override public Aggregator factorize(ColumnSelectorFactory metricFactory) { final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); - if (value.equals("long")) { - return new LongFirstAggregator(name, null, null) { + if (value.equals(TYPE_LONG)) { + return new LongFirstAggregator(name, null, null) + { @Override public void aggregate() { - if (firstTime == -1) { - Pair pair = (Pair)selector.get(); + SerializablePair pair = (SerializablePair) selector.get(); + if (pair.lhs < firstTime) { firstTime = pair.lhs; firstValue = pair.rhs; } } }; - } else if (value.equals("double")) { - return new DoubleFirstAggregator(name, null, null) { + } else if (value.equals(TYPE_DOUBLE)) { + return new DoubleFirstAggregator(name, null, null) + { @Override public void aggregate() { - if (firstTime == -1) { - Pair pair = (Pair)selector.get(); + SerializablePair pair = (SerializablePair) selector.get(); + if (pair.lhs < firstTime) { firstTime = pair.lhs; firstValue = pair.rhs; } @@ -146,6 +157,42 @@ public void aggregate() } throw new IAE("undefined type"); } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + if (value.equals(TYPE_LONG)) { + return new LongFirstBufferAggregator(null, null) + { + @Override + public void aggregate(ByteBuffer buf, int position) + { + SerializablePair pair = (SerializablePair) selector.get(); + long firstTime = buf.getLong(position); + if (pair.lhs < firstTime) { + buf.putLong(position, pair.lhs); + buf.putLong(position + Longs.BYTES, pair.rhs); + } + } + }; + } else if (value.equals(TYPE_DOUBLE)) { + return new DoubleFirstBufferAggregator(null, null) + { + @Override + public void aggregate(ByteBuffer buf, int 
position) + { + SerializablePair pair = (SerializablePair) selector.get(); + long firstTime = buf.getLong(position); + if (pair.lhs < firstTime) { + buf.putLong(position, pair.lhs); + buf.putDouble(position + Longs.BYTES, pair.rhs); + } + } + }; + } + throw new IAE("undefined type"); + } }; } @@ -169,13 +216,19 @@ public List getRequiredColumns() @Override public Object deserialize(Object object) { - return object; + Map map = (Map) object; + if (value.equals(TYPE_LONG)) { + return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).longValue()); + } else if (value.equals(TYPE_DOUBLE)) { + return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).doubleValue()); + } + throw new IAE("undefined type"); } @Override public Object finalizeComputation(Object object) { - return ((Pair) object).rhs; + return ((SerializablePair) object).rhs; } @Override @@ -207,21 +260,22 @@ public List requiredFields() public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(1 + fieldNameBytes.length).put((byte) 0x10).put(fieldNameBytes).array(); + + return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); } @Override public String getTypeName() { - return value.equals("double") ? "float" : value; + return value.equals(TYPE_DOUBLE) ? 
"float" : value; } @Override public int getMaxIntermediateSize() { - if (value.equals("long")) { + if (value.equals(TYPE_LONG)) { return Longs.BYTES * 2; - } else if (value.equals("double")) { + } else if (value.equals(TYPE_DOUBLE)) { return Longs.BYTES + Doubles.BYTES; } throw new IAE("undefined type"); @@ -230,10 +284,10 @@ public int getMaxIntermediateSize() @Override public Object getAggregatorStartValue() { - if (value.equals("long")) { - return new Pair<>(-1L, 0L); - } else if (value.equals("double")) { - return new Pair<>(-1L, 0D); + if (value.equals(TYPE_LONG)) { + return new SerializablePair<>(Long.MAX_VALUE, 0L); + } else if (value.equals(TYPE_DOUBLE)) { + return new SerializablePair<>(Long.MAX_VALUE, 0D); } throw new IAE("undefined type"); } diff --git a/processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java index 493215598679..e6155b67a8c2 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java @@ -25,8 +25,8 @@ import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; import com.metamx.common.IAE; -import com.metamx.common.Pair; import com.metamx.common.StringUtils; +import io.druid.collections.SerializablePair; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; import io.druid.segment.column.Column; @@ -35,9 +35,14 @@ import java.util.Arrays; import java.util.Comparator; import java.util.List; +import java.util.Map; public class LastAggregatorFactory extends AggregatorFactory { + private static final byte CACHE_TYPE_ID = 0x11; + private static final String TYPE_LONG = "long"; + private static final String TYPE_DOUBLE = "double"; + private final String fieldName; private final String name; private final String value; @@ -51,7 +56,10 @@ public 
LastAggregatorFactory( { Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); - Preconditions.checkArgument(value.equals("long") || value.equals("double"), "Must have a valid, non-null type"); + Preconditions.checkArgument( + value.equals(TYPE_LONG) || value.equals(TYPE_DOUBLE), + "Must have a valid, non-null type" + ); this.name = name; this.fieldName = fieldName; @@ -61,12 +69,12 @@ public LastAggregatorFactory( @Override public Aggregator factorize(ColumnSelectorFactory metricFactory) { - if (value.equals("long")) { + if (value.equals(TYPE_LONG)) { return new LongLastAggregator( name, metricFactory.makeLongColumnSelector(fieldName), metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) ); - } else if (value.equals("double")) { + } else if (value.equals(TYPE_DOUBLE)) { return new DoubleLastAggregator( name, metricFactory.makeFloatColumnSelector(fieldName), metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) @@ -78,12 +86,12 @@ public Aggregator factorize(ColumnSelectorFactory metricFactory) @Override public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) { - if (value.equals("long")) { + if (value.equals(TYPE_LONG)) { return new LongLastBufferAggregator( metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), metricFactory.makeLongColumnSelector(fieldName) ); - } else if (value.equals("double")) { + } else if (value.equals(TYPE_DOUBLE)) { return new DoubleLastBufferAggregator( metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), metricFactory.makeFloatColumnSelector(fieldName) @@ -100,7 +108,7 @@ public Comparator getComparator() @Override public int compare(Object o1, Object o2) { - return Longs.compare(((Pair) o1).lhs, ((Pair) o2).lhs); + return Longs.compare(((SerializablePair) o1).lhs, ((SerializablePair) o2).lhs); } }; } @@ -108,35 +116,78 @@ public int compare(Object o1, Object o2) @Override 
public Object combine(Object lhs, Object rhs) { - return (((Pair) lhs).lhs > ((Pair) rhs).lhs) ? lhs : rhs; + return (((SerializablePair) lhs).lhs > ((SerializablePair) rhs).lhs) ? lhs : rhs; } @Override public AggregatorFactory getCombiningFactory() { - return new LastAggregatorFactory(name, name, value) { + return new LastAggregatorFactory(name, name, value) + { @Override public Aggregator factorize(ColumnSelectorFactory metricFactory) { final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); - if (value.equals("long")) { - return new LongLastAggregator(name, null, null) { + if (value.equals(TYPE_LONG)) { + return new LongLastAggregator(name, null, null) + { @Override public void aggregate() { - Pair pair = (Pair)selector.get(); - lastTime = pair.lhs; - lastValue = pair.rhs; + SerializablePair pair = (SerializablePair) selector.get(); + if (pair.rhs >= lastTime) { + lastTime = pair.lhs; + lastValue = pair.rhs; + } } }; - } else if (value.equals("double")) { - return new DoubleLastAggregator(name, null, null) { + } else if (value.equals(TYPE_DOUBLE)) { + return new DoubleLastAggregator(name, null, null) + { @Override public void aggregate() { - Pair pair = (Pair)selector.get(); - lastTime = pair.lhs; - lastValue = pair.rhs; + SerializablePair pair = (SerializablePair) selector.get(); + if (pair.rhs >= lastTime) { + lastTime = pair.lhs; + lastValue = pair.rhs; + } + } + }; + } + throw new IAE("undefined type"); + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + if (value.equals(TYPE_LONG)) { + return new LongLastBufferAggregator(null, null) + { + @Override + public void aggregate(ByteBuffer buf, int position) + { + SerializablePair pair = (SerializablePair) selector.get(); + long lastTime = buf.getLong(position); + if (pair.lhs >= lastTime) { + buf.putLong(position, pair.lhs); + buf.putLong(position 
+ Longs.BYTES, pair.rhs); + } + } + }; + } else if (value.equals(TYPE_DOUBLE)) { + return new DoubleLastBufferAggregator(null, null) + { + @Override + public void aggregate(ByteBuffer buf, int position) + { + SerializablePair pair = (SerializablePair) selector.get(); + long lastTime = buf.getLong(position); + if (pair.lhs >= lastTime) { + buf.putLong(position, pair.lhs); + buf.putDouble(position + Longs.BYTES, pair.rhs); + } } }; } @@ -165,13 +216,19 @@ public List getRequiredColumns() @Override public Object deserialize(Object object) { - return object; + Map map = (Map) object; + if (value.equals(TYPE_LONG)) { + return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).longValue()); + } else if (value.equals(TYPE_DOUBLE)) { + return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).doubleValue()); + } + throw new IAE("undefined type"); } @Override public Object finalizeComputation(Object object) { - return ((Pair) object).rhs; + return ((SerializablePair) object).rhs; } @Override @@ -203,21 +260,22 @@ public List requiredFields() public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(1 + fieldNameBytes.length).put((byte) 0x11).put(fieldNameBytes).array(); + + return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); } @Override public String getTypeName() { - return value.equals("double") ? "float" : value; + return value.equals(TYPE_DOUBLE) ? 
"float" : value; } @Override public int getMaxIntermediateSize() { - if (value.equals("long")) { + if (value.equals(TYPE_LONG)) { return Longs.BYTES * 2; - } else if (value.equals("double")) { + } else if (value.equals(TYPE_DOUBLE)) { return Longs.BYTES + Doubles.BYTES; } throw new IAE("undefined type"); @@ -226,10 +284,10 @@ public int getMaxIntermediateSize() @Override public Object getAggregatorStartValue() { - if (value.equals("long")) { - return new Pair<>(-1L, 0L); - } else if (value.equals("double")) { - return new Pair<>(-1L, 0D); + if (value.equals(TYPE_LONG)) { + return new SerializablePair<>(Long.MIN_VALUE, 0L); + } else if (value.equals(TYPE_DOUBLE)) { + return new SerializablePair<>(Long.MIN_VALUE, 0D); } throw new IAE("undefined type"); } diff --git a/processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java b/processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java index fd98a12d8417..1484da1fa5a5 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java @@ -19,7 +19,7 @@ package io.druid.query.aggregation; -import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.segment.LongColumnSelector; public class LongFirstAggregator implements Aggregator @@ -44,8 +44,9 @@ public LongFirstAggregator(String name, LongColumnSelector valueSelector, LongCo @Override public void aggregate() { - if (firstTime == -1) { - firstTime = timeSelector.get(); + long time = timeSelector.get(); + if (time < firstTime) { + firstTime = time; firstValue = valueSelector.get(); } } @@ -53,14 +54,14 @@ public void aggregate() @Override public void reset() { - firstTime = -1; + firstTime = Long.MAX_VALUE; firstValue = 0; } @Override public Object get() { - return new Pair<>(firstTime, firstValue); + return new SerializablePair<>(firstTime, firstValue); } @Override diff --git 
a/processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java index fa6e4eedfc71..3aae1713ca57 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java @@ -20,7 +20,7 @@ package io.druid.query.aggregation; import com.google.common.primitives.Longs; -import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.segment.LongColumnSelector; import java.nio.ByteBuffer; @@ -39,15 +39,17 @@ public LongFirstBufferAggregator(LongColumnSelector timeSelector, LongColumnSele @Override public void init(ByteBuffer buf, int position) { - buf.putLong(position, -1); + buf.putLong(position, Long.MAX_VALUE); buf.putLong(position + Longs.BYTES, 0); } @Override public void aggregate(ByteBuffer buf, int position) { - if (buf.getLong(position) == -1) { - buf.putLong(position, timeSelector.get()); + long time = timeSelector.get(); + long firstTime = buf.getLong(position); + if (time < firstTime) { + buf.putLong(position, time); buf.putLong(position + Longs.BYTES, valueSelector.get()); } } @@ -55,7 +57,7 @@ public void aggregate(ByteBuffer buf, int position) @Override public Object get(ByteBuffer buf, int position) { - return new Pair<>(buf.getLong(position), buf.getLong(position + Longs.BYTES)); + return new SerializablePair<>(buf.getLong(position), buf.getLong(position + Longs.BYTES)); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java b/processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java index 63926af68f39..5d7a173d9888 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java @@ -19,7 +19,7 @@ package io.druid.query.aggregation; -import 
com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.segment.LongColumnSelector; public class LongLastAggregator implements Aggregator @@ -43,21 +43,24 @@ public LongLastAggregator(String name, LongColumnSelector valueSelector, LongCol @Override public void aggregate() { - lastTime = timeSelector.get(); - lastValue = valueSelector.get(); + long time = timeSelector.get(); + if (time >= lastTime) { + lastTime = timeSelector.get(); + lastValue = valueSelector.get(); + } } @Override public void reset() { - lastTime = -1; + lastTime = Long.MIN_VALUE; lastValue = 0; } @Override public Object get() { - return new Pair<>(lastTime, lastValue); + return new SerializablePair<>(lastTime, lastValue); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java index 1967d1e61bf9..41d601cbf4e9 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java @@ -20,7 +20,7 @@ package io.druid.query.aggregation; import com.google.common.primitives.Longs; -import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.segment.LongColumnSelector; import java.nio.ByteBuffer; @@ -39,21 +39,25 @@ public LongLastBufferAggregator(LongColumnSelector timeSelector, LongColumnSelec @Override public void init(ByteBuffer buf, int position) { - buf.putLong(position, -1); + buf.putLong(position, Long.MIN_VALUE); buf.putLong(position + Longs.BYTES, 0); } @Override public void aggregate(ByteBuffer buf, int position) { - buf.putLong(position, timeSelector.get()); - buf.putLong(position + Longs.BYTES, valueSelector.get()); + long time = timeSelector.get(); + long lastTime = buf.getLong(position); + if (time >= lastTime) { + buf.putLong(position, time); + buf.putLong(position + Longs.BYTES, 
valueSelector.get()); + } } @Override public Object get(ByteBuffer buf, int position) { - return new Pair<>(buf.getLong(position), buf.getLong(position + Longs.BYTES)); + return new SerializablePair<>(buf.getLong(position), buf.getLong(position + Longs.BYTES)); } @Override diff --git a/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java index 822e97e920ce..1f1f501cbdb1 100644 --- a/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java @@ -20,6 +20,7 @@ package io.druid.query.aggregation; import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.column.Column; @@ -41,14 +42,14 @@ public class FirstAggregationTest private long[] longValues = {62, 8, 54, 2}; private float[] floatValues = {1.1f, 2.7f, 3.5f, 1.3f}; - private long[] times = {1467225096, 146722598, 1467225099, 1467225111}; + private long[] times = {1467225096, 1467225098, 1467225099, 1467225111}; public FirstAggregationTest() throws Exception { String doubleSpecJson = "{\"type\": \"first\", \"name\": \"billy\", \"fieldName\": \"nilly\", \"value\": \"double\"}"; String longSpecJson = "{\"type\": \"first\", \"name\": \"bill\", \"fieldName\": \"nnn\", \"value\": \"long\"}"; - doubleFirstAggFactory = new DefaultObjectMapper().readValue(doubleSpecJson , FirstAggregatorFactory.class); - longFirstAggFactory = new DefaultObjectMapper().readValue(longSpecJson , FirstAggregatorFactory.class); + doubleFirstAggFactory = new DefaultObjectMapper().readValue(doubleSpecJson, FirstAggregatorFactory.class); + longFirstAggFactory = new DefaultObjectMapper().readValue(longSpecJson, FirstAggregatorFactory.class); } @Before @@ -76,21 +77,22 @@ public void testDoubleFirstAggregator() 
aggregate(timeSelector, floatSelector, agg); aggregate(timeSelector, floatSelector, agg); - Pair result = (Pair)agg.get(); + Pair result = (Pair) agg.get(); Assert.assertEquals(times[0], result.lhs.longValue()); Assert.assertEquals(floatValues[0], result.rhs, 0.0001); - Assert.assertEquals((long)floatValues[0], agg.getLong()); + Assert.assertEquals((long) floatValues[0], agg.getLong()); Assert.assertEquals(floatValues[0], agg.getFloat(), 0.0001); agg.reset(); - Assert.assertEquals(0, ((Pair)agg.get()).rhs, 0.0001); + Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); } @Test public void testDoubleFirstBufferAggregator() { - DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) doubleFirstAggFactory.factorizeBuffered(colSelectorFactory); + DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) doubleFirstAggFactory.factorizeBuffered( + colSelectorFactory); ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSize()]); agg.init(buffer, 0); @@ -100,7 +102,7 @@ public void testDoubleFirstBufferAggregator() aggregate(timeSelector, floatSelector, agg, buffer, 0); aggregate(timeSelector, floatSelector, agg, buffer, 0); - Pair result = (Pair)agg.get(buffer, 0); + Pair result = (Pair) agg.get(buffer, 0); Assert.assertEquals(times[0], result.lhs.longValue()); Assert.assertEquals(floatValues[0], result.rhs, 0.0001); @@ -120,7 +122,7 @@ public void testLongFirstAggregator() aggregate(timeSelector, longSelector, agg); aggregate(timeSelector, longSelector, agg); - Pair result = (Pair)agg.get(); + Pair result = (Pair) agg.get(); Assert.assertEquals(times[0], result.lhs.longValue()); Assert.assertEquals(longValues[0], result.rhs.longValue()); @@ -128,7 +130,7 @@ public void testLongFirstAggregator() Assert.assertEquals(longValues[0], agg.getFloat(), 0.0001); agg.reset(); - Assert.assertEquals(0, ((Pair)agg.get()).rhs.longValue()); + Assert.assertEquals(0, ((Pair) agg.get()).rhs.longValue()); } @Test @@ -144,7 +146,7 @@ public 
void testLongFirstBufferAggregator() aggregate(timeSelector, longSelector, agg, buffer, 0); aggregate(timeSelector, longSelector, agg, buffer, 0); - Pair result = (Pair)agg.get(buffer, 0); + Pair result = (Pair) agg.get(buffer, 0); Assert.assertEquals(times[0], result.lhs.longValue()); Assert.assertEquals(longValues[0], result.rhs.longValue()); @@ -155,8 +157,8 @@ public void testLongFirstBufferAggregator() @Test public void testCombine() { - Pair pair1 = new Pair<>(1467225000L, 3.621); - Pair pair2 = new Pair<>(1467240000L, 785.4); + SerializablePair pair1 = new SerializablePair<>(1467225000L, 3.621); + SerializablePair pair2 = new SerializablePair<>(1467240000L, 785.4); Assert.assertEquals(pair1, doubleFirstAggFactory.combine(pair1, pair2)); } @@ -176,14 +178,24 @@ public void testEqualsAndHashCode() throws Exception Assert.assertFalse(one.equals(three)); } - private void aggregate(TestLongColumnSelector timeSelector, TestFloatColumnSelector selector, DoubleFirstAggregator agg) + private void aggregate( + TestLongColumnSelector timeSelector, + TestFloatColumnSelector selector, + DoubleFirstAggregator agg + ) { agg.aggregate(); timeSelector.increment(); selector.increment(); } - private void aggregate(TestLongColumnSelector timeSelector, TestFloatColumnSelector selector, DoubleFirstBufferAggregator agg, ByteBuffer buff, int position) + private void aggregate( + TestLongColumnSelector timeSelector, + TestFloatColumnSelector selector, + DoubleFirstBufferAggregator agg, + ByteBuffer buff, + int position + ) { agg.aggregate(buff, position); timeSelector.increment(); @@ -197,7 +209,13 @@ private void aggregate(TestLongColumnSelector timeSelector, TestLongColumnSelect selector.increment(); } - private void aggregate(TestLongColumnSelector timeSelector, TestLongColumnSelector selector, LongFirstBufferAggregator agg, ByteBuffer buff, int position) + private void aggregate( + TestLongColumnSelector timeSelector, + TestLongColumnSelector selector, + 
LongFirstBufferAggregator agg, + ByteBuffer buff, + int position + ) { agg.aggregate(buff, position); timeSelector.increment(); diff --git a/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java index 9bef23d347bd..c3bd2ce27bec 100644 --- a/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java @@ -20,6 +20,7 @@ package io.druid.query.aggregation; import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.column.Column; @@ -41,7 +42,7 @@ public class LastAggregationTest private long[] longValues = {62, 8, 54, 2}; private float[] floatValues = {1.1f, 2.7f, 3.5f, 1.3f}; - private long[] times = {1467225096, 146722598, 1467225099, 1467225111}; + private long[] times = {1467225096, 1467225098, 1467225099, 1467225111}; public LastAggregationTest() throws Exception { @@ -156,8 +157,8 @@ public void testLongLastBufferAggregator() @Test public void testCombine() { - Pair pair1 = new Pair<>(1467225000L, 3.621); - Pair pair2 = new Pair<>(1467240000L, 785.4); + SerializablePair pair1 = new SerializablePair<>(1467225000L, 3.621); + SerializablePair pair2 = new SerializablePair<>(1467240000L, 785.4); Assert.assertEquals(pair2, doubleLastAggFactory.combine(pair1, pair2)); } diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index 64ea3295fed8..7640c1e99e65 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -1624,14 +1624,7 @@ public void testGroupByWithFirstLast() .builder() 
.setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions( - Arrays.asList( - new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension - ) - ) - ) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"))) .setAggregatorSpecs( Arrays.asList( new FirstAggregatorFactory("first", "index", "long"), @@ -5254,6 +5247,49 @@ public void testSubqueryWithHyperUniquesPostAggregator() TestHelper.assertExpectedObjects(expectedResults, results, ""); } + @Test + public void testSubqueryWithFirstLast() + { + GroupByQuery subquery = GroupByQuery + .builder() + .setDataSource(QueryRunnerTestHelper.dataSource) + .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) + .setDimensions(ImmutableList.of(new DefaultDimensionSpec("market", "market"))) + .setAggregatorSpecs( + ImmutableList.of( + QueryRunnerTestHelper.rowsCount, + new FirstAggregatorFactory("innerfirst", "index", "long"), + new LastAggregatorFactory("innerlast", "index", "long") + ) + ) + .setGranularity(QueryRunnerTestHelper.dayGran) + .build(); + + GroupByQuery query = GroupByQuery + .builder() + .setDataSource(subquery) + .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) + .setDimensions(Lists.newArrayList()) + .setAggregatorSpecs( + ImmutableList.of( + new FirstAggregatorFactory("first", "innerfirst", "long"), + new LastAggregatorFactory("last", "innerlast", "long") + ) + ) + .setGranularity(QueryRunnerTestHelper.monthGran) + .build(); + + List expectedResults = Arrays.asList( + GroupByQueryRunnerTestHelper.createExpectedRow("2011-01-01", "first", 100L, "last", 943L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-02-01", "first", 132L, "last", 1101L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-01", "first", 153L, "last", 1063L), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "first", 135L, "last", 780L) + ); + + Iterable results 
= GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + TestHelper.assertExpectedObjects(expectedResults, results, ""); + } + @Test public void testGroupByWithTimeColumn() { diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 274c4acaa21e..5883c1ee35a9 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -36,6 +36,8 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; +import io.druid.query.aggregation.FirstAggregatorFactory; +import io.druid.query.aggregation.LastAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.extraction.MapLookupExtractor; @@ -1732,6 +1734,114 @@ public void testTimeseriesWithMultiValueFilteringJavascriptAggregatorAndAlsoRegu assertExpectedResults(expectedResults, actualResults); } + @Test + public void testTimeseriesWithFirstLastAggregator() + { + TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.monthGran) + .intervals(QueryRunnerTestHelper.fullOnInterval) + .aggregators( + ImmutableList.of( + new FirstAggregatorFactory("first", "index", "double"), + new LastAggregatorFactory("last", "index", "double") + ) + ) + .descending(descending) + .build(); + + // There's a difference between ascending and descending results since granularity of druid.sample.tsv is days, + // with multiple first and last times. 
The traversal order difference cause the first and last aggregator + // to select different value from the list of first and last dates + List> expectedAscendingResults = ImmutableList.of( + new Result<>( + new DateTime("2011-01-01"), + new TimeseriesResultValue( + ImmutableMap.of( + "first", new Float(100.000000).doubleValue(), + "last", new Float(943.497198).doubleValue() + ) + ) + ), + new Result<>( + new DateTime("2011-02-01"), + new TimeseriesResultValue( + ImmutableMap.of( + "first", new Float(132.123776).doubleValue(), + "last", new Float(1101.918270).doubleValue() + ) + ) + ), + new Result<>( + new DateTime("2011-03-01"), + new TimeseriesResultValue( + ImmutableMap.of( + "first", new Float(153.059937).doubleValue(), + "last", new Float(1063.201156).doubleValue() + ) + ) + ), + new Result<>( + new DateTime("2011-04-01"), + new TimeseriesResultValue( + ImmutableMap.of( + "first", new Float(135.885094).doubleValue(), + "last", new Float(780.271977).doubleValue() + ) + ) + ) + ); + + List> expectedDescendingResults = ImmutableList.of( + new Result<>( + new DateTime("2011-04-01"), + new TimeseriesResultValue( + ImmutableMap.of( + "first", new Float(1234.247546).doubleValue(), + "last", new Float(106.793700).doubleValue() + ) + ) + ), + new Result<>( + new DateTime("2011-03-01"), + new TimeseriesResultValue( + ImmutableMap.of( + "first", new Float(1004.940887).doubleValue(), + "last", new Float(151.752485).doubleValue() + ) + ) + ), + new Result<>( + new DateTime("2011-02-01"), + new TimeseriesResultValue( + ImmutableMap.of( + "first", new Float(913.561076).doubleValue(), + "last", new Float(122.258195).doubleValue() + ) + ) + ), + new Result<>( + new DateTime("2011-01-01"), + new TimeseriesResultValue( + ImmutableMap.of( + "first", new Float(800.000000).doubleValue(), + "last", new Float(133.740047).doubleValue() + ) + ) + ) + ); + + Iterable> actualResults = Sequences.toList( + runner.run(query, CONTEXT), + Lists.>newArrayList() + ); + if (descending) { + 
TestHelper.assertExpectedResults(expectedDescendingResults, actualResults); + } else { + TestHelper.assertExpectedResults(expectedAscendingResults, actualResults); + } + } + @Test public void testTimeseriesWithMultiValueDimFilter1() { diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 90145afe832d..31ff67be7498 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -47,6 +47,8 @@ import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; +import io.druid.query.aggregation.FirstAggregatorFactory; +import io.druid.query.aggregation.LastAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator; @@ -449,6 +451,51 @@ public void testTopNOverHyperUniqueFinalizingPostAggregator() assertExpectedResults(expectedResults, query); } + @Test + public void testTopNOverFirstLastAggregator() + { + TopNQuery query = new TopNQueryBuilder() + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .metric("last") + .threshold(3) + .intervals(QueryRunnerTestHelper.fullOnInterval) + .aggregators( + Arrays.asList( + new FirstAggregatorFactory("first", "index", "long"), + new LastAggregatorFactory("last", "index", "long") + ) + ) + .build(); + + List> expectedResults = Arrays.asList( + new Result<>( + new DateTime("2011-01-12T00:00:00.000Z"), + new TopNResultValue( + Arrays.>asList( + ImmutableMap.builder() + .put("market", "spot") + .put("first", 100L) + .put("last", 120L) + .build(), + 
ImmutableMap.builder() + .put("market", "total_market") + .put("first", 1000L) + .put("last", 1029L) + .build(), + ImmutableMap.builder() + .put("market", "upfront") + .put("first", 800L) + .put("last", 780L) + .build() + ) + ) + ) + ); + assertExpectedResults(expectedResults, query); + } + @Test public void testTopNBySegment() { From 4d96703a5acaace062d4e3874c6b78ffda5403ae Mon Sep 17 00:00:00 2001 From: Dave Li Date: Wed, 13 Jul 2016 17:55:28 -0700 Subject: [PATCH 03/10] moving around --- .../src/main/java/io/druid/jackson/AggregatorsModule.java | 4 ++-- .../aggregation/{ => first}/DoubleFirstAggregator.java | 3 ++- .../{ => first}/DoubleFirstBufferAggregator.java | 3 ++- .../aggregation/{ => first}/FirstAggregatorFactory.java | 6 +++++- .../query/aggregation/{ => first}/LongFirstAggregator.java | 3 ++- .../aggregation/{ => first}/LongFirstBufferAggregator.java | 3 ++- .../query/aggregation/{ => last}/DoubleLastAggregator.java | 3 ++- .../aggregation/{ => last}/DoubleLastBufferAggregator.java | 3 ++- .../query/aggregation/{ => last}/LastAggregatorFactory.java | 6 +++++- .../query/aggregation/{ => last}/LongLastAggregator.java | 3 ++- .../aggregation/{ => last}/LongLastBufferAggregator.java | 3 ++- .../io/druid/query/aggregation/FirstAggregationTest.java | 5 +++++ .../io/druid/query/aggregation/LastAggregationTest.java | 5 +++++ .../java/io/druid/query/groupby/GroupByQueryRunnerTest.java | 4 ++-- .../druid/query/timeseries/TimeseriesQueryRunnerTest.java | 4 ++-- .../test/java/io/druid/query/topn/TopNQueryRunnerTest.java | 4 ++-- 16 files changed, 44 insertions(+), 18 deletions(-) rename processing/src/main/java/io/druid/query/aggregation/{ => first}/DoubleFirstAggregator.java (95%) rename processing/src/main/java/io/druid/query/aggregation/{ => first}/DoubleFirstBufferAggregator.java (96%) rename processing/src/main/java/io/druid/query/aggregation/{ => first}/FirstAggregatorFactory.java (97%) rename processing/src/main/java/io/druid/query/aggregation/{ => 
first}/LongFirstAggregator.java (95%) rename processing/src/main/java/io/druid/query/aggregation/{ => first}/LongFirstBufferAggregator.java (96%) rename processing/src/main/java/io/druid/query/aggregation/{ => last}/DoubleLastAggregator.java (95%) rename processing/src/main/java/io/druid/query/aggregation/{ => last}/DoubleLastBufferAggregator.java (96%) rename processing/src/main/java/io/druid/query/aggregation/{ => last}/LastAggregatorFactory.java (97%) rename processing/src/main/java/io/druid/query/aggregation/{ => last}/LongLastAggregator.java (95%) rename processing/src/main/java/io/druid/query/aggregation/{ => last}/LongLastBufferAggregator.java (96%) diff --git a/processing/src/main/java/io/druid/jackson/AggregatorsModule.java b/processing/src/main/java/io/druid/jackson/AggregatorsModule.java index cd48b623dfb9..d4b0fa1dc9bb 100644 --- a/processing/src/main/java/io/druid/jackson/AggregatorsModule.java +++ b/processing/src/main/java/io/druid/jackson/AggregatorsModule.java @@ -29,10 +29,10 @@ import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; -import io.druid.query.aggregation.FirstAggregatorFactory; +import io.druid.query.aggregation.first.FirstAggregatorFactory; import io.druid.query.aggregation.HistogramAggregatorFactory; import io.druid.query.aggregation.JavaScriptAggregatorFactory; -import io.druid.query.aggregation.LastAggregatorFactory; +import io.druid.query.aggregation.last.LastAggregatorFactory; import io.druid.query.aggregation.LongMaxAggregatorFactory; import io.druid.query.aggregation.LongMinAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregator.java similarity index 95% rename from 
processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java rename to processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregator.java index cf590b747502..352536ed7b41 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregator.java @@ -17,9 +17,10 @@ * under the License. */ -package io.druid.query.aggregation; +package io.druid.query.aggregation.first; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstBufferAggregator.java similarity index 96% rename from processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java rename to processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstBufferAggregator.java index e47b870595f4..8349dad65f1d 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleFirstBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstBufferAggregator.java @@ -17,10 +17,11 @@ * under the License. 
*/ -package io.druid.query.aggregation; +package io.druid.query.aggregation.first; import com.google.common.primitives.Longs; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; diff --git a/processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/FirstAggregatorFactory.java similarity index 97% rename from processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java rename to processing/src/main/java/io/druid/query/aggregation/first/FirstAggregatorFactory.java index 1fd32578136b..9ffb9a4a71f9 100644 --- a/processing/src/main/java/io/druid/query/aggregation/FirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/FirstAggregatorFactory.java @@ -17,7 +17,7 @@ * under the License. */ -package io.druid.query.aggregation; +package io.druid.query.aggregation.first; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; @@ -27,6 +27,10 @@ import com.metamx.common.IAE; import com.metamx.common.StringUtils; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; +import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; import io.druid.segment.column.Column; diff --git a/processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregator.java similarity index 95% rename from processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java rename to 
processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregator.java index 1484da1fa5a5..8c185015549c 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongFirstAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregator.java @@ -17,9 +17,10 @@ * under the License. */ -package io.druid.query.aggregation; +package io.druid.query.aggregation.first; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; import io.druid.segment.LongColumnSelector; public class LongFirstAggregator implements Aggregator diff --git a/processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstBufferAggregator.java similarity index 96% rename from processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java rename to processing/src/main/java/io/druid/query/aggregation/first/LongFirstBufferAggregator.java index 3aae1713ca57..e7f750660f42 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongFirstBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstBufferAggregator.java @@ -17,10 +17,11 @@ * under the License. 
*/ -package io.druid.query.aggregation; +package io.druid.query.aggregation.first; import com.google.common.primitives.Longs; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.LongColumnSelector; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregator.java similarity index 95% rename from processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java rename to processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregator.java index a0b7ecf30288..951051179b98 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleLastAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregator.java @@ -17,9 +17,10 @@ * under the License. */ -package io.druid.query.aggregation; +package io.druid.query.aggregation.last; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastBufferAggregator.java similarity index 96% rename from processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java rename to processing/src/main/java/io/druid/query/aggregation/last/DoubleLastBufferAggregator.java index 96c95df83cf4..e02118e4ae00 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleLastBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastBufferAggregator.java @@ -17,10 +17,11 @@ * under the License. 
*/ -package io.druid.query.aggregation; +package io.druid.query.aggregation.last; import com.google.common.primitives.Longs; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; diff --git a/processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LastAggregatorFactory.java similarity index 97% rename from processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java rename to processing/src/main/java/io/druid/query/aggregation/last/LastAggregatorFactory.java index e6155b67a8c2..97e7dbef0262 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LastAggregatorFactory.java @@ -17,7 +17,7 @@ * under the License. */ -package io.druid.query.aggregation; +package io.druid.query.aggregation.last; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; @@ -27,6 +27,10 @@ import com.metamx.common.IAE; import com.metamx.common.StringUtils; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; +import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; import io.druid.segment.column.Column; diff --git a/processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregator.java similarity index 95% rename from processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java rename to 
processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregator.java index 5d7a173d9888..a728714011c4 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongLastAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregator.java @@ -17,9 +17,10 @@ * under the License. */ -package io.druid.query.aggregation; +package io.druid.query.aggregation.last; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; import io.druid.segment.LongColumnSelector; public class LongLastAggregator implements Aggregator diff --git a/processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastBufferAggregator.java similarity index 96% rename from processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java rename to processing/src/main/java/io/druid/query/aggregation/last/LongLastBufferAggregator.java index 41d601cbf4e9..8bc09828eeb4 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongLastBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastBufferAggregator.java @@ -17,10 +17,11 @@ * under the License. 
*/ -package io.druid.query.aggregation; +package io.druid.query.aggregation.last; import com.google.common.primitives.Longs; import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.LongColumnSelector; import java.nio.ByteBuffer; diff --git a/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java index 1f1f501cbdb1..7f4d6b523ef5 100644 --- a/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java @@ -22,6 +22,11 @@ import com.metamx.common.Pair; import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; +import io.druid.query.aggregation.first.DoubleFirstAggregator; +import io.druid.query.aggregation.first.DoubleFirstBufferAggregator; +import io.druid.query.aggregation.first.FirstAggregatorFactory; +import io.druid.query.aggregation.first.LongFirstAggregator; +import io.druid.query.aggregation.first.LongFirstBufferAggregator; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.column.Column; import org.easymock.EasyMock; diff --git a/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java index c3bd2ce27bec..cdd256919da2 100644 --- a/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java @@ -22,6 +22,11 @@ import com.metamx.common.Pair; import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; +import io.druid.query.aggregation.last.DoubleLastAggregator; +import io.druid.query.aggregation.last.DoubleLastBufferAggregator; +import io.druid.query.aggregation.last.LastAggregatorFactory; +import 
io.druid.query.aggregation.last.LongLastAggregator; +import io.druid.query.aggregation.last.LongLastBufferAggregator; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.column.Column; import org.easymock.EasyMock; diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index 7640c1e99e65..e39d4b300295 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -59,9 +59,9 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; -import io.druid.query.aggregation.FirstAggregatorFactory; +import io.druid.query.aggregation.first.FirstAggregatorFactory; import io.druid.query.aggregation.JavaScriptAggregatorFactory; -import io.druid.query.aggregation.LastAggregatorFactory; +import io.druid.query.aggregation.last.LastAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 5883c1ee35a9..914e351dd934 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -36,8 +36,8 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; -import io.druid.query.aggregation.FirstAggregatorFactory; -import 
io.druid.query.aggregation.LastAggregatorFactory; +import io.druid.query.aggregation.first.FirstAggregatorFactory; +import io.druid.query.aggregation.last.LastAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.extraction.MapLookupExtractor; diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 31ff67be7498..9ebac88ca45b 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -47,8 +47,8 @@ import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; -import io.druid.query.aggregation.FirstAggregatorFactory; -import io.druid.query.aggregation.LastAggregatorFactory; +import io.druid.query.aggregation.first.FirstAggregatorFactory; +import io.druid.query.aggregation.last.LastAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator; From 237b5522d8e92837a6281ac300a893ba7fea0791 Mon Sep 17 00:00:00 2001 From: Dave Li Date: Tue, 26 Jul 2016 19:06:45 -0700 Subject: [PATCH 04/10] separate aggregator valueType --- .../druid/collections/SerializablePair.java | 4 +- .../collections/SerializablePairTest.java | 51 +++ docs/content/querying/aggregations.md | 52 +++ .../io/druid/jackson/AggregatorsModule.java | 12 +- .../first/DoubleFirstAggregatorFactory.java | 249 +++++++++++++ .../first/FirstAggregatorFactory.java | 342 ------------------ .../first/LongFirstAggregatorFactory.java | 248 +++++++++++++ .../last/DoubleLastAggregatorFactory.java | 250 +++++++++++++ 
.../last/LastAggregatorFactory.java | 339 ----------------- .../last/LongLastAggregatorFactory.java | 249 +++++++++++++ .../aggregation/FirstAggregationTest.java | 32 +- .../aggregation/LastAggregationTest.java | 35 +- .../query/groupby/GroupByQueryRunnerTest.java | 41 ++- .../GroupByTimeseriesQueryRunnerTest.java | 7 + .../timeseries/TimeseriesQueryRunnerTest.java | 46 ++- .../druid/query/topn/TopNQueryRunnerTest.java | 119 +++++- 16 files changed, 1346 insertions(+), 730 deletions(-) create mode 100644 common/src/test/java/io/druid/collections/SerializablePairTest.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java delete mode 100644 processing/src/main/java/io/druid/query/aggregation/first/FirstAggregatorFactory.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java delete mode 100644 processing/src/main/java/io/druid/query/aggregation/last/LastAggregatorFactory.java create mode 100644 processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java diff --git a/common/src/main/java/io/druid/collections/SerializablePair.java b/common/src/main/java/io/druid/collections/SerializablePair.java index 8f3f574be8a2..0774ca066278 100644 --- a/common/src/main/java/io/druid/collections/SerializablePair.java +++ b/common/src/main/java/io/druid/collections/SerializablePair.java @@ -19,12 +19,14 @@ package io.druid.collections; +import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.metamx.common.Pair; public class SerializablePair extends Pair { - public SerializablePair(@JsonProperty T1 lhs, @JsonProperty T2 rhs) + @JsonCreator + public SerializablePair(@JsonProperty("lhs") T1 lhs, @JsonProperty("rhs") T2 rhs) { super(lhs, rhs); } diff --git 
a/common/src/test/java/io/druid/collections/SerializablePairTest.java b/common/src/test/java/io/druid/collections/SerializablePairTest.java new file mode 100644 index 000000000000..94586820862f --- /dev/null +++ b/common/src/test/java/io/druid/collections/SerializablePairTest.java @@ -0,0 +1,51 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.collections; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.junit.Assert; +import org.junit.Test; + +import java.io.IOException; + +public class SerializablePairTest +{ + private static final ObjectMapper jsonMapper = new ObjectMapper(); + + @Test + public void testBytesSerde() throws IOException + { + SerializablePair pair = new SerializablePair<>(5L, 9L); + byte[] bytes = jsonMapper.writeValueAsBytes(pair); + SerializablePair deserializedPair = jsonMapper.readValue(bytes, SerializablePair.class); + Assert.assertEquals(pair.lhs, deserializedPair.lhs.longValue()); + Assert.assertEquals(pair.rhs, deserializedPair.rhs.longValue()); + } + + @Test + public void testStringSerde() throws IOException + { + SerializablePair pair = new SerializablePair<>(5L, 9L); + String str = jsonMapper.writeValueAsString(pair); + SerializablePair deserializedPair = jsonMapper.readValue(str, SerializablePair.class); + Assert.assertEquals(pair.lhs, deserializedPair.lhs.longValue()); + Assert.assertEquals(pair.rhs, deserializedPair.rhs.longValue()); + } +} diff --git a/docs/content/querying/aggregations.md b/docs/content/querying/aggregations.md index 507f9da12cef..c46750cbb5bb 100644 --- a/docs/content/querying/aggregations.md +++ b/docs/content/querying/aggregations.md @@ -76,6 +76,58 @@ Computes the sum of values as 64-bit floating point value. Similar to `longSum` { "type" : "longMax", "name" : , "fieldName" : } ``` +### First / Last aggregator + +First and Last aggregators cannot be used in an ingestion spec, and should only be specified as part of queries.
+ +#### `doubleFirst` aggregator + +`doubleFirst` computes the metric value with the minimum timestamp or 0 if no row exists + +```json +{ + "type" : "doubleFirst", + "name" : <output_name>, + "fieldName" : <metric_name> +} +``` + +#### `doubleLast` aggregator + +`doubleLast` computes the metric value with the maximum timestamp or 0 if no row exists + +```json +{ + "type" : "doubleLast", + "name" : <output_name>, + "fieldName" : <metric_name> +} +``` + +#### `longFirst` aggregator + +`longFirst` computes the metric value with the minimum timestamp or 0 if no row exists + +```json +{ + "type" : "longFirst", + "name" : <output_name>, + "fieldName" : <metric_name> +} +``` + +#### `longLast` aggregator + +`longLast` computes the metric value with the maximum timestamp or 0 if no row exists + +```json +{ + "type" : "longLast", + "name" : <output_name>, + "fieldName" : <metric_name> +} +``` + +### JavaScript aggregator Computes an arbitrary JavaScript function over a set of columns (both metrics and dimensions are allowed). Your diff --git a/processing/src/main/java/io/druid/jackson/AggregatorsModule.java b/processing/src/main/java/io/druid/jackson/AggregatorsModule.java index d4b0fa1dc9bb..563b5c342d25 100644 --- a/processing/src/main/java/io/druid/jackson/AggregatorsModule.java +++ b/processing/src/main/java/io/druid/jackson/AggregatorsModule.java @@ -29,10 +29,11 @@ import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; -import io.druid.query.aggregation.first.FirstAggregatorFactory; +import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory; import io.druid.query.aggregation.HistogramAggregatorFactory; import io.druid.query.aggregation.JavaScriptAggregatorFactory; -import io.druid.query.aggregation.last.LastAggregatorFactory; +import io.druid.query.aggregation.first.LongFirstAggregatorFactory; +import io.druid.query.aggregation.last.DoubleLastAggregatorFactory; import io.druid.query.aggregation.LongMaxAggregatorFactory; import
io.druid.query.aggregation.LongMinAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -41,6 +42,7 @@ import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; +import io.druid.query.aggregation.last.LongLastAggregatorFactory; import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.FieldAccessPostAggregator; @@ -78,8 +80,10 @@ public AggregatorsModule() @JsonSubTypes.Type(name = "hyperUnique", value = HyperUniquesAggregatorFactory.class), @JsonSubTypes.Type(name = "cardinality", value = CardinalityAggregatorFactory.class), @JsonSubTypes.Type(name = "filtered", value = FilteredAggregatorFactory.class), - @JsonSubTypes.Type(name = "first", value = FirstAggregatorFactory.class), - @JsonSubTypes.Type(name = "last", value = LastAggregatorFactory.class) + @JsonSubTypes.Type(name = "longFirst", value = LongFirstAggregatorFactory.class), + @JsonSubTypes.Type(name = "doubleFirst", value = DoubleFirstAggregatorFactory.class), + @JsonSubTypes.Type(name = "longLast", value = LongLastAggregatorFactory.class), + @JsonSubTypes.Type(name = "doubleLast", value = DoubleLastAggregatorFactory.class) }) public static interface AggregatorFactoryMixin { diff --git a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java new file mode 100644 index 000000000000..63e5d2703893 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java @@ -0,0 +1,249 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation.first; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import com.google.common.primitives.Doubles; +import com.google.common.primitives.Longs; +import com.metamx.common.StringUtils; +import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; +import io.druid.query.aggregation.BufferAggregator; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ObjectColumnSelector; +import io.druid.segment.column.Column; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Map; + +public class DoubleFirstAggregatorFactory extends AggregatorFactory +{ + private static final byte CACHE_TYPE_ID = 16; + + private final String fieldName; + private final String name; + + @JsonCreator + public DoubleFirstAggregatorFactory( + @JsonProperty("name") String name, + @JsonProperty("fieldName") final String fieldName + ) + { + Preconditions.checkNotNull(name, "Must have a valid, non-null 
aggregator name"); + Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); + + this.name = name; + this.fieldName = fieldName; + } + + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + return new DoubleFirstAggregator( + name, metricFactory.makeFloatColumnSelector(fieldName), + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + ); + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + return new DoubleFirstBufferAggregator( + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeFloatColumnSelector(fieldName) + ); + } + + @Override + public Comparator getComparator() + { + return new Comparator() + { + @Override + public int compare(Object o1, Object o2) + { + return Doubles.compare(((SerializablePair) o1).rhs, ((SerializablePair) o2).rhs); + } + }; + } + + @Override + public Object combine(Object lhs, Object rhs) + { + return (((SerializablePair) lhs).lhs <= ((SerializablePair) rhs).lhs) ? 
lhs : rhs; + } + + @Override + public AggregatorFactory getCombiningFactory() + { + return new DoubleFirstAggregatorFactory(name, name) + { + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + return new DoubleFirstAggregator(name, null, null) + { + @Override + public void aggregate() + { + SerializablePair pair = (SerializablePair) selector.get(); + if (pair.lhs < firstTime) { + firstTime = pair.lhs; + firstValue = pair.rhs; + } + } + }; + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + return new DoubleFirstBufferAggregator(null, null) + { + @Override + public void aggregate(ByteBuffer buf, int position) + { + SerializablePair pair = (SerializablePair) selector.get(); + long firstTime = buf.getLong(position); + if (pair.lhs < firstTime) { + buf.putLong(position, pair.lhs); + buf.putDouble(position + Longs.BYTES, pair.rhs); + } + } + }; + } + }; + } + + @Override + public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException + { + if (other.getName().equals(this.getName()) && this.getClass() == other.getClass()) { + return getCombiningFactory(); + } else { + throw new AggregatorFactoryNotMergeableException(this, other); + } + } + + @Override + public List getRequiredColumns() + { + return Arrays.asList(new DoubleFirstAggregatorFactory(fieldName, fieldName)); + } + + @Override + public Object deserialize(Object object) + { + Map map = (Map) object; + return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).doubleValue()); + } + + @Override + public Object finalizeComputation(Object object) + { + return ((SerializablePair) object).rhs; + } + + @Override + @JsonProperty + public String getName() + { + return name; + } + + 
@JsonProperty + public String getFieldName() + { + return fieldName; + } + + @Override + public List requiredFields() + { + return Arrays.asList(Column.TIME_COLUMN_NAME, fieldName); + } + + @Override + public byte[] getCacheKey() + { + byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); + + return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); + } + + @Override + public String getTypeName() + { + return "float"; + } + + @Override + public int getMaxIntermediateSize() + { + return Longs.BYTES + Doubles.BYTES; + } + + @Override + public Object getAggregatorStartValue() + { + throw new UnsupportedOperationException(); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + DoubleFirstAggregatorFactory that = (DoubleFirstAggregatorFactory) o; + + return fieldName.equals(that.fieldName) && name.equals(that.name); + } + + @Override + public int hashCode() + { + int result = name.hashCode(); + result = 31 * result + fieldName.hashCode(); + return result; + } + + @Override + public String toString() + { + return "DoubleFirstAggregatorFactory{" + + "name='" + name + '\'' + + ", fieldName='" + fieldName + '\'' + + '}'; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/first/FirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/FirstAggregatorFactory.java deleted file mode 100644 index 9ffb9a4a71f9..000000000000 --- a/processing/src/main/java/io/druid/query/aggregation/first/FirstAggregatorFactory.java +++ /dev/null @@ -1,342 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.query.aggregation.first; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.primitives.Doubles; -import com.google.common.primitives.Longs; -import com.metamx.common.IAE; -import com.metamx.common.StringUtils; -import io.druid.collections.SerializablePair; -import io.druid.query.aggregation.Aggregator; -import io.druid.query.aggregation.AggregatorFactory; -import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; -import io.druid.query.aggregation.BufferAggregator; -import io.druid.segment.ColumnSelectorFactory; -import io.druid.segment.ObjectColumnSelector; -import io.druid.segment.column.Column; - -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Map; - -public class FirstAggregatorFactory extends AggregatorFactory -{ - private static final byte CACHE_TYPE_ID = 0x10; - private static final String TYPE_LONG = "long"; - private static final String TYPE_DOUBLE = "double"; - - private final String fieldName; - private final String name; - private final String value; - - @JsonCreator - public FirstAggregatorFactory( - @JsonProperty("name") String name, - @JsonProperty("fieldName") final String fieldName, - @JsonProperty("value") String 
value - ) - { - Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); - Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); - Preconditions.checkArgument( - value.equals(TYPE_LONG) || value.equals(TYPE_DOUBLE), - "Must have a valid, non-null type" - ); - - this.name = name; - this.fieldName = fieldName; - this.value = value; - } - - @Override - public Aggregator factorize(ColumnSelectorFactory metricFactory) - { - if (value.equals(TYPE_LONG)) { - return new LongFirstAggregator( - name, metricFactory.makeLongColumnSelector(fieldName), - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) - ); - } else if (value.equals(TYPE_DOUBLE)) { - return new DoubleFirstAggregator( - name, metricFactory.makeFloatColumnSelector(fieldName), - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) - ); - } - throw new IAE("undefined type"); - } - - @Override - public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) - { - if (value.equals(TYPE_LONG)) { - return new LongFirstBufferAggregator( - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), - metricFactory.makeLongColumnSelector(fieldName) - ); - } else if (value.equals(TYPE_DOUBLE)) { - return new DoubleFirstBufferAggregator( - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), - metricFactory.makeFloatColumnSelector(fieldName) - ); - } - throw new IAE("undefined type"); - } - - @Override - public Comparator getComparator() - { - return new Comparator() - { - @Override - public int compare(Object o1, Object o2) - { - return Longs.compare(((SerializablePair) o1).lhs, ((SerializablePair) o2).lhs); - } - }; - } - - @Override - public Object combine(Object lhs, Object rhs) - { - return (((SerializablePair) lhs).lhs <= ((SerializablePair) rhs).lhs) ? 
lhs : rhs; - } - - @Override - public AggregatorFactory getCombiningFactory() - { - return new FirstAggregatorFactory(name, name, value) - { - @Override - public Aggregator factorize(ColumnSelectorFactory metricFactory) - { - final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); - if (value.equals(TYPE_LONG)) { - return new LongFirstAggregator(name, null, null) - { - @Override - public void aggregate() - { - SerializablePair pair = (SerializablePair) selector.get(); - if (pair.lhs < firstTime) { - firstTime = pair.lhs; - firstValue = pair.rhs; - } - } - }; - } else if (value.equals(TYPE_DOUBLE)) { - return new DoubleFirstAggregator(name, null, null) - { - @Override - public void aggregate() - { - SerializablePair pair = (SerializablePair) selector.get(); - if (pair.lhs < firstTime) { - firstTime = pair.lhs; - firstValue = pair.rhs; - } - } - }; - } - throw new IAE("undefined type"); - } - - @Override - public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) - { - final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); - if (value.equals(TYPE_LONG)) { - return new LongFirstBufferAggregator(null, null) - { - @Override - public void aggregate(ByteBuffer buf, int position) - { - SerializablePair pair = (SerializablePair) selector.get(); - long firstTime = buf.getLong(position); - if (pair.lhs < firstTime) { - buf.putLong(position, pair.lhs); - buf.putLong(position + Longs.BYTES, pair.rhs); - } - } - }; - } else if (value.equals(TYPE_DOUBLE)) { - return new DoubleFirstBufferAggregator(null, null) - { - @Override - public void aggregate(ByteBuffer buf, int position) - { - SerializablePair pair = (SerializablePair) selector.get(); - long firstTime = buf.getLong(position); - if (pair.lhs < firstTime) { - buf.putLong(position, pair.lhs); - buf.putDouble(position + Longs.BYTES, pair.rhs); - } - } - }; - } - throw new IAE("undefined type"); - } - }; - } - - @Override - public AggregatorFactory 
getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException - { - if (other.getName().equals(this.getName()) && this.getClass() == other.getClass() && - other.getTypeName().equals(this.getTypeName())) { - return getCombiningFactory(); - } else { - throw new AggregatorFactoryNotMergeableException(this, other); - } - } - - @Override - public List getRequiredColumns() - { - return Arrays.asList(new FirstAggregatorFactory(fieldName, fieldName, value)); - } - - @Override - public Object deserialize(Object object) - { - Map map = (Map) object; - if (value.equals(TYPE_LONG)) { - return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).longValue()); - } else if (value.equals(TYPE_DOUBLE)) { - return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).doubleValue()); - } - throw new IAE("undefined type"); - } - - @Override - public Object finalizeComputation(Object object) - { - return ((SerializablePair) object).rhs; - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @JsonProperty - public String getFieldName() - { - return fieldName; - } - - @JsonProperty - public String getValue() - { - return value; - } - - @Override - public List requiredFields() - { - return Arrays.asList(Column.TIME_COLUMN_NAME, fieldName); - } - - @Override - public byte[] getCacheKey() - { - byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - - return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); - } - - @Override - public String getTypeName() - { - return value.equals(TYPE_DOUBLE) ? 
"float" : value; - } - - @Override - public int getMaxIntermediateSize() - { - if (value.equals(TYPE_LONG)) { - return Longs.BYTES * 2; - } else if (value.equals(TYPE_DOUBLE)) { - return Longs.BYTES + Doubles.BYTES; - } - throw new IAE("undefined type"); - } - - @Override - public Object getAggregatorStartValue() - { - if (value.equals(TYPE_LONG)) { - return new SerializablePair<>(Long.MAX_VALUE, 0L); - } else if (value.equals(TYPE_DOUBLE)) { - return new SerializablePair<>(Long.MAX_VALUE, 0D); - } - throw new IAE("undefined type"); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - FirstAggregatorFactory that = (FirstAggregatorFactory) o; - - if (!fieldName.equals(that.fieldName)) { - return false; - } - if (!name.equals(that.name)) { - return false; - } - if (!value.equals(that.value)) { - return false; - } - - return true; - } - - @Override - public int hashCode() - { - int result = name.hashCode(); - result = 31 * result + fieldName.hashCode(); - result = 31 * result + value.hashCode(); - return result; - } - - @Override - public String toString() - { - return "FirstAggregatorFactory{" + - "name='" + name + '\'' + - ", fieldName='" + fieldName + '\'' + - ", value='" + value + '\'' + - '}'; - } -} diff --git a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java new file mode 100644 index 000000000000..c9ec95955f4a --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java @@ -0,0 +1,248 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation.first; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import com.google.common.primitives.Longs; +import com.metamx.common.StringUtils; +import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; +import io.druid.query.aggregation.BufferAggregator; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ObjectColumnSelector; +import io.druid.segment.column.Column; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Map; + +public class LongFirstAggregatorFactory extends AggregatorFactory +{ + private static final byte CACHE_TYPE_ID = 17; + + private final String fieldName; + private final String name; + + @JsonCreator + public LongFirstAggregatorFactory( + @JsonProperty("name") String name, + @JsonProperty("fieldName") final String fieldName + ) + { + Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); + Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); + + this.name = name; + this.fieldName = fieldName; + } + + 
@Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + return new LongFirstAggregator( + name, metricFactory.makeLongColumnSelector(fieldName), + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + ); + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + return new LongFirstBufferAggregator( + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeLongColumnSelector(fieldName) + ); + } + + @Override + public Comparator getComparator() + { + return new Comparator() + { + @Override + public int compare(Object o1, Object o2) + { + return Longs.compare(((SerializablePair) o1).rhs, ((SerializablePair) o2).rhs); + } + }; + } + + @Override + public Object combine(Object lhs, Object rhs) + { + return (((SerializablePair) lhs).lhs <= ((SerializablePair) rhs).lhs) ? lhs : rhs; + } + + @Override + public AggregatorFactory getCombiningFactory() + { + return new LongFirstAggregatorFactory(name, name) + { + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + return new LongFirstAggregator(name, null, null) + { + @Override + public void aggregate() + { + SerializablePair pair = (SerializablePair) selector.get(); + if (pair.lhs < firstTime) { + firstTime = pair.lhs; + firstValue = pair.rhs; + } + } + }; + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + return new LongFirstBufferAggregator(null, null) + { + @Override + public void aggregate(ByteBuffer buf, int position) + { + SerializablePair pair = (SerializablePair) selector.get(); + long firstTime = buf.getLong(position); + if (pair.lhs < firstTime) { + buf.putLong(position, pair.lhs); + buf.putLong(position + Longs.BYTES, pair.rhs); + } + } + }; + } + }; + } 
+ + @Override + public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException + { + if (other.getName().equals(this.getName()) && this.getClass() == other.getClass()) { + return getCombiningFactory(); + } else { + throw new AggregatorFactoryNotMergeableException(this, other); + } + } + + @Override + public List getRequiredColumns() + { + return Arrays.asList(new LongFirstAggregatorFactory(fieldName, fieldName)); + } + + @Override + public Object deserialize(Object object) + { + Map map = (Map) object; + return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).longValue()); + } + + @Override + public Object finalizeComputation(Object object) + { + return ((SerializablePair) object).rhs; + } + + @Override + @JsonProperty + public String getName() + { + return name; + } + + @JsonProperty + public String getFieldName() + { + return fieldName; + } + + @Override + public List requiredFields() + { + return Arrays.asList(Column.TIME_COLUMN_NAME, fieldName); + } + + @Override + public byte[] getCacheKey() + { + byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); + + return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); + } + + @Override + public String getTypeName() + { + return "long"; + } + + @Override + public int getMaxIntermediateSize() + { + return Longs.BYTES * 2; + } + + @Override + public Object getAggregatorStartValue() + { + throw new UnsupportedOperationException(); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + LongFirstAggregatorFactory that = (LongFirstAggregatorFactory) o; + + return fieldName.equals(that.fieldName) && name.equals(that.name); + } + + @Override + public int hashCode() + { + int result = name.hashCode(); + result = 31 * result + fieldName.hashCode(); + return result; + } + + @Override + 
public String toString() + { + return "LongFirstAggregatorFactory{" + + "name='" + name + '\'' + + ", fieldName='" + fieldName + '\'' + + '}'; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java new file mode 100644 index 000000000000..9069740436b1 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java @@ -0,0 +1,250 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation.last; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import com.google.common.primitives.Doubles; +import com.google.common.primitives.Longs; +import com.metamx.common.IAE; +import com.metamx.common.StringUtils; +import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; +import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.first.LongFirstAggregatorFactory; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ObjectColumnSelector; +import io.druid.segment.column.Column; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Map; + +public class DoubleLastAggregatorFactory extends AggregatorFactory +{ + private static final byte CACHE_TYPE_ID = 18; + + private final String fieldName; + private final String name; + + @JsonCreator + public DoubleLastAggregatorFactory( + @JsonProperty("name") String name, + @JsonProperty("fieldName") final String fieldName + ) + { + Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); + Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); + this.name = name; + this.fieldName = fieldName; + } + + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + return new DoubleLastAggregator( + name, metricFactory.makeFloatColumnSelector(fieldName), + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + ); + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + return new DoubleLastBufferAggregator( + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + 
metricFactory.makeFloatColumnSelector(fieldName) + ); + } + + @Override + public Comparator getComparator() + { + return new Comparator() + { + @Override + public int compare(Object o1, Object o2) + { + return Doubles.compare(((SerializablePair) o1).rhs, ((SerializablePair) o2).rhs); + } + }; + } + + @Override + public Object combine(Object lhs, Object rhs) + { + return (((SerializablePair) lhs).lhs > ((SerializablePair) rhs).lhs) ? lhs : rhs; + } + + @Override + public AggregatorFactory getCombiningFactory() + { + return new DoubleLastAggregatorFactory(name, name) + { + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + return new DoubleLastAggregator(name, null, null) + { + @Override + public void aggregate() + { + SerializablePair pair = (SerializablePair) selector.get(); + if (pair.lhs >= lastTime) { + lastTime = pair.lhs; + lastValue = pair.rhs; + } + } + }; + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + return new DoubleLastBufferAggregator(null, null) + { + @Override + public void aggregate(ByteBuffer buf, int position) + { + SerializablePair pair = (SerializablePair) selector.get(); + long lastTime = buf.getLong(position); + if (pair.lhs >= lastTime) { + buf.putLong(position, pair.lhs); + buf.putDouble(position + Longs.BYTES, pair.rhs); + } + } + }; + } + }; + } + + @Override + public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException + { + if (other.getName().equals(this.getName()) && this.getClass() == other.getClass()) { + return getCombiningFactory(); + } else { + throw new AggregatorFactoryNotMergeableException(this, other); + } + } + + @Override + public List getRequiredColumns() + { + return Arrays.asList(new 
DoubleLastAggregatorFactory(fieldName, fieldName)); + } + + @Override + public Object deserialize(Object object) + { + Map map = (Map) object; + return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).doubleValue()); + } + + @Override + public Object finalizeComputation(Object object) + { + return ((SerializablePair) object).rhs; + } + + @Override + @JsonProperty + public String getName() + { + return name; + } + + @JsonProperty + public String getFieldName() + { + return fieldName; + } + + @Override + public List requiredFields() + { + return Arrays.asList(Column.TIME_COLUMN_NAME, fieldName); + } + + @Override + public byte[] getCacheKey() + { + byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); + + return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); + } + + @Override + public String getTypeName() + { + return "float"; + } + + @Override + public int getMaxIntermediateSize() + { + return Longs.BYTES + Doubles.BYTES; + } + + @Override + public Object getAggregatorStartValue() + { + throw new UnsupportedOperationException(); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + DoubleLastAggregatorFactory that = (DoubleLastAggregatorFactory) o; + + return fieldName.equals(that.fieldName) && name.equals(that.name); + } + + @Override + public int hashCode() + { + int result = name.hashCode(); + result = 31 * result + fieldName.hashCode(); + return result; + } + + @Override + public String toString() + { + return "DoubleLastAggregatorFactory{" + + "name='" + name + '\'' + + ", fieldName='" + fieldName + '\'' + + '}'; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LastAggregatorFactory.java deleted file mode 100644 index 97e7dbef0262..000000000000 --- 
a/processing/src/main/java/io/druid/query/aggregation/last/LastAggregatorFactory.java +++ /dev/null @@ -1,339 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.query.aggregation.last; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.primitives.Doubles; -import com.google.common.primitives.Longs; -import com.metamx.common.IAE; -import com.metamx.common.StringUtils; -import io.druid.collections.SerializablePair; -import io.druid.query.aggregation.Aggregator; -import io.druid.query.aggregation.AggregatorFactory; -import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; -import io.druid.query.aggregation.BufferAggregator; -import io.druid.segment.ColumnSelectorFactory; -import io.druid.segment.ObjectColumnSelector; -import io.druid.segment.column.Column; - -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Map; - -public class LastAggregatorFactory extends AggregatorFactory -{ - private static final byte CACHE_TYPE_ID = 0x11; - private static final String 
TYPE_LONG = "long"; - private static final String TYPE_DOUBLE = "double"; - - private final String fieldName; - private final String name; - private final String value; - - @JsonCreator - public LastAggregatorFactory( - @JsonProperty("name") String name, - @JsonProperty("fieldName") final String fieldName, - @JsonProperty("value") String value - ) - { - Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); - Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); - Preconditions.checkArgument( - value.equals(TYPE_LONG) || value.equals(TYPE_DOUBLE), - "Must have a valid, non-null type" - ); - - this.name = name; - this.fieldName = fieldName; - this.value = value; - } - - @Override - public Aggregator factorize(ColumnSelectorFactory metricFactory) - { - if (value.equals(TYPE_LONG)) { - return new LongLastAggregator( - name, metricFactory.makeLongColumnSelector(fieldName), - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) - ); - } else if (value.equals(TYPE_DOUBLE)) { - return new DoubleLastAggregator( - name, metricFactory.makeFloatColumnSelector(fieldName), - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) - ); - } - throw new IAE("undefined type"); - } - - @Override - public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) - { - if (value.equals(TYPE_LONG)) { - return new LongLastBufferAggregator( - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), - metricFactory.makeLongColumnSelector(fieldName) - ); - } else if (value.equals(TYPE_DOUBLE)) { - return new DoubleLastBufferAggregator( - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), - metricFactory.makeFloatColumnSelector(fieldName) - ); - } - throw new IAE("undefined type"); - } - - @Override - public Comparator getComparator() - { - return new Comparator() - { - @Override - public int compare(Object o1, Object o2) - { - return Longs.compare(((SerializablePair) o1).lhs, 
((SerializablePair) o2).lhs); - } - }; - } - - @Override - public Object combine(Object lhs, Object rhs) - { - return (((SerializablePair) lhs).lhs > ((SerializablePair) rhs).lhs) ? lhs : rhs; - } - - @Override - public AggregatorFactory getCombiningFactory() - { - return new LastAggregatorFactory(name, name, value) - { - @Override - public Aggregator factorize(ColumnSelectorFactory metricFactory) - { - final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); - if (value.equals(TYPE_LONG)) { - return new LongLastAggregator(name, null, null) - { - @Override - public void aggregate() - { - SerializablePair pair = (SerializablePair) selector.get(); - if (pair.rhs >= lastTime) { - lastTime = pair.lhs; - lastValue = pair.rhs; - } - } - }; - } else if (value.equals(TYPE_DOUBLE)) { - return new DoubleLastAggregator(name, null, null) - { - @Override - public void aggregate() - { - SerializablePair pair = (SerializablePair) selector.get(); - if (pair.rhs >= lastTime) { - lastTime = pair.lhs; - lastValue = pair.rhs; - } - } - }; - } - throw new IAE("undefined type"); - } - - @Override - public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) - { - final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); - if (value.equals(TYPE_LONG)) { - return new LongLastBufferAggregator(null, null) - { - @Override - public void aggregate(ByteBuffer buf, int position) - { - SerializablePair pair = (SerializablePair) selector.get(); - long lastTime = buf.getLong(position); - if (pair.lhs >= lastTime) { - buf.putLong(position, pair.lhs); - buf.putLong(position + Longs.BYTES, pair.rhs); - } - } - }; - } else if (value.equals(TYPE_DOUBLE)) { - return new DoubleLastBufferAggregator(null, null) - { - @Override - public void aggregate(ByteBuffer buf, int position) - { - SerializablePair pair = (SerializablePair) selector.get(); - long lastTime = buf.getLong(position); - if (pair.lhs >= lastTime) { - buf.putLong(position, 
pair.lhs); - buf.putDouble(position + Longs.BYTES, pair.rhs); - } - } - }; - } - throw new IAE("undefined type"); - } - }; - } - - @Override - public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException - { - if (other.getName().equals(this.getName()) && this.getClass() == other.getClass() && - other.getTypeName().equals(this.getTypeName())) { - return getCombiningFactory(); - } else { - throw new AggregatorFactoryNotMergeableException(this, other); - } - } - - @Override - public List getRequiredColumns() - { - return Arrays.asList(new LastAggregatorFactory(fieldName, fieldName, value)); - } - - @Override - public Object deserialize(Object object) - { - Map map = (Map) object; - if (value.equals(TYPE_LONG)) { - return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).longValue()); - } else if (value.equals(TYPE_DOUBLE)) { - return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).doubleValue()); - } - throw new IAE("undefined type"); - } - - @Override - public Object finalizeComputation(Object object) - { - return ((SerializablePair) object).rhs; - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @JsonProperty - public String getFieldName() - { - return fieldName; - } - - @JsonProperty - public String getValue() - { - return value; - } - - @Override - public List requiredFields() - { - return Arrays.asList(Column.TIME_COLUMN_NAME, fieldName); - } - - @Override - public byte[] getCacheKey() - { - byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - - return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); - } - - @Override - public String getTypeName() - { - return value.equals(TYPE_DOUBLE) ? 
"float" : value; - } - - @Override - public int getMaxIntermediateSize() - { - if (value.equals(TYPE_LONG)) { - return Longs.BYTES * 2; - } else if (value.equals(TYPE_DOUBLE)) { - return Longs.BYTES + Doubles.BYTES; - } - throw new IAE("undefined type"); - } - - @Override - public Object getAggregatorStartValue() - { - if (value.equals(TYPE_LONG)) { - return new SerializablePair<>(Long.MIN_VALUE, 0L); - } else if (value.equals(TYPE_DOUBLE)) { - return new SerializablePair<>(Long.MIN_VALUE, 0D); - } - throw new IAE("undefined type"); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - LastAggregatorFactory that = (LastAggregatorFactory) o; - - if (!fieldName.equals(that.fieldName)) { - return false; - } - if (!name.equals(that.name)) { - return false; - } - return value.equals(that.value); - - } - - @Override - public int hashCode() - { - int result = name.hashCode(); - result = 31 * result + fieldName.hashCode(); - result = 31 * result + value.hashCode(); - return result; - } - - @Override - public String toString() - { - return "LastAggregatorFactory{" + - "name='" + name + '\'' + - ", fieldName='" + fieldName + '\'' + - ", value='" + value + '\'' + - '}'; - } -} diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java new file mode 100644 index 000000000000..7fe14415fdff --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java @@ -0,0 +1,249 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation.last; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import com.google.common.primitives.Longs; +import com.metamx.common.StringUtils; +import io.druid.collections.SerializablePair; +import io.druid.query.aggregation.Aggregator; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; +import io.druid.query.aggregation.BufferAggregator; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ObjectColumnSelector; +import io.druid.segment.column.Column; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Map; + +public class LongLastAggregatorFactory extends AggregatorFactory +{ + private static final byte CACHE_TYPE_ID = 19; + + private final String fieldName; + private final String name; + + @JsonCreator + public LongLastAggregatorFactory( + @JsonProperty("name") String name, + @JsonProperty("fieldName") final String fieldName + ) + { + Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); + Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); + this.name = name; + this.fieldName = fieldName; + } + + @Override + 
public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + return new LongLastAggregator( + name, metricFactory.makeLongColumnSelector(fieldName), + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + ); + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + return new LongLastBufferAggregator( + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeLongColumnSelector(fieldName) + ); + } + + @Override + public Comparator getComparator() + { + return new Comparator() + { + @Override + public int compare(Object o1, Object o2) + { + return Longs.compare(((SerializablePair) o1).rhs, ((SerializablePair) o2).rhs); + } + }; + } + + @Override + public Object combine(Object lhs, Object rhs) + { + return (((SerializablePair) lhs).lhs > ((SerializablePair) rhs).lhs) ? lhs : rhs; + } + + + @Override + public AggregatorFactory getCombiningFactory() + { + return new LongLastAggregatorFactory(name, name) + { + @Override + public Aggregator factorize(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + return new LongLastAggregator(name, null, null) + { + @Override + public void aggregate() + { + SerializablePair pair = (SerializablePair) selector.get(); + if (pair.lhs >= lastTime) { + lastTime = pair.lhs; + lastValue = pair.rhs; + } + } + }; + } + + @Override + public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) + { + final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name); + return new LongLastBufferAggregator(null, null) + { + @Override + public void aggregate(ByteBuffer buf, int position) + { + SerializablePair pair = (SerializablePair) selector.get(); + long lastTime = buf.getLong(position); + if (pair.lhs >= lastTime) { + buf.putLong(position, pair.lhs); + buf.putLong(position + Longs.BYTES, pair.rhs); + } + } + }; + } + }; + } + + @Override + 
public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException + { + if (other.getName().equals(this.getName()) && this.getClass() == other.getClass()) { + return getCombiningFactory(); + } else { + throw new AggregatorFactoryNotMergeableException(this, other); + } + } + + @Override + public List getRequiredColumns() + { + return Arrays.asList(new LongLastAggregatorFactory(fieldName, fieldName)); + } + + @Override + public Object deserialize(Object object) + { + Map map = (Map) object; + return new SerializablePair<>(((Number) map.get("lhs")).longValue(), ((Number) map.get("rhs")).longValue()); + } + + @Override + public Object finalizeComputation(Object object) + { + return ((SerializablePair) object).rhs; + } + + @Override + @JsonProperty + public String getName() + { + return name; + } + + @JsonProperty + public String getFieldName() + { + return fieldName; + } + + @Override + public List requiredFields() + { + return Arrays.asList(Column.TIME_COLUMN_NAME, fieldName); + } + + @Override + public byte[] getCacheKey() + { + byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); + + return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); + } + + + @Override + public String getTypeName() + { + return "long"; + } + + @Override + public int getMaxIntermediateSize() + { + return Longs.BYTES * 2; + } + + @Override + public Object getAggregatorStartValue() + { + throw new UnsupportedOperationException(); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + LongLastAggregatorFactory that = (LongLastAggregatorFactory) o; + + return name.equals(that.name) && fieldName.equals(that.fieldName); + } + + @Override + public int hashCode() + { + int result = name.hashCode(); + result = 31 * result + fieldName.hashCode(); + return result; + } + + @Override + public String 
toString() + { + return "LongLastAggregatorFactory{" + + "name='" + name + '\'' + + ", fieldName='" + fieldName + '\'' + + '}'; + } +} diff --git a/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java index 7f4d6b523ef5..225323aa4837 100644 --- a/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java @@ -23,9 +23,10 @@ import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; import io.druid.query.aggregation.first.DoubleFirstAggregator; +import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory; import io.druid.query.aggregation.first.DoubleFirstBufferAggregator; -import io.druid.query.aggregation.first.FirstAggregatorFactory; import io.druid.query.aggregation.first.LongFirstAggregator; +import io.druid.query.aggregation.first.LongFirstAggregatorFactory; import io.druid.query.aggregation.first.LongFirstBufferAggregator; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.column.Column; @@ -38,8 +39,8 @@ public class FirstAggregationTest { - private FirstAggregatorFactory doubleFirstAggFactory; - private FirstAggregatorFactory longFirstAggFactory; + private DoubleFirstAggregatorFactory doubleFirstAggFactory; + private LongFirstAggregatorFactory longFirstAggFactory; private ColumnSelectorFactory colSelectorFactory; private TestLongColumnSelector timeSelector; private TestFloatColumnSelector floatSelector; @@ -51,10 +52,8 @@ public class FirstAggregationTest public FirstAggregationTest() throws Exception { - String doubleSpecJson = "{\"type\": \"first\", \"name\": \"billy\", \"fieldName\": \"nilly\", \"value\": \"double\"}"; - String longSpecJson = "{\"type\": \"first\", \"name\": \"bill\", \"fieldName\": \"nnn\", \"value\": \"long\"}"; - doubleFirstAggFactory = new 
DefaultObjectMapper().readValue(doubleSpecJson, FirstAggregatorFactory.class); - longFirstAggFactory = new DefaultObjectMapper().readValue(longSpecJson, FirstAggregatorFactory.class); + doubleFirstAggFactory = new DoubleFirstAggregatorFactory("billy", "nilly"); + longFirstAggFactory = new LongFirstAggregatorFactory("bill", "nnn"); } @Before @@ -171,10 +170,10 @@ public void testCombine() @Test public void testEqualsAndHashCode() throws Exception { - FirstAggregatorFactory one = new FirstAggregatorFactory("name1", "fieldName1", "double"); - FirstAggregatorFactory oneAgain = new FirstAggregatorFactory("name1", "fieldName1", "double"); - FirstAggregatorFactory two = new FirstAggregatorFactory("name1", "fieldName1", "long"); - FirstAggregatorFactory three = new FirstAggregatorFactory("name2", "fieldName2", "double"); + DoubleFirstAggregatorFactory one = new DoubleFirstAggregatorFactory("name1", "fieldName1"); + DoubleFirstAggregatorFactory oneAgain = new DoubleFirstAggregatorFactory("name1", "fieldName1"); + LongFirstAggregatorFactory two = new LongFirstAggregatorFactory("name1", "fieldName1"); + DoubleFirstAggregatorFactory three = new DoubleFirstAggregatorFactory("name2", "fieldName2"); Assert.assertEquals(one.hashCode(), oneAgain.hashCode()); @@ -183,6 +182,17 @@ public void testEqualsAndHashCode() throws Exception Assert.assertFalse(one.equals(three)); } + @Test + public void testSerde() throws Exception + { + DefaultObjectMapper mapper = new DefaultObjectMapper(); + String doubleSpecJson = "{\"type\":\"doubleFirst\",\"name\":\"billy\",\"fieldName\":\"nilly\"}"; + String longSpecJson = "{\"type\":\"longFirst\",\"name\":\"bill\",\"fieldName\":\"nnn\"}"; + + Assert.assertEquals(doubleFirstAggFactory, mapper.readValue(doubleSpecJson, AggregatorFactory.class)); + Assert.assertEquals(longFirstAggFactory, mapper.readValue(longSpecJson, AggregatorFactory.class)); + } + private void aggregate( TestLongColumnSelector timeSelector, TestFloatColumnSelector selector, diff 
--git a/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java index cdd256919da2..018a17b06ee6 100644 --- a/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java @@ -23,9 +23,10 @@ import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; import io.druid.query.aggregation.last.DoubleLastAggregator; +import io.druid.query.aggregation.last.DoubleLastAggregatorFactory; import io.druid.query.aggregation.last.DoubleLastBufferAggregator; -import io.druid.query.aggregation.last.LastAggregatorFactory; import io.druid.query.aggregation.last.LongLastAggregator; +import io.druid.query.aggregation.last.LongLastAggregatorFactory; import io.druid.query.aggregation.last.LongLastBufferAggregator; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.column.Column; @@ -38,8 +39,8 @@ public class LastAggregationTest { - private LastAggregatorFactory doubleLastAggFactory; - private LastAggregatorFactory longLastAggFactory; + private DoubleLastAggregatorFactory doubleLastAggFactory; + private LongLastAggregatorFactory longLastAggFactory; private ColumnSelectorFactory colSelectorFactory; private TestLongColumnSelector timeSelector; private TestFloatColumnSelector floatSelector; @@ -51,10 +52,10 @@ public class LastAggregationTest public LastAggregationTest() throws Exception { - String doubleSpecJson = "{\"type\": \"last\", \"name\": \"billy\", \"fieldName\": \"nilly\", \"value\": \"double\"}"; - String longSpecJson = "{\"type\": \"last\", \"name\": \"bill\", \"fieldName\": \"nnn\", \"value\": \"long\"}"; - doubleLastAggFactory = new DefaultObjectMapper().readValue(doubleSpecJson, LastAggregatorFactory.class); - longLastAggFactory = new DefaultObjectMapper().readValue(longSpecJson, LastAggregatorFactory.class); + String 
doubleSpecJson = "{\"type\": \"doubleLast\", \"name\": \"billy\", \"fieldName\": \"nilly\"}"; + String longSpecJson = "{\"type\": \"longLast\", \"name\": \"bill\", \"fieldName\": \"nnn\"}"; + doubleLastAggFactory = new DefaultObjectMapper().readValue(doubleSpecJson, DoubleLastAggregatorFactory.class); + longLastAggFactory = new DefaultObjectMapper().readValue(longSpecJson, LongLastAggregatorFactory.class); } @Before @@ -167,14 +168,13 @@ public void testCombine() Assert.assertEquals(pair2, doubleLastAggFactory.combine(pair1, pair2)); } - @Test public void testEqualsAndHashCode() throws Exception { - LastAggregatorFactory one = new LastAggregatorFactory("name1", "fieldName1", "double"); - LastAggregatorFactory oneAgain = new LastAggregatorFactory("name1", "fieldName1", "double"); - LastAggregatorFactory two = new LastAggregatorFactory("name1", "fieldName1", "long"); - LastAggregatorFactory three = new LastAggregatorFactory("name2", "fieldName2", "double"); + DoubleLastAggregatorFactory one = new DoubleLastAggregatorFactory("name1", "fieldName1"); + DoubleLastAggregatorFactory oneAgain = new DoubleLastAggregatorFactory("name1", "fieldName1"); + LongLastAggregatorFactory two = new LongLastAggregatorFactory("name1", "fieldName1"); + DoubleLastAggregatorFactory three = new DoubleLastAggregatorFactory("name2", "fieldName2"); Assert.assertEquals(one.hashCode(), oneAgain.hashCode()); @@ -183,6 +183,17 @@ public void testEqualsAndHashCode() throws Exception Assert.assertFalse(one.equals(three)); } + @Test + public void testSerde() throws Exception + { + DefaultObjectMapper mapper = new DefaultObjectMapper(); + String doubleSpecJson = "{\"type\":\"doubleLast\",\"name\":\"billy\",\"fieldName\":\"nilly\"}"; + String longSpecJson = "{\"type\":\"longLast\",\"name\":\"bill\",\"fieldName\":\"nnn\"}"; + + Assert.assertEquals(doubleLastAggFactory, mapper.readValue(doubleSpecJson, AggregatorFactory.class)); + Assert.assertEquals(longLastAggFactory, mapper.readValue(longSpecJson, 
AggregatorFactory.class)); + } + private void aggregate( TestLongColumnSelector timeSelector, TestFloatColumnSelector selector, diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index e39d4b300295..6dfceba2e780 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -59,14 +59,14 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; -import io.druid.query.aggregation.first.FirstAggregatorFactory; import io.druid.query.aggregation.JavaScriptAggregatorFactory; -import io.druid.query.aggregation.last.LastAggregatorFactory; +import io.druid.query.aggregation.first.LongFirstAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; +import io.druid.query.aggregation.last.LongLastAggregatorFactory; import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.ExpressionPostAggregator; @@ -1627,8 +1627,8 @@ public void testGroupByWithFirstLast() .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"))) .setAggregatorSpecs( Arrays.asList( - new FirstAggregatorFactory("first", "index", "long"), - new LastAggregatorFactory("last", "index", "long") + new LongFirstAggregatorFactory("first", "index"), + new LongLastAggregatorFactory("last", "index") ) ) 
.setGranularity(QueryRunnerTestHelper.monthGran) @@ -1653,6 +1653,31 @@ public void testGroupByWithFirstLast() TestHelper.assertExpectedObjects(expectedResults, results, ""); } + @Test + public void testGroupByWithNoResult() + { + GroupByQuery query = GroupByQuery + .builder() + .setDataSource(QueryRunnerTestHelper.dataSource) + .setQuerySegmentSpec(QueryRunnerTestHelper.emptyInterval) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"))) + .setAggregatorSpecs( + Arrays.asList( + QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.indexLongSum, + QueryRunnerTestHelper.qualityCardinality, + new LongFirstAggregatorFactory("first", "index"), + new LongLastAggregatorFactory("last", "index") + ) + ) + .setGranularity(QueryRunnerTestHelper.dayGran) + .build(); + + List expectedResults = ImmutableList.of(); + Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + Assert.assertEquals(expectedResults, results); + } + @Test public void testGroupByWithNullProducingDimExtractionFn() { @@ -5258,8 +5283,8 @@ public void testSubqueryWithFirstLast() .setAggregatorSpecs( ImmutableList.of( QueryRunnerTestHelper.rowsCount, - new FirstAggregatorFactory("innerfirst", "index", "long"), - new LastAggregatorFactory("innerlast", "index", "long") + new LongFirstAggregatorFactory("innerfirst", "index"), + new LongLastAggregatorFactory("innerlast", "index") ) ) .setGranularity(QueryRunnerTestHelper.dayGran) @@ -5272,8 +5297,8 @@ public void testSubqueryWithFirstLast() .setDimensions(Lists.newArrayList()) .setAggregatorSpecs( ImmutableList.of( - new FirstAggregatorFactory("first", "innerfirst", "long"), - new LastAggregatorFactory("last", "innerlast", "long") + new LongFirstAggregatorFactory("first", "innerfirst"), + new LongLastAggregatorFactory("last", "innerlast") ) ) .setGranularity(QueryRunnerTestHelper.monthGran) diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java 
b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java index 38b71f23e367..1b74bbb5332f 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java @@ -112,6 +112,13 @@ public GroupByTimeseriesQueryRunnerTest(QueryRunner runner) super(runner, false); } + @Override + public void testEmptyTimeseries() + { + // Skip this test because the timeseries test expects the empty range to have one entry, but group by + // does not expect anything + } + @Override public void testFullOnTimeseries() { diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 914e351dd934..3f3327e7a7a2 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -36,8 +36,8 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; -import io.druid.query.aggregation.first.FirstAggregatorFactory; -import io.druid.query.aggregation.last.LastAggregatorFactory; +import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory; +import io.druid.query.aggregation.last.DoubleLastAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.extraction.MapLookupExtractor; @@ -113,6 +113,44 @@ public TimeseriesQueryRunnerTest( this.descending = descending; } + @Test + public void testEmptyTimeseries() + { + TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + 
.intervals(QueryRunnerTestHelper.emptyInterval) + .aggregators( + Arrays.asList( + QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.indexDoubleSum, + new DoubleFirstAggregatorFactory("first", "index") + + ) + ) + .descending(descending) + .build(); + + List> expectedResults = ImmutableList.of( + new Result<>( + new DateTime("2020-04-02"), + new TimeseriesResultValue( + ImmutableMap.of( + "rows", 0L, + "index", 0D, + "first", 0D + ) + ) + ) + ); + + Iterable> actualResults = Sequences.toList( + runner.run(query, CONTEXT), + Lists.>newArrayList() + ); + TestHelper.assertExpectedResults(expectedResults, actualResults); + } + @Test public void testFullOnTimeseries() { @@ -1743,8 +1781,8 @@ public void testTimeseriesWithFirstLastAggregator() .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( ImmutableList.of( - new FirstAggregatorFactory("first", "index", "double"), - new LastAggregatorFactory("last", "index", "double") + new DoubleFirstAggregatorFactory("first", "index"), + new DoubleLastAggregatorFactory("last", "index") ) ) .descending(descending) diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 9ebac88ca45b..90b18dd52f83 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -47,14 +47,15 @@ import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; -import io.druid.query.aggregation.first.FirstAggregatorFactory; -import io.druid.query.aggregation.last.LastAggregatorFactory; +import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory; +import io.druid.query.aggregation.first.LongFirstAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import 
io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; +import io.druid.query.aggregation.last.LongLastAggregatorFactory; import io.druid.query.dimension.ExtractionDimensionSpec; import io.druid.query.extraction.DimExtractionFn; import io.druid.query.extraction.ExtractionFn; @@ -174,6 +175,40 @@ private Sequence> runWithMerge( return mergeRunner.run(query, context); } + @Test + public void testEmptyTopN() + { + TopNQuery query = new TopNQueryBuilder() + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .metric(QueryRunnerTestHelper.indexMetric) + .threshold(4) + .intervals(QueryRunnerTestHelper.emptyInterval) + .aggregators( + Lists.newArrayList( + Iterables.concat( + QueryRunnerTestHelper.commonAggregators, + Lists.newArrayList( + new DoubleMaxAggregatorFactory("maxIndex", "index"), + new DoubleMinAggregatorFactory("minIndex", "index"), + new DoubleFirstAggregatorFactory("first", "index") + ) + ) + ) + ) + .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .build(); + + List> expectedResults = ImmutableList.of( + new Result<>( + new DateTime("2020-04-02T00:00:00.000Z"), + new TopNResultValue(ImmutableList.of()) + ) + ); + assertExpectedResults(expectedResults, query); + } + @Test public void testFullOnTopN() { @@ -456,38 +491,104 @@ public void testTopNOverFirstLastAggregator() { TopNQuery query = new TopNQueryBuilder() .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .granularity(QueryRunnerTestHelper.monthGran) .dimension(QueryRunnerTestHelper.marketDimension) .metric("last") .threshold(3) 
.intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( Arrays.asList( - new FirstAggregatorFactory("first", "index", "long"), - new LastAggregatorFactory("last", "index", "long") + new LongFirstAggregatorFactory("first", "index"), + new LongLastAggregatorFactory("last", "index") ) ) .build(); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + new DateTime("2011-01-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( + ImmutableMap.builder() + .put("market", "total_market") + .put("first", 1000L) + .put("last", 1127L) + .build(), + ImmutableMap.builder() + .put("market", "upfront") + .put("first", 800L) + .put("last", 943L) + .build(), ImmutableMap.builder() .put("market", "spot") .put("first", 100L) - .put("last", 120L) + .put("last", 155L) + .build() + ) + ) + ), + new Result<>( + new DateTime("2011-02-01T00:00:00.000Z"), + new TopNResultValue( + Arrays.>asList( + ImmutableMap.builder() + .put("market", "total_market") + .put("first", 1203L) + .put("last", 1292L) .build(), + ImmutableMap.builder() + .put("market", "upfront") + .put("first", 1667L) + .put("last", 1101L) + .build(), + ImmutableMap.builder() + .put("market", "spot") + .put("first", 132L) + .put("last", 114L) + .build() + ) + ) + ), + new Result<>( + new DateTime("2011-03-01T00:00:00.000Z"), + new TopNResultValue( + Arrays.>asList( ImmutableMap.builder() .put("market", "total_market") - .put("first", 1000L) + .put("first", 1124L) + .put("last", 1366L) + .build(), + ImmutableMap.builder() + .put("market", "upfront") + .put("first", 1166L) + .put("last", 1063L) + .build(), + ImmutableMap.builder() + .put("market", "spot") + .put("first", 153L) + .put("last", 125L) + .build() + ) + ) + ), + new Result<>( + new DateTime("2011-04-01T00:00:00.000Z"), + new TopNResultValue( + Arrays.>asList( + ImmutableMap.builder() + .put("market", "total_market") + .put("first", 1314L) .put("last", 1029L) .build(), ImmutableMap.builder() .put("market", "upfront") 
- .put("first", 800L) + .put("first", 1447L) .put("last", 780L) + .build(), + ImmutableMap.builder() + .put("market", "spot") + .put("first", 135L) + .put("last", 120L) .build() ) ) From 78bcfae777a49e8da94f3495ec5f887af622d52e Mon Sep 17 00:00:00 2001 From: Dave Li Date: Fri, 5 Aug 2016 13:17:19 -0700 Subject: [PATCH 05/10] address PR comment --- .../first/DoubleFirstAggregatorFactory.java | 31 +++++++++++++------ .../first/LongFirstAggregatorFactory.java | 22 +++++++------ .../last/DoubleLastAggregatorFactory.java | 15 +++------ .../last/LongLastAggregatorFactory.java | 16 +++------- 4 files changed, 42 insertions(+), 42 deletions(-) diff --git a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java index 63e5d2703893..905924d0e3bc 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java @@ -42,6 +42,24 @@ public class DoubleFirstAggregatorFactory extends AggregatorFactory { + public static final Comparator VALUE_COMPARATOR = new Comparator() + { + @Override + public int compare(Object o1, Object o2) + { + return Doubles.compare(((SerializablePair) o1).rhs, ((SerializablePair) o2).rhs); + } + }; + + public static final Comparator TIME_COMPARATOR = new Comparator() + { + @Override + public int compare(Object o1, Object o2) + { + return Longs.compare(((SerializablePair) o1).lhs, ((SerializablePair) o2).lhs); + } + }; + private static final byte CACHE_TYPE_ID = 16; private final String fieldName; @@ -81,20 +99,13 @@ public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) @Override public Comparator getComparator() { - return new Comparator() - { - @Override - public int compare(Object o1, Object o2) - { - return Doubles.compare(((SerializablePair) o1).rhs, 
((SerializablePair) o2).rhs); - } - }; + return VALUE_COMPARATOR; } @Override public Object combine(Object lhs, Object rhs) { - return (((SerializablePair) lhs).lhs <= ((SerializablePair) rhs).lhs) ? lhs : rhs; + return TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? lhs : rhs; } @Override @@ -194,7 +205,7 @@ public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); + return ByteBuffer.allocate(2 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).put((byte)0xff).array(); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java index c9ec95955f4a..15ff5c25f63d 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java @@ -41,6 +41,15 @@ public class LongFirstAggregatorFactory extends AggregatorFactory { + public static final Comparator VALUE_COMPARATOR = new Comparator() + { + @Override + public int compare(Object o1, Object o2) + { + return Longs.compare(((SerializablePair) o1).rhs, ((SerializablePair) o2).rhs); + } + }; + private static final byte CACHE_TYPE_ID = 17; private final String fieldName; @@ -80,20 +89,13 @@ public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) @Override public Comparator getComparator() { - return new Comparator() - { - @Override - public int compare(Object o1, Object o2) - { - return Longs.compare(((SerializablePair) o1).rhs, ((SerializablePair) o2).rhs); - } - }; + return VALUE_COMPARATOR; } @Override public Object combine(Object lhs, Object rhs) { - return (((SerializablePair) lhs).lhs <= ((SerializablePair) rhs).lhs) ? 
lhs : rhs; + return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? lhs : rhs; } @Override @@ -193,7 +195,7 @@ public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); + return ByteBuffer.allocate(2 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).put((byte)0xff).array(); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java index 9069740436b1..ed5957d8a698 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java @@ -24,13 +24,13 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; -import com.metamx.common.IAE; import com.metamx.common.StringUtils; import io.druid.collections.SerializablePair; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory; import io.druid.query.aggregation.first.LongFirstAggregatorFactory; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; @@ -82,20 +82,13 @@ public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) @Override public Comparator getComparator() { - return new Comparator() - { - @Override - public int compare(Object o1, Object o2) - { - return Doubles.compare(((SerializablePair) o1).rhs, ((SerializablePair) o2).rhs); - } - }; + return DoubleFirstAggregatorFactory.VALUE_COMPARATOR; } 
@Override public Object combine(Object lhs, Object rhs) { - return (((SerializablePair) lhs).lhs > ((SerializablePair) rhs).lhs) ? lhs : rhs; + return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs; } @Override @@ -195,7 +188,7 @@ public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); + return ByteBuffer.allocate(2 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).put((byte)0xff).array(); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java index 7fe14415fdff..2fb07da62523 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java @@ -29,6 +29,8 @@ import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory; +import io.druid.query.aggregation.first.LongFirstAggregatorFactory; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; import io.druid.segment.column.Column; @@ -79,20 +81,13 @@ public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) @Override public Comparator getComparator() { - return new Comparator() - { - @Override - public int compare(Object o1, Object o2) - { - return Longs.compare(((SerializablePair) o1).rhs, ((SerializablePair) o2).rhs); - } - }; + return LongFirstAggregatorFactory.VALUE_COMPARATOR; } @Override public Object combine(Object lhs, Object rhs) { - return (((SerializablePair) lhs).lhs > ((SerializablePair) rhs).lhs) ? 
lhs : rhs; + return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs; } @@ -193,10 +188,9 @@ public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); + return ByteBuffer.allocate(2 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).put((byte)0xff).array(); } - @Override public String getTypeName() { From 890c8279fb09f790f554e9bb9e1882e4d5799b78 Mon Sep 17 00:00:00 2001 From: Dave Li Date: Fri, 12 Aug 2016 17:05:34 -0700 Subject: [PATCH 06/10] add finalize inner query and adjust v1 inner indexing --- .../druid/collections/SerializablePair.java | 2 +- .../druid/query/GroupByMergedQueryRunner.java | 3 +- .../JavaScriptAggregatorFactory.java | 2 +- .../first/DoubleFirstAggregatorFactory.java | 6 +- .../first/LongFirstAggregatorFactory.java | 6 +- .../last/DoubleLastAggregatorFactory.java | 8 ++- .../last/LongLastAggregatorFactory.java | 8 ++- .../query/groupby/GroupByQueryHelper.java | 33 ++++++---- .../groupby/GroupByQueryQueryToolChest.java | 17 ++++- .../groupby/strategy/GroupByStrategyV1.java | 22 ++++--- .../query/groupby/GroupByQueryRunnerTest.java | 66 ++++++++++++++++++- 11 files changed, 140 insertions(+), 33 deletions(-) diff --git a/common/src/main/java/io/druid/collections/SerializablePair.java b/common/src/main/java/io/druid/collections/SerializablePair.java index 0774ca066278..d02108ca7b73 100644 --- a/common/src/main/java/io/druid/collections/SerializablePair.java +++ b/common/src/main/java/io/druid/collections/SerializablePair.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.Pair; +import io.druid.java.util.common.Pair; public class SerializablePair extends Pair { diff --git a/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java 
b/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java index f1e710193751..f299c12ee864 100644 --- a/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java +++ b/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java @@ -87,7 +87,8 @@ public Sequence run(final Query queryParam, final Map resp final Pair> indexAccumulatorPair = GroupByQueryHelper.createIndexAccumulatorPair( query, querySpecificConfig, - bufferPool + bufferPool, + true ); final Pair> bySegmentAccumulatorPair = GroupByQueryHelper.createBySegmentAccumulatorPair(); final boolean bySegment = BaseQuery.getContextBySegment(query, false); diff --git a/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java index ff835abaaac9..288e8a23789c 100644 --- a/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java @@ -171,7 +171,7 @@ public List getRequiredColumns() @Override public AggregatorFactory apply(String input) { - return new JavaScriptAggregatorFactory(input, fieldNames, fnAggregate, fnReset, fnCombine, config); + return new JavaScriptAggregatorFactory(input, Lists.newArrayList(input), fnCombine, fnReset, fnCombine, config); } } ) diff --git a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java index 905924d0e3bc..32c0e7d9f7f5 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java @@ -205,7 +205,11 @@ public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(2 + 
fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).put((byte)0xff).array(); + return ByteBuffer.allocate(2 + fieldNameBytes.length) + .put(CACHE_TYPE_ID) + .put(fieldNameBytes) + .put((byte)0xff) + .array(); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java index 15ff5c25f63d..c5ad1bf24531 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java @@ -195,7 +195,11 @@ public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(2 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).put((byte)0xff).array(); + return ByteBuffer.allocate(2 + fieldNameBytes.length) + .put(CACHE_TYPE_ID) + .put(fieldNameBytes) + .put((byte)0xff) + .array(); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java index ed5957d8a698..8544e6092977 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java @@ -188,7 +188,11 @@ public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(2 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).put((byte)0xff).array(); + return ByteBuffer.allocate(2 + fieldNameBytes.length) + .put(CACHE_TYPE_ID) + .put(fieldNameBytes) + .put((byte)0xff) + .array(); } @Override @@ -235,7 +239,7 @@ public int hashCode() @Override public String toString() { - return "LongFirstAggregatorFactory{" + + return "DoubleFirstAggregatorFactory{" + "name='" + 
name + '\'' + ", fieldName='" + fieldName + '\'' + '}'; diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java index 2fb07da62523..06fc9e83d49a 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java @@ -188,7 +188,11 @@ public byte[] getCacheKey() { byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - return ByteBuffer.allocate(2 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).put((byte)0xff).array(); + return ByteBuffer.allocate(2 + fieldNameBytes.length) + .put(CACHE_TYPE_ID) + .put(fieldNameBytes) + .put((byte)0xff) + .array(); } @Override @@ -235,7 +239,7 @@ public int hashCode() @Override public String toString() { - return "DoubleLastAggregatorFactory{" + + return "LongLastAggregatorFactory{" + "name='" + name + '\'' + ", fieldName='" + fieldName + '\'' + '}'; diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java index 1a916639836b..65d7555f059f 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java @@ -51,7 +51,8 @@ public class GroupByQueryHelper public static Pair> createIndexAccumulatorPair( final GroupByQuery query, final GroupByQueryConfig config, - StupidPool bufferPool + StupidPool bufferPool, + final boolean combine ) { final GroupByQueryConfig querySpecificConfig = config.withOverrides(query); @@ -62,17 +63,23 @@ public static Pair> creat // AllGranularity returns timeStart instead of Long.MIN_VALUE final long granTimeStart = gran.iterable(timeStart, timeStart + 1).iterator().next(); - final List aggs = Lists.transform( - query.getAggregatorSpecs(), 
- new Function() - { - @Override - public AggregatorFactory apply(AggregatorFactory input) + final List aggs; + if (combine) { + aggs = Lists.transform( + query.getAggregatorSpecs(), + new Function() { - return input.getCombiningFactory(); + @Override + public AggregatorFactory apply(AggregatorFactory input) + { + return input.getCombiningFactory(); + } } - } - ); + ); + } else { + aggs = query.getAggregatorSpecs(); + } + final List dimensions = Lists.transform( query.getDimensions(), new Function() @@ -169,13 +176,15 @@ public static IncrementalIndex makeIncrementalIndex( GroupByQuery query, GroupByQueryConfig config, StupidPool bufferPool, - Sequence rows + Sequence rows, + boolean combine ) { Pair> indexAccumulatorPair = GroupByQueryHelper.createIndexAccumulatorPair( query, config, - bufferPool + bufferPool, + combine ); return rows.accumulate(indexAccumulatorPair.lhs, indexAccumulatorPair.rhs); diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java index ad8b26f6da94..468800b8d028 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java @@ -38,6 +38,7 @@ import io.druid.granularity.QueryGranularity; import io.druid.guice.annotations.Global; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.MappedSequence; import io.druid.java.util.common.guava.Sequence; import io.druid.query.BaseQuery; import io.druid.query.CacheStrategy; @@ -171,7 +172,21 @@ private Sequence mergeGroupByResults( runner, context ); - return strategySelector.strategize(query).processSubqueryResult(subquery, query, subqueryResult); + + final Sequence finalizingResults; + if (GroupByQuery.getContextFinalize(subquery, false)) { + finalizingResults = new MappedSequence<>( + subqueryResult, + makePreComputeManipulatorFn( + subquery, + 
MetricManipulatorFns.finalizing() + ) + ); + } else { + finalizingResults = subqueryResult; + } + + return strategySelector.strategize(query).processSubqueryResult(subquery, query, finalizingResults); } else { return strategySelector.strategize(query).mergeResults(runner, query, context); } diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java index cdf926b7cc5d..5cc5491de65f 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java @@ -115,7 +115,8 @@ public Sequence mergeResults( ) ), responseContext - ) + ), + true ); return new ResourceClosingSequence<>(query.applyLimit(GroupByQueryHelper.postAggregate(query, index)), index); @@ -164,21 +165,26 @@ public boolean apply(AggregatorFactory agg) .setLimitSpec(query.getLimitSpec().merge(subquery.getLimitSpec())) .build(); - final IncrementalIndex innerQueryResultIndex = makeIncrementalIndex( + final IncrementalIndex innerQueryResultIndex = GroupByQueryHelper.makeIncrementalIndex( innerQuery.withOverriddenContext( ImmutableMap.of( GroupByQueryHelper.CTX_KEY_SORT_RESULTS, true ) ), - subqueryResult + configSupplier.get(), + bufferPool, + subqueryResult, + false ); //Outer query might have multiple intervals, but they are expected to be non-overlapping and sorted which //is ensured by QuerySegmentSpec. //GroupByQueryEngine can only process one interval at a time, so we need to call it once per interval //and concatenate the results. 
- final IncrementalIndex outerQueryResultIndex = makeIncrementalIndex( + final IncrementalIndex outerQueryResultIndex = GroupByQueryHelper.makeIncrementalIndex( outerQuery, + configSupplier.get(), + bufferPool, Sequences.concat( Sequences.map( Sequences.simple(outerQuery.getIntervals()), @@ -196,7 +202,8 @@ public Sequence apply(Interval interval) } } ) - ) + ), + true ); innerQueryResultIndex.close(); @@ -207,11 +214,6 @@ public Sequence apply(Interval interval) ); } - private IncrementalIndex makeIncrementalIndex(GroupByQuery query, Sequence rows) - { - return GroupByQueryHelper.makeIncrementalIndex(query, configSupplier.get(), bufferPool, rows); - } - @Override public QueryRunner mergeRunners( final ListeningExecutorService exec, diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index 6dfceba2e780..5eebc52c824f 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -4994,7 +4994,7 @@ public void testSubqueryWithOuterCountAggregator() } @Test - public void testSubqueryWithOuterJavascriptAggregators() + public void testSubqueryWithOuterDimJavascriptAggregators() { final GroupByQuery subquery = GroupByQuery .builder() @@ -5070,6 +5070,69 @@ public void testSubqueryWithOuterJavascriptAggregators() } } + @Test + public void testSubqueryWithOuterJavascriptAggregators() + { + final GroupByQuery subquery = GroupByQuery + .builder() + .setDataSource(QueryRunnerTestHelper.dataSource) + .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"), + new DefaultDimensionSpec("quality", "quality"))) + .setAggregatorSpecs( + Arrays.asList( + QueryRunnerTestHelper.rowsCount, + new LongSumAggregatorFactory("index", "index") + ) + ) + 
.setGranularity(QueryRunnerTestHelper.dayGran) + .build(); + + final GroupByQuery query = GroupByQuery + .builder() + .setDataSource(subquery) + .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "quality"))) + .setAggregatorSpecs( + Arrays.asList( + new JavaScriptAggregatorFactory( + "js_agg", + Arrays.asList("index", "rows"), + "function(current, index, rows){return current + index + rows;}", + "function(){return 0;}", + "function(a,b){return a + b;}", + JavaScriptConfig.getDefault() + ) + ) + ) + .setGranularity(QueryRunnerTestHelper.dayGran) + .build(); + + List expectedResults = Arrays.asList( + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "automotive", "js_agg", 136D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "business", "js_agg", 119D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "entertainment", "js_agg", 159D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "health", "js_agg", 121D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "mezzanine", "js_agg", 2873D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "news", "js_agg", 122D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "premium", "js_agg", 2903D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "technology", "js_agg", 79D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "travel", "js_agg", 120D), + + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "quality", "automotive", "js_agg", 148D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "quality", "business", "js_agg", 113D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "quality", "entertainment", "js_agg", 167D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", 
"quality", "health", "js_agg", 114D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "quality", "mezzanine", "js_agg", 2450D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "quality", "news", "js_agg", 115D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "quality", "premium", "js_agg", 2508D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "quality", "technology", "js_agg", 98D), + GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "quality", "travel", "js_agg", 127D) + ); + Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + TestHelper.assertExpectedObjects(expectedResults, results, ""); + } + @Test public void testSubqueryWithHyperUniques() { @@ -5288,6 +5351,7 @@ public void testSubqueryWithFirstLast() ) ) .setGranularity(QueryRunnerTestHelper.dayGran) + .setContext(ImmutableMap.of("finalize", true)) .build(); GroupByQuery query = GroupByQuery From 3c069a95fd3b98f1d8480441b608dc43222869a4 Mon Sep 17 00:00:00 2001 From: Dave Li Date: Fri, 12 Aug 2016 18:06:32 -0700 Subject: [PATCH 07/10] better test and fixes --- .../last/DoubleLastAggregatorFactory.java | 4 +- .../last/LongLastAggregatorFactory.java | 2 +- .../aggregation/FirstAggregationTest.java | 239 ----------------- .../aggregation/LastAggregationTest.java | 244 ------------------ .../first/DoubleFirstAggregationTest.java | 201 +++++++++++++++ .../first/LongFirstAggregationTest.java | 200 ++++++++++++++ .../last/DoubleLastAggregationTest.java | 201 +++++++++++++++ .../last/LongLastAggregationTest.java | 200 ++++++++++++++ 8 files changed, 805 insertions(+), 486 deletions(-) delete mode 100644 processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java delete mode 100644 processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java create mode 100644 processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java create mode 100644 
processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java create mode 100644 processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java create mode 100644 processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java index 8544e6092977..8b6fc32369fc 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java @@ -105,8 +105,8 @@ public Aggregator factorize(ColumnSelectorFactory metricFactory) @Override public void aggregate() { - SerializablePair pair = (SerializablePair) selector.get(); - if (pair.rhs >= lastTime) { + SerializablePair pair = (SerializablePair) selector.get(); + if (pair.lhs >= lastTime) { lastTime = pair.lhs; lastValue = pair.rhs; } diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java index 06fc9e83d49a..c11686418387 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java @@ -106,7 +106,7 @@ public Aggregator factorize(ColumnSelectorFactory metricFactory) public void aggregate() { SerializablePair pair = (SerializablePair) selector.get(); - if (pair.rhs >= lastTime) { + if (pair.lhs >= lastTime) { lastTime = pair.lhs; lastValue = pair.rhs; } diff --git a/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java deleted file mode 100644 index 225323aa4837..000000000000 
--- a/processing/src/test/java/io/druid/query/aggregation/FirstAggregationTest.java +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.query.aggregation; - -import com.metamx.common.Pair; -import io.druid.collections.SerializablePair; -import io.druid.jackson.DefaultObjectMapper; -import io.druid.query.aggregation.first.DoubleFirstAggregator; -import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory; -import io.druid.query.aggregation.first.DoubleFirstBufferAggregator; -import io.druid.query.aggregation.first.LongFirstAggregator; -import io.druid.query.aggregation.first.LongFirstAggregatorFactory; -import io.druid.query.aggregation.first.LongFirstBufferAggregator; -import io.druid.segment.ColumnSelectorFactory; -import io.druid.segment.column.Column; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.nio.ByteBuffer; - -public class FirstAggregationTest -{ - private DoubleFirstAggregatorFactory doubleFirstAggFactory; - private LongFirstAggregatorFactory longFirstAggFactory; - private ColumnSelectorFactory colSelectorFactory; - private TestLongColumnSelector timeSelector; - 
private TestFloatColumnSelector floatSelector; - private TestLongColumnSelector longSelector; - - private long[] longValues = {62, 8, 54, 2}; - private float[] floatValues = {1.1f, 2.7f, 3.5f, 1.3f}; - private long[] times = {1467225096, 1467225098, 1467225099, 1467225111}; - - public FirstAggregationTest() throws Exception - { - doubleFirstAggFactory = new DoubleFirstAggregatorFactory("billy", "nilly"); - longFirstAggFactory = new LongFirstAggregatorFactory("bill", "nnn"); - } - - @Before - public void setup() - { - timeSelector = new TestLongColumnSelector(times); - floatSelector = new TestFloatColumnSelector(floatValues); - longSelector = new TestLongColumnSelector(longValues); - colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); - EasyMock.expect(colSelectorFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME)).andReturn(timeSelector); - EasyMock.expect(colSelectorFactory.makeFloatColumnSelector("nilly")).andReturn(floatSelector); - EasyMock.expect(colSelectorFactory.makeLongColumnSelector("nnn")).andReturn(longSelector); - EasyMock.replay(colSelectorFactory); - } - - @Test - public void testDoubleFirstAggregator() - { - DoubleFirstAggregator agg = (DoubleFirstAggregator) doubleFirstAggFactory.factorize(colSelectorFactory); - - Assert.assertEquals("billy", agg.getName()); - - aggregate(timeSelector, floatSelector, agg); - aggregate(timeSelector, floatSelector, agg); - aggregate(timeSelector, floatSelector, agg); - aggregate(timeSelector, floatSelector, agg); - - Pair result = (Pair) agg.get(); - - Assert.assertEquals(times[0], result.lhs.longValue()); - Assert.assertEquals(floatValues[0], result.rhs, 0.0001); - Assert.assertEquals((long) floatValues[0], agg.getLong()); - Assert.assertEquals(floatValues[0], agg.getFloat(), 0.0001); - - agg.reset(); - Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); - } - - @Test - public void testDoubleFirstBufferAggregator() - { - DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) 
doubleFirstAggFactory.factorizeBuffered( - colSelectorFactory); - - ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSize()]); - agg.init(buffer, 0); - - aggregate(timeSelector, floatSelector, agg, buffer, 0); - aggregate(timeSelector, floatSelector, agg, buffer, 0); - aggregate(timeSelector, floatSelector, agg, buffer, 0); - aggregate(timeSelector, floatSelector, agg, buffer, 0); - - Pair result = (Pair) agg.get(buffer, 0); - - Assert.assertEquals(times[0], result.lhs.longValue()); - Assert.assertEquals(floatValues[0], result.rhs, 0.0001); - Assert.assertEquals((long) floatValues[0], agg.getLong(buffer, 0)); - Assert.assertEquals(floatValues[0], agg.getFloat(buffer, 0), 0.0001); - } - - @Test - public void testLongFirstAggregator() - { - LongFirstAggregator agg = (LongFirstAggregator) longFirstAggFactory.factorize(colSelectorFactory); - - Assert.assertEquals("bill", agg.getName()); - - aggregate(timeSelector, longSelector, agg); - aggregate(timeSelector, longSelector, agg); - aggregate(timeSelector, longSelector, agg); - aggregate(timeSelector, longSelector, agg); - - Pair result = (Pair) agg.get(); - - Assert.assertEquals(times[0], result.lhs.longValue()); - Assert.assertEquals(longValues[0], result.rhs.longValue()); - Assert.assertEquals(longValues[0], agg.getLong()); - Assert.assertEquals(longValues[0], agg.getFloat(), 0.0001); - - agg.reset(); - Assert.assertEquals(0, ((Pair) agg.get()).rhs.longValue()); - } - - @Test - public void testLongFirstBufferAggregator() - { - LongFirstBufferAggregator agg = (LongFirstBufferAggregator) longFirstAggFactory.factorizeBuffered(colSelectorFactory); - - ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSize()]); - agg.init(buffer, 0); - - aggregate(timeSelector, longSelector, agg, buffer, 0); - aggregate(timeSelector, longSelector, agg, buffer, 0); - aggregate(timeSelector, longSelector, agg, buffer, 0); - aggregate(timeSelector, longSelector, agg, buffer, 
0); - - Pair result = (Pair) agg.get(buffer, 0); - - Assert.assertEquals(times[0], result.lhs.longValue()); - Assert.assertEquals(longValues[0], result.rhs.longValue()); - Assert.assertEquals(longValues[0], agg.getLong(buffer, 0)); - Assert.assertEquals(longValues[0], agg.getFloat(buffer, 0), 0.0001); - } - - @Test - public void testCombine() - { - SerializablePair pair1 = new SerializablePair<>(1467225000L, 3.621); - SerializablePair pair2 = new SerializablePair<>(1467240000L, 785.4); - Assert.assertEquals(pair1, doubleFirstAggFactory.combine(pair1, pair2)); - } - - - @Test - public void testEqualsAndHashCode() throws Exception - { - DoubleFirstAggregatorFactory one = new DoubleFirstAggregatorFactory("name1", "fieldName1"); - DoubleFirstAggregatorFactory oneAgain = new DoubleFirstAggregatorFactory("name1", "fieldName1"); - LongFirstAggregatorFactory two = new LongFirstAggregatorFactory("name1", "fieldName1"); - DoubleFirstAggregatorFactory three = new DoubleFirstAggregatorFactory("name2", "fieldName2"); - - Assert.assertEquals(one.hashCode(), oneAgain.hashCode()); - - Assert.assertTrue(one.equals(oneAgain)); - Assert.assertFalse(one.equals(two)); - Assert.assertFalse(one.equals(three)); - } - - @Test - public void testSerde() throws Exception - { - DefaultObjectMapper mapper = new DefaultObjectMapper(); - String doubleSpecJson = "{\"type\":\"doubleFirst\",\"name\":\"billy\",\"fieldName\":\"nilly\"}"; - String longSpecJson = "{\"type\":\"longFirst\",\"name\":\"bill\",\"fieldName\":\"nnn\"}"; - - Assert.assertEquals(doubleFirstAggFactory, mapper.readValue(doubleSpecJson, AggregatorFactory.class)); - Assert.assertEquals(longFirstAggFactory, mapper.readValue(longSpecJson, AggregatorFactory.class)); - } - - private void aggregate( - TestLongColumnSelector timeSelector, - TestFloatColumnSelector selector, - DoubleFirstAggregator agg - ) - { - agg.aggregate(); - timeSelector.increment(); - selector.increment(); - } - - private void aggregate( - TestLongColumnSelector 
timeSelector, - TestFloatColumnSelector selector, - DoubleFirstBufferAggregator agg, - ByteBuffer buff, - int position - ) - { - agg.aggregate(buff, position); - timeSelector.increment(); - selector.increment(); - } - - private void aggregate(TestLongColumnSelector timeSelector, TestLongColumnSelector selector, LongFirstAggregator agg) - { - agg.aggregate(); - timeSelector.increment(); - selector.increment(); - } - - private void aggregate( - TestLongColumnSelector timeSelector, - TestLongColumnSelector selector, - LongFirstBufferAggregator agg, - ByteBuffer buff, - int position - ) - { - agg.aggregate(buff, position); - timeSelector.increment(); - selector.increment(); - } -} diff --git a/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java deleted file mode 100644 index 018a17b06ee6..000000000000 --- a/processing/src/test/java/io/druid/query/aggregation/LastAggregationTest.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.query.aggregation; - -import com.metamx.common.Pair; -import io.druid.collections.SerializablePair; -import io.druid.jackson.DefaultObjectMapper; -import io.druid.query.aggregation.last.DoubleLastAggregator; -import io.druid.query.aggregation.last.DoubleLastAggregatorFactory; -import io.druid.query.aggregation.last.DoubleLastBufferAggregator; -import io.druid.query.aggregation.last.LongLastAggregator; -import io.druid.query.aggregation.last.LongLastAggregatorFactory; -import io.druid.query.aggregation.last.LongLastBufferAggregator; -import io.druid.segment.ColumnSelectorFactory; -import io.druid.segment.column.Column; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.nio.ByteBuffer; - -public class LastAggregationTest -{ - private DoubleLastAggregatorFactory doubleLastAggFactory; - private LongLastAggregatorFactory longLastAggFactory; - private ColumnSelectorFactory colSelectorFactory; - private TestLongColumnSelector timeSelector; - private TestFloatColumnSelector floatSelector; - private TestLongColumnSelector longSelector; - - private long[] longValues = {62, 8, 54, 2}; - private float[] floatValues = {1.1f, 2.7f, 3.5f, 1.3f}; - private long[] times = {1467225096, 1467225098, 1467225099, 1467225111}; - - public LastAggregationTest() throws Exception - { - String doubleSpecJson = "{\"type\": \"doubleLast\", \"name\": \"billy\", \"fieldName\": \"nilly\"}"; - String longSpecJson = "{\"type\": \"longLast\", \"name\": \"bill\", \"fieldName\": \"nnn\"}"; - doubleLastAggFactory = new DefaultObjectMapper().readValue(doubleSpecJson, DoubleLastAggregatorFactory.class); - longLastAggFactory = new DefaultObjectMapper().readValue(longSpecJson, LongLastAggregatorFactory.class); - } - - @Before - public void setup() - { - timeSelector = new TestLongColumnSelector(times); - floatSelector = new TestFloatColumnSelector(floatValues); - longSelector = new 
TestLongColumnSelector(longValues); - colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); - EasyMock.expect(colSelectorFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME)).andReturn(timeSelector); - EasyMock.expect(colSelectorFactory.makeFloatColumnSelector("nilly")).andReturn(floatSelector); - EasyMock.expect(colSelectorFactory.makeLongColumnSelector("nnn")).andReturn(longSelector); - EasyMock.replay(colSelectorFactory); - } - - @Test - public void testDoubleLastAggregator() - { - DoubleLastAggregator agg = (DoubleLastAggregator) doubleLastAggFactory.factorize(colSelectorFactory); - - Assert.assertEquals("billy", agg.getName()); - - aggregate(timeSelector, floatSelector, agg); - aggregate(timeSelector, floatSelector, agg); - aggregate(timeSelector, floatSelector, agg); - aggregate(timeSelector, floatSelector, agg); - - Pair result = (Pair) agg.get(); - - Assert.assertEquals(times[3], result.lhs.longValue()); - Assert.assertEquals(floatValues[3], result.rhs, 0.0001); - Assert.assertEquals((long) floatValues[3], agg.getLong()); - Assert.assertEquals(floatValues[3], agg.getFloat(), 0.0001); - - agg.reset(); - Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); - } - - @Test - public void testDoubleLastBufferAggregator() - { - DoubleLastBufferAggregator agg = (DoubleLastBufferAggregator) doubleLastAggFactory.factorizeBuffered( - colSelectorFactory); - - ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSize()]); - agg.init(buffer, 0); - - aggregate(timeSelector, floatSelector, agg, buffer, 0); - aggregate(timeSelector, floatSelector, agg, buffer, 0); - aggregate(timeSelector, floatSelector, agg, buffer, 0); - aggregate(timeSelector, floatSelector, agg, buffer, 0); - - Pair result = (Pair) agg.get(buffer, 0); - - Assert.assertEquals(times[3], result.lhs.longValue()); - Assert.assertEquals(floatValues[3], result.rhs, 0.0001); - Assert.assertEquals((long) floatValues[3], agg.getLong(buffer, 0)); - 
Assert.assertEquals(floatValues[3], agg.getFloat(buffer, 0), 0.0001); - } - - @Test - public void testLongLastAggregator() - { - LongLastAggregator agg = (LongLastAggregator) longLastAggFactory.factorize(colSelectorFactory); - - Assert.assertEquals("bill", agg.getName()); - - aggregate(timeSelector, longSelector, agg); - aggregate(timeSelector, longSelector, agg); - aggregate(timeSelector, longSelector, agg); - aggregate(timeSelector, longSelector, agg); - - Pair result = (Pair) agg.get(); - - Assert.assertEquals(times[3], result.lhs.longValue()); - Assert.assertEquals(longValues[3], result.rhs.longValue()); - Assert.assertEquals(longValues[3], agg.getLong()); - Assert.assertEquals(longValues[3], agg.getFloat(), 0.0001); - - agg.reset(); - Assert.assertEquals(0, ((Pair) agg.get()).rhs.longValue()); - } - - @Test - public void testLongLastBufferAggregator() - { - LongLastBufferAggregator agg = (LongLastBufferAggregator) longLastAggFactory.factorizeBuffered(colSelectorFactory); - - ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSize()]); - agg.init(buffer, 0); - - aggregate(timeSelector, longSelector, agg, buffer, 0); - aggregate(timeSelector, longSelector, agg, buffer, 0); - aggregate(timeSelector, longSelector, agg, buffer, 0); - aggregate(timeSelector, longSelector, agg, buffer, 0); - - Pair result = (Pair) agg.get(buffer, 0); - - Assert.assertEquals(times[3], result.lhs.longValue()); - Assert.assertEquals(longValues[3], result.rhs.longValue()); - Assert.assertEquals(longValues[3], agg.getLong(buffer, 0)); - Assert.assertEquals(longValues[3], agg.getFloat(buffer, 0), 0.0001); - } - - @Test - public void testCombine() - { - SerializablePair pair1 = new SerializablePair<>(1467225000L, 3.621); - SerializablePair pair2 = new SerializablePair<>(1467240000L, 785.4); - Assert.assertEquals(pair2, doubleLastAggFactory.combine(pair1, pair2)); - } - - @Test - public void testEqualsAndHashCode() throws Exception - { - 
DoubleLastAggregatorFactory one = new DoubleLastAggregatorFactory("name1", "fieldName1"); - DoubleLastAggregatorFactory oneAgain = new DoubleLastAggregatorFactory("name1", "fieldName1"); - LongLastAggregatorFactory two = new LongLastAggregatorFactory("name1", "fieldName1"); - DoubleLastAggregatorFactory three = new DoubleLastAggregatorFactory("name2", "fieldName2"); - - Assert.assertEquals(one.hashCode(), oneAgain.hashCode()); - - Assert.assertTrue(one.equals(oneAgain)); - Assert.assertFalse(one.equals(two)); - Assert.assertFalse(one.equals(three)); - } - - @Test - public void testSerde() throws Exception - { - DefaultObjectMapper mapper = new DefaultObjectMapper(); - String doubleSpecJson = "{\"type\":\"doubleLast\",\"name\":\"billy\",\"fieldName\":\"nilly\"}"; - String longSpecJson = "{\"type\":\"longLast\",\"name\":\"bill\",\"fieldName\":\"nnn\"}"; - - Assert.assertEquals(doubleLastAggFactory, mapper.readValue(doubleSpecJson, AggregatorFactory.class)); - Assert.assertEquals(longLastAggFactory, mapper.readValue(longSpecJson, AggregatorFactory.class)); - } - - private void aggregate( - TestLongColumnSelector timeSelector, - TestFloatColumnSelector selector, - DoubleLastAggregator agg - ) - { - agg.aggregate(); - timeSelector.increment(); - selector.increment(); - } - - private void aggregate( - TestLongColumnSelector timeSelector, - TestFloatColumnSelector selector, - DoubleLastBufferAggregator agg, - ByteBuffer buff, - int position - ) - { - agg.aggregate(buff, position); - timeSelector.increment(); - selector.increment(); - } - - private void aggregate( - TestLongColumnSelector timeSelector, - TestLongColumnSelector selector, - LongLastAggregator agg - ) - { - agg.aggregate(); - timeSelector.increment(); - selector.increment(); - } - - private void aggregate( - TestLongColumnSelector timeSelector, - TestLongColumnSelector selector, - LongLastBufferAggregator agg, - ByteBuffer buff, - int position - ) - { - agg.aggregate(buff, position); - 
timeSelector.increment(); - selector.increment(); - } -} diff --git a/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java new file mode 100644 index 000000000000..abb292369732 --- /dev/null +++ b/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java @@ -0,0 +1,201 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation.first; + +import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.TestFloatColumnSelector; +import io.druid.query.aggregation.TestLongColumnSelector; +import io.druid.query.aggregation.TestObjectColumnSelector; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.column.Column; +import org.easymock.EasyMock; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.nio.ByteBuffer; + +public class DoubleFirstAggregationTest +{ + private DoubleFirstAggregatorFactory doubleFirstAggFactory; + private DoubleFirstAggregatorFactory combiningAggFactory; + private ColumnSelectorFactory colSelectorFactory; + private TestLongColumnSelector timeSelector; + private TestFloatColumnSelector valueSelector; + private TestObjectColumnSelector objectSelector; + + private float[] floatValues = {1.1f, 2.7f, 3.5f, 1.3f}; + private long[] times = {12, 10, 5344, 7899999}; + private SerializablePair[] pairs = { + new SerializablePair<>(1467225096L, 134.3d), + new SerializablePair<>(23163L, 1232.212d), + new SerializablePair<>(742L, 18d), + new SerializablePair<>(111111L, 233.5232d) + }; + + @Before + public void setup() + { + doubleFirstAggFactory = new DoubleFirstAggregatorFactory("billy", "nilly"); + combiningAggFactory = (DoubleFirstAggregatorFactory) doubleFirstAggFactory.getCombiningFactory(); + timeSelector = new TestLongColumnSelector(times); + valueSelector = new TestFloatColumnSelector(floatValues); + objectSelector = new TestObjectColumnSelector(pairs); + colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME)).andReturn(timeSelector); + EasyMock.expect(colSelectorFactory.makeFloatColumnSelector("nilly")).andReturn(valueSelector); + 
EasyMock.expect(colSelectorFactory.makeObjectColumnSelector("billy")).andReturn(objectSelector); + EasyMock.replay(colSelectorFactory); + } + + @Test + public void testDoubleFirstAggregator() + { + DoubleFirstAggregator agg = (DoubleFirstAggregator) doubleFirstAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", agg.getName()); + + aggregate(agg); + aggregate(agg); + aggregate(agg); + aggregate(agg); + + Pair result = (Pair) agg.get(); + + Assert.assertEquals(times[1], result.lhs.longValue()); + Assert.assertEquals(floatValues[1], result.rhs, 0.0001); + Assert.assertEquals((long) floatValues[1], agg.getLong()); + Assert.assertEquals(floatValues[1], agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); + } + + @Test + public void testDoubleFirstBufferAggregator() + { + DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) doubleFirstAggFactory.factorizeBuffered( + colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + + Assert.assertEquals(times[1], result.lhs.longValue()); + Assert.assertEquals(floatValues[1], result.rhs, 0.0001); + Assert.assertEquals((long) floatValues[1], agg.getLong(buffer, 0)); + Assert.assertEquals(floatValues[1], agg.getFloat(buffer, 0), 0.0001); + } + + @Test + public void testCombine() + { + SerializablePair pair1 = new SerializablePair<>(1467225000L, 3.621); + SerializablePair pair2 = new SerializablePair<>(1467240000L, 785.4); + Assert.assertEquals(pair1, doubleFirstAggFactory.combine(pair1, pair2)); + } + + @Test + public void testDoubleFirstCombiningAggregator() + { + DoubleFirstAggregator agg = (DoubleFirstAggregator) combiningAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", 
agg.getName()); + + aggregate(agg); + aggregate(agg); + aggregate(agg); + aggregate(agg); + + Pair result = (Pair) agg.get(); + Pair expected = (Pair)pairs[2]; + + Assert.assertEquals(expected.lhs, result.lhs); + Assert.assertEquals(expected.rhs, result.rhs, 0.0001); + Assert.assertEquals(expected.rhs.longValue(), agg.getLong()); + Assert.assertEquals(expected.rhs, agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); + } + + @Test + public void testDoubleFirstCombiningBufferAggregator() + { + DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) combiningAggFactory.factorizeBuffered( + colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + Pair expected = (Pair)pairs[2]; + + Assert.assertEquals(expected.lhs, result.lhs); + Assert.assertEquals(expected.rhs, result.rhs, 0.0001); + Assert.assertEquals(expected.rhs.longValue(), agg.getLong(buffer, 0)); + Assert.assertEquals(expected.rhs, agg.getFloat(buffer, 0), 0.0001); + } + + + @Test + public void testSerde() throws Exception + { + DefaultObjectMapper mapper = new DefaultObjectMapper(); + String doubleSpecJson = "{\"type\":\"doubleFirst\",\"name\":\"billy\",\"fieldName\":\"nilly\"}"; + Assert.assertEquals(doubleFirstAggFactory, mapper.readValue(doubleSpecJson, AggregatorFactory.class)); + } + + private void aggregate( + DoubleFirstAggregator agg + ) + { + agg.aggregate(); + timeSelector.increment(); + valueSelector.increment(); + objectSelector.increment(); + } + + private void aggregate( + DoubleFirstBufferAggregator agg, + ByteBuffer buff, + int position + ) + { + agg.aggregate(buff, position); + timeSelector.increment(); + valueSelector.increment(); + objectSelector.increment(); + } +} diff --git 
a/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java new file mode 100644 index 000000000000..9e715f94fe73 --- /dev/null +++ b/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java @@ -0,0 +1,200 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation.first; + +import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.TestLongColumnSelector; +import io.druid.query.aggregation.TestObjectColumnSelector; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.column.Column; +import org.easymock.EasyMock; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.nio.ByteBuffer; + +public class LongFirstAggregationTest +{ + private LongFirstAggregatorFactory longFirstAggFactory; + private LongFirstAggregatorFactory combiningAggFactory; + private ColumnSelectorFactory colSelectorFactory; + private TestLongColumnSelector timeSelector; + private TestLongColumnSelector valueSelector; + private TestObjectColumnSelector objectSelector; + + private long[] longValues = {185, -216, -128751132, Long.MIN_VALUE}; + private long[] times = {1123126751, 1784247991, 1854329816, 1000000000}; + private SerializablePair[] pairs = { + new SerializablePair<>(1L, 113267L), + new SerializablePair<>(1L, 5437384L), + new SerializablePair<>(6L, 34583458L), + new SerializablePair<>(88L, 34583452L) + }; + + @Before + public void setup() + { + longFirstAggFactory = new LongFirstAggregatorFactory("billy", "nilly"); + combiningAggFactory = (LongFirstAggregatorFactory) longFirstAggFactory.getCombiningFactory(); + timeSelector = new TestLongColumnSelector(times); + valueSelector = new TestLongColumnSelector(longValues); + objectSelector = new TestObjectColumnSelector(pairs); + colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME)).andReturn(timeSelector); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector("nilly")).andReturn(valueSelector); + 
EasyMock.expect(colSelectorFactory.makeObjectColumnSelector("billy")).andReturn(objectSelector); + EasyMock.replay(colSelectorFactory); + } + + @Test + public void testLongFirstAggregator() + { + LongFirstAggregator agg = (LongFirstAggregator) longFirstAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", agg.getName()); + + aggregate(agg); + aggregate(agg); + aggregate(agg); + aggregate(agg); + + Pair result = (Pair) agg.get(); + + Assert.assertEquals(times[3], result.lhs.longValue()); + Assert.assertEquals(longValues[3], result.rhs.longValue()); + Assert.assertEquals(longValues[3], agg.getLong()); + Assert.assertEquals(longValues[3], agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); + } + + @Test + public void testLongFirstBufferAggregator() + { + LongFirstBufferAggregator agg = (LongFirstBufferAggregator) longFirstAggFactory.factorizeBuffered( + colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + + Assert.assertEquals(times[3], result.lhs.longValue()); + Assert.assertEquals(longValues[3], result.rhs.longValue()); + Assert.assertEquals(longValues[3], agg.getLong(buffer, 0)); + Assert.assertEquals(longValues[3], agg.getFloat(buffer, 0), 0.0001); + } + + @Test + public void testCombine() + { + SerializablePair pair1 = new SerializablePair<>(1467225000L, 1263L); + SerializablePair pair2 = new SerializablePair<>(1467240000L, 752713L); + Assert.assertEquals(pair1, longFirstAggFactory.combine(pair1, pair2)); + } + + @Test + public void testLongFirstCombiningAggregator() + { + LongFirstAggregator agg = (LongFirstAggregator) combiningAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", agg.getName()); + + aggregate(agg); + 
aggregate(agg); + aggregate(agg); + aggregate(agg); + + Pair result = (Pair) agg.get(); + Pair expected = (Pair)pairs[0]; + + Assert.assertEquals(expected.lhs, result.lhs); + Assert.assertEquals(expected.rhs, result.rhs); + Assert.assertEquals(expected.rhs.longValue(), agg.getLong()); + Assert.assertEquals(expected.rhs, agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); + } + + @Test + public void testLongFirstCombiningBufferAggregator() + { + LongFirstBufferAggregator agg = (LongFirstBufferAggregator) combiningAggFactory.factorizeBuffered( + colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + Pair expected = (Pair)pairs[0]; + + Assert.assertEquals(expected.lhs, result.lhs); + Assert.assertEquals(expected.rhs, result.rhs); + Assert.assertEquals(expected.rhs.longValue(), agg.getLong(buffer, 0)); + Assert.assertEquals(expected.rhs, agg.getFloat(buffer, 0), 0.0001); + } + + + @Test + public void testSerde() throws Exception + { + DefaultObjectMapper mapper = new DefaultObjectMapper(); + String longSpecJson = "{\"type\":\"longFirst\",\"name\":\"billy\",\"fieldName\":\"nilly\"}"; + Assert.assertEquals(longFirstAggFactory, mapper.readValue(longSpecJson, AggregatorFactory.class)); + } + + private void aggregate( + LongFirstAggregator agg + ) + { + agg.aggregate(); + timeSelector.increment(); + valueSelector.increment(); + objectSelector.increment(); + } + + private void aggregate( + LongFirstBufferAggregator agg, + ByteBuffer buff, + int position + ) + { + agg.aggregate(buff, position); + timeSelector.increment(); + valueSelector.increment(); + objectSelector.increment(); + } +} diff --git 
a/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java new file mode 100644 index 000000000000..ba8889f5a6b4 --- /dev/null +++ b/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java @@ -0,0 +1,201 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation.last; + +import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.TestFloatColumnSelector; +import io.druid.query.aggregation.TestLongColumnSelector; +import io.druid.query.aggregation.TestObjectColumnSelector; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.column.Column; +import org.easymock.EasyMock; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.nio.ByteBuffer; + +public class DoubleLastAggregationTest +{ + private DoubleLastAggregatorFactory doubleLastAggFactory; + private DoubleLastAggregatorFactory combiningAggFactory; + private ColumnSelectorFactory colSelectorFactory; + private TestLongColumnSelector timeSelector; + private TestFloatColumnSelector valueSelector; + private TestObjectColumnSelector objectSelector; + + private float[] floatValues = {1.1897f, 0.001f, 86.23f, 166.228f}; + private long[] times = {8224, 6879, 2436, 7888}; + private SerializablePair[] pairs = { + new SerializablePair<>(52782L, 134.3d), + new SerializablePair<>(65492L, 1232.212d), + new SerializablePair<>(69134L, 18.1233d), + new SerializablePair<>(11111L, 233.5232d) + }; + + @Before + public void setup() + { + doubleLastAggFactory = new DoubleLastAggregatorFactory("billy", "nilly"); + combiningAggFactory = (DoubleLastAggregatorFactory) doubleLastAggFactory.getCombiningFactory(); + timeSelector = new TestLongColumnSelector(times); + valueSelector = new TestFloatColumnSelector(floatValues); + objectSelector = new TestObjectColumnSelector(pairs); + colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME)).andReturn(timeSelector); + 
EasyMock.expect(colSelectorFactory.makeFloatColumnSelector("nilly")).andReturn(valueSelector); + EasyMock.expect(colSelectorFactory.makeObjectColumnSelector("billy")).andReturn(objectSelector); + EasyMock.replay(colSelectorFactory); + } + + @Test + public void testDoubleLastAggregator() + { + DoubleLastAggregator agg = (DoubleLastAggregator) doubleLastAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", agg.getName()); + + aggregate(agg); + aggregate(agg); + aggregate(agg); + aggregate(agg); + + Pair result = (Pair) agg.get(); + + Assert.assertEquals(times[0], result.lhs.longValue()); + Assert.assertEquals(floatValues[0], result.rhs, 0.0001); + Assert.assertEquals((long) floatValues[0], agg.getLong()); + Assert.assertEquals(floatValues[0], agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); + } + + @Test + public void testDoubleLastBufferAggregator() + { + DoubleLastBufferAggregator agg = (DoubleLastBufferAggregator) doubleLastAggFactory.factorizeBuffered( + colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + + Assert.assertEquals(times[0], result.lhs.longValue()); + Assert.assertEquals(floatValues[0], result.rhs, 0.0001); + Assert.assertEquals((long) floatValues[0], agg.getLong(buffer, 0)); + Assert.assertEquals(floatValues[0], agg.getFloat(buffer, 0), 0.0001); + } + + @Test + public void testCombine() + { + SerializablePair pair1 = new SerializablePair<>(1467225000L, 3.621); + SerializablePair pair2 = new SerializablePair<>(1467240000L, 785.4); + Assert.assertEquals(pair2, doubleLastAggFactory.combine(pair1, pair2)); + } + + @Test + public void testDoubleLastCombiningAggregator() + { + DoubleLastAggregator agg = (DoubleLastAggregator) 
combiningAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", agg.getName()); + + aggregate(agg); + aggregate(agg); + aggregate(agg); + aggregate(agg); + + Pair result = (Pair) agg.get(); + Pair expected = (Pair)pairs[2]; + + Assert.assertEquals(expected.lhs, result.lhs); + Assert.assertEquals(expected.rhs, result.rhs, 0.0001); + Assert.assertEquals(expected.rhs.longValue(), agg.getLong()); + Assert.assertEquals(expected.rhs, agg.getFloat(), 0.0001); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs, 0.0001); + } + + @Test + public void testDoubleLastCombiningBufferAggregator() + { + DoubleLastBufferAggregator agg = (DoubleLastBufferAggregator) combiningAggFactory.factorizeBuffered( + colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + Pair expected = (Pair)pairs[2]; + + Assert.assertEquals(expected.lhs, result.lhs); + Assert.assertEquals(expected.rhs, result.rhs, 0.0001); + Assert.assertEquals(expected.rhs.longValue(), agg.getLong(buffer, 0)); + Assert.assertEquals(expected.rhs, agg.getFloat(buffer, 0), 0.0001); + } + + + @Test + public void testSerde() throws Exception + { + DefaultObjectMapper mapper = new DefaultObjectMapper(); + String doubleSpecJson = "{\"type\":\"doubleLast\",\"name\":\"billy\",\"fieldName\":\"nilly\"}"; + Assert.assertEquals(doubleLastAggFactory, mapper.readValue(doubleSpecJson, AggregatorFactory.class)); + } + + private void aggregate( + DoubleLastAggregator agg + ) + { + agg.aggregate(); + timeSelector.increment(); + valueSelector.increment(); + objectSelector.increment(); + } + + private void aggregate( + DoubleLastBufferAggregator agg, + ByteBuffer buff, + int position + ) + { + agg.aggregate(buff, position); + timeSelector.increment(); + 
valueSelector.increment(); + objectSelector.increment(); + } +} diff --git a/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java new file mode 100644 index 000000000000..ebd971ad6369 --- /dev/null +++ b/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java @@ -0,0 +1,200 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation.last; + +import com.metamx.common.Pair; +import io.druid.collections.SerializablePair; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.query.aggregation.TestLongColumnSelector; +import io.druid.query.aggregation.TestObjectColumnSelector; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.column.Column; +import org.easymock.EasyMock; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.nio.ByteBuffer; + +public class LongLastAggregationTest +{ + private LongLastAggregatorFactory longLastAggFactory; + private LongLastAggregatorFactory combiningAggFactory; + private ColumnSelectorFactory colSelectorFactory; + private TestLongColumnSelector timeSelector; + private TestLongColumnSelector valueSelector; + private TestObjectColumnSelector objectSelector; + + private long[] longValues = {23216, 8635, 1547123, Long.MAX_VALUE}; + private long[] times = {1467935723, 1467225653, 1601848932, 72515}; + private SerializablePair[] pairs = { + new SerializablePair<>(12531L, 113267L), + new SerializablePair<>(123L, 5437384L), + new SerializablePair<>(125755L, 34583458L), + new SerializablePair<>(124L, 34283452L) + }; + + @Before + public void setup() + { + longLastAggFactory = new LongLastAggregatorFactory("billy", "nilly"); + combiningAggFactory = (LongLastAggregatorFactory) longLastAggFactory.getCombiningFactory(); + timeSelector = new TestLongColumnSelector(times); + valueSelector = new TestLongColumnSelector(longValues); + objectSelector = new TestObjectColumnSelector(pairs); + colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME)).andReturn(timeSelector); + EasyMock.expect(colSelectorFactory.makeLongColumnSelector("nilly")).andReturn(valueSelector); + 
EasyMock.expect(colSelectorFactory.makeObjectColumnSelector("billy")).andReturn(objectSelector); + EasyMock.replay(colSelectorFactory); + } + + @Test + public void testLongLastAggregator() + { + LongLastAggregator agg = (LongLastAggregator) longLastAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", agg.getName()); + + aggregate(agg); + aggregate(agg); + aggregate(agg); + aggregate(agg); + + Pair result = (Pair) agg.get(); + + Assert.assertEquals(times[2], result.lhs.longValue()); + Assert.assertEquals(longValues[2], result.rhs.longValue()); + Assert.assertEquals(longValues[2], agg.getLong()); + Assert.assertEquals(longValues[2], agg.getFloat(), 1); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs.longValue()); + } + + @Test + public void testLongLastBufferAggregator() + { + LongLastBufferAggregator agg = (LongLastBufferAggregator) longLastAggFactory.factorizeBuffered( + colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + + Assert.assertEquals(times[2], result.lhs.longValue()); + Assert.assertEquals(longValues[2], result.rhs.longValue()); + Assert.assertEquals(longValues[2], agg.getLong(buffer, 0)); + Assert.assertEquals(longValues[2], agg.getFloat(buffer, 0), 1); + } + + @Test + public void testCombine() + { + SerializablePair pair1 = new SerializablePair<>(1467225000L, 64432L); + SerializablePair pair2 = new SerializablePair<>(1467240000L, 99999L); + Assert.assertEquals(pair2, longLastAggFactory.combine(pair1, pair2)); + } + + @Test + public void testLongLastCombiningAggregator() + { + LongLastAggregator agg = (LongLastAggregator) combiningAggFactory.factorize(colSelectorFactory); + + Assert.assertEquals("billy", agg.getName()); + + aggregate(agg); + aggregate(agg); + 
aggregate(agg); + aggregate(agg); + + Pair result = (Pair) agg.get(); + Pair expected = (Pair)pairs[2]; + + Assert.assertEquals(expected.lhs, result.lhs); + Assert.assertEquals(expected.rhs, result.rhs); + Assert.assertEquals(expected.rhs.longValue(), agg.getLong()); + Assert.assertEquals(expected.rhs, agg.getFloat(), 1); + + agg.reset(); + Assert.assertEquals(0, ((Pair) agg.get()).rhs.longValue()); + } + + @Test + public void testLongLastCombiningBufferAggregator() + { + LongLastBufferAggregator agg = (LongLastBufferAggregator) combiningAggFactory.factorizeBuffered( + colSelectorFactory); + + ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSize()]); + agg.init(buffer, 0); + + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + aggregate(agg, buffer, 0); + + Pair result = (Pair) agg.get(buffer, 0); + Pair expected = (Pair)pairs[2]; + + Assert.assertEquals(expected.lhs, result.lhs); + Assert.assertEquals(expected.rhs, result.rhs); + Assert.assertEquals(expected.rhs.longValue(), agg.getLong(buffer, 0)); + Assert.assertEquals(expected.rhs, agg.getFloat(buffer, 0), 1); + } + + + @Test + public void testSerde() throws Exception + { + DefaultObjectMapper mapper = new DefaultObjectMapper(); + String longSpecJson = "{\"type\":\"longLast\",\"name\":\"billy\",\"fieldName\":\"nilly\"}"; + Assert.assertEquals(longLastAggFactory, mapper.readValue(longSpecJson, AggregatorFactory.class)); + } + + private void aggregate( + LongLastAggregator agg + ) + { + agg.aggregate(); + timeSelector.increment(); + valueSelector.increment(); + objectSelector.increment(); + } + + private void aggregate( + LongLastBufferAggregator agg, + ByteBuffer buff, + int position + ) + { + agg.aggregate(buff, position); + timeSelector.increment(); + valueSelector.increment(); + objectSelector.increment(); + } +} From 481b526b7c87fe77a8f7ab6007bd388af0746422 Mon Sep 17 00:00:00 2001 From: jon-wei Date: Fri, 4 Nov 2016 14:22:38 -0700 
Subject: [PATCH 08/10] java-util import fixes --- .../query/aggregation/first/DoubleFirstAggregationTest.java | 2 +- .../druid/query/aggregation/first/LongFirstAggregationTest.java | 2 +- .../druid/query/aggregation/last/DoubleLastAggregationTest.java | 2 +- .../druid/query/aggregation/last/LongLastAggregationTest.java | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java index abb292369732..1e31600cbd14 100644 --- a/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java @@ -19,9 +19,9 @@ package io.druid.query.aggregation.first; -import com.metamx.common.Pair; import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Pair; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.TestFloatColumnSelector; import io.druid.query.aggregation.TestLongColumnSelector; diff --git a/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java index 9e715f94fe73..d95b75f5a991 100644 --- a/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java @@ -19,9 +19,9 @@ package io.druid.query.aggregation.first; -import com.metamx.common.Pair; import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Pair; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.TestLongColumnSelector; import 
io.druid.query.aggregation.TestObjectColumnSelector; diff --git a/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java index ba8889f5a6b4..ffc1e553a1b9 100644 --- a/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java @@ -19,9 +19,9 @@ package io.druid.query.aggregation.last; -import com.metamx.common.Pair; import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Pair; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.TestFloatColumnSelector; import io.druid.query.aggregation.TestLongColumnSelector; diff --git a/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java index ebd971ad6369..3c95b9fd9817 100644 --- a/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java @@ -19,9 +19,9 @@ package io.druid.query.aggregation.last; -import com.metamx.common.Pair; import io.druid.collections.SerializablePair; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Pair; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.TestLongColumnSelector; import io.druid.query.aggregation.TestObjectColumnSelector; From cc2558633e531409ff07e01d4844b9f2b107b51c Mon Sep 17 00:00:00 2001 From: jon-wei Date: Tue, 13 Dec 2016 15:02:48 -0800 Subject: [PATCH 09/10] PR comments --- docs/content/querying/aggregations.md | 2 ++ .../aggregation/first/DoubleFirstAggregator.java | 12 ++++++++---- .../first/DoubleFirstAggregatorFactory.java | 5 +++-- 
.../query/aggregation/first/LongFirstAggregator.java | 10 +++++++--- .../first/LongFirstAggregatorFactory.java | 5 +++-- .../query/aggregation/last/DoubleLastAggregator.java | 10 +++++++--- .../last/DoubleLastAggregatorFactory.java | 7 ++++--- .../query/aggregation/last/LongLastAggregator.java | 10 +++++++--- .../aggregation/last/LongLastAggregatorFactory.java | 5 +++-- 9 files changed, 44 insertions(+), 22 deletions(-) diff --git a/docs/content/querying/aggregations.md b/docs/content/querying/aggregations.md index c46750cbb5bb..2382e76aa2b2 100644 --- a/docs/content/querying/aggregations.md +++ b/docs/content/querying/aggregations.md @@ -80,6 +80,8 @@ Computes the sum of values as 64-bit floating point value. Similar to `longSum` First and Last aggregator cannot be used in ingestion spec, and should only be specified as part of queries. +Note that queries with first/last aggregators on a segment created with rollup enabled will return the rolled up value, and not the first/last value within the raw ingested data.
+ #### `doubleFirst` aggregator `doubleFirst` computes the metric value with the minimum timestamp or 0 if no row exist diff --git a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregator.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregator.java index 352536ed7b41..bc709de39138 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregator.java @@ -31,10 +31,14 @@ public class DoubleFirstAggregator implements Aggregator private final LongColumnSelector timeSelector; private final String name; - long firstTime; - double firstValue; - - public DoubleFirstAggregator(String name, FloatColumnSelector valueSelector, LongColumnSelector timeSelector) + protected long firstTime; + protected double firstValue; + + public DoubleFirstAggregator( + String name, + LongColumnSelector timeSelector, + FloatColumnSelector valueSelector + ) { this.name = name; this.valueSelector = valueSelector; diff --git a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java index 32c0e7d9f7f5..192f8c363301 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java @@ -82,8 +82,9 @@ public DoubleFirstAggregatorFactory( public Aggregator factorize(ColumnSelectorFactory metricFactory) { return new DoubleFirstAggregator( - name, metricFactory.makeFloatColumnSelector(fieldName), - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + name, + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeFloatColumnSelector(fieldName) ); } diff --git 
a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregator.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregator.java index 8c185015549c..51a5309c73a8 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregator.java @@ -30,10 +30,14 @@ public class LongFirstAggregator implements Aggregator private final LongColumnSelector timeSelector; private final String name; - long firstTime; - long firstValue; + protected long firstTime; + protected long firstValue; - public LongFirstAggregator(String name, LongColumnSelector valueSelector, LongColumnSelector timeSelector) + public LongFirstAggregator( + String name, + LongColumnSelector timeSelector, + LongColumnSelector valueSelector + ) { this.name = name; this.valueSelector = valueSelector; diff --git a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java index c5ad1bf24531..459cf4ac2fba 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java @@ -72,8 +72,9 @@ public LongFirstAggregatorFactory( public Aggregator factorize(ColumnSelectorFactory metricFactory) { return new LongFirstAggregator( - name, metricFactory.makeLongColumnSelector(fieldName), - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + name, + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeLongColumnSelector(fieldName) ); } diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregator.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregator.java index 951051179b98..3e72387ea401 100644 --- 
a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregator.java @@ -31,10 +31,14 @@ public class DoubleLastAggregator implements Aggregator private final LongColumnSelector timeSelector; private final String name; - long lastTime; - double lastValue; + protected long lastTime; + protected double lastValue; - public DoubleLastAggregator(String name, FloatColumnSelector valueSelector, LongColumnSelector timeSelector) + public DoubleLastAggregator( + String name, + LongColumnSelector timeSelector, + FloatColumnSelector valueSelector + ) { this.name = name; this.valueSelector = valueSelector; diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java index 8b6fc32369fc..302a1cfa791c 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java @@ -65,8 +65,9 @@ public DoubleLastAggregatorFactory( public Aggregator factorize(ColumnSelectorFactory metricFactory) { return new DoubleLastAggregator( - name, metricFactory.makeFloatColumnSelector(fieldName), - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + name, + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeFloatColumnSelector(fieldName) ); } @@ -239,7 +240,7 @@ public int hashCode() @Override public String toString() { - return "DoubleFirstAggregatorFactory{" + + return "DoubleLastAggregatorFactory{" + "name='" + name + '\'' + ", fieldName='" + fieldName + '\'' + '}'; diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregator.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregator.java index a728714011c4..bc312918eeb5 100644 --- 
a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregator.java @@ -29,10 +29,14 @@ public class LongLastAggregator implements Aggregator private final LongColumnSelector timeSelector; private final String name; - long lastTime; - long lastValue; + protected long lastTime; + protected long lastValue; - public LongLastAggregator(String name, LongColumnSelector valueSelector, LongColumnSelector timeSelector) + public LongLastAggregator( + String name, + LongColumnSelector timeSelector, + LongColumnSelector valueSelector + ) { this.name = name; this.valueSelector = valueSelector; diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java index c11686418387..481e888bbaa1 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java @@ -64,8 +64,9 @@ public LongLastAggregatorFactory( public Aggregator factorize(ColumnSelectorFactory metricFactory) { return new LongLastAggregator( - name, metricFactory.makeLongColumnSelector(fieldName), - metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME) + name, + metricFactory.makeLongColumnSelector(Column.TIME_COLUMN_NAME), + metricFactory.makeLongColumnSelector(fieldName) ); } From 11e50b6775d221916fc0f16b12176a8b7752f6b1 Mon Sep 17 00:00:00 2001 From: jon-wei Date: Fri, 16 Dec 2016 13:59:22 -0800 Subject: [PATCH 10/10] Add first/last aggs to ITWikipediaQueryTest --- .../queries/wikipedia_editstream_queries.json | 178 +++++++++++++++++- 1 file changed, 177 insertions(+), 1 deletion(-) diff --git a/integration-tests/src/test/resources/queries/wikipedia_editstream_queries.json b/integration-tests/src/test/resources/queries/wikipedia_editstream_queries.json 
index 5873a87d95b1..791d296d2500 100644 --- a/integration-tests/src/test/resources/queries/wikipedia_editstream_queries.json +++ b/integration-tests/src/test/resources/queries/wikipedia_editstream_queries.json @@ -73,6 +73,26 @@ "type": "hyperUnique", "fieldName": "unique_users", "name": "unique_users" + }, + { + "type" : "doubleFirst", + "name" : "firstAdded", + "fieldName" : "added" + }, + { + "type" : "doubleLast", + "name" : "lastAdded", + "fieldName" : "added" + }, + { + "type" : "longFirst", + "name" : "firstCount", + "fieldName" : "count" + }, + { + "type" : "longLast", + "name" : "lastCount", + "fieldName" : "count" } ], "context": { @@ -87,6 +107,10 @@ "result": { "added": 9.11526338E8, "count": 2815650, + "firstAdded": 39.0, + "lastAdded": 210.0, + "firstCount": 1, + "lastCount": 1, "delta": 5.48967603E8, "variation": 1.274085073E9, "delta_hist": { @@ -174,6 +198,26 @@ "type": "hyperUnique", "fieldName": "unique_users", "name": "unique_users" + }, + { + "type" : "doubleFirst", + "name" : "firstAdded", + "fieldName" : "added" + }, + { + "type" : "doubleLast", + "name" : "lastAdded", + "fieldName" : "added" + }, + { + "type" : "longFirst", + "name" : "firstCount", + "fieldName" : "count" + }, + { + "type" : "longLast", + "name" : "lastCount", + "fieldName" : "count" } ], "context": { @@ -188,6 +232,10 @@ "result": { "added": 3.49393993E8, "count": 1829240, + "firstAdded": 39.0, + "lastAdded": 210.0, + "firstCount": 1, + "lastCount": 1, "delta": 2.24089868E8, "variation": 4.74698118E8, "delta_hist": { @@ -365,6 +413,26 @@ "type": "hyperUnique", "fieldName": "unique_users", "name": "unique_users" + }, + { + "type" : "doubleFirst", + "name" : "firstAdded", + "fieldName" : "added" + }, + { + "type" : "doubleLast", + "name" : "lastAdded", + "fieldName" : "added" + }, + { + "type" : "longFirst", + "name" : "firstCount", + "fieldName" : "count" + }, + { + "type" : "longLast", + "name" : "lastCount", + "fieldName" : "count" } ], "dimension": "page", @@ -383,6 
+451,10 @@ { "added": 1812960.0, "count": 1697, + "firstCount": 2, + "lastCount": 3, + "firstAdded": 462.0, + "lastAdded": 1871.0, "page": "Wikipedia:Administrators'_noticeboard/Incidents", "delta": 770071.0, "variation": 2855849.0, @@ -393,6 +465,10 @@ { "added": 70162.0, "count": 967, + "firstCount": 1, + "lastCount": 1, + "firstAdded": 12.0, + "lastAdded": 129.0, "page": "2013", "delta": 40872.0, "variation": 99452.0, @@ -403,6 +479,10 @@ { "added": 519152.0, "count": 1700, + "firstCount": 1, + "lastCount": 5, + "firstAdded": 0.0, + "lastAdded": 2399.0, "page": "Wikipedia:Vandalismusmeldung", "delta": -5446.0, "variation": 1043750.0, @@ -480,6 +560,26 @@ "type": "hyperUnique", "fieldName": "unique_users", "name": "unique_users" + }, + { + "type" : "doubleFirst", + "name" : "firstAdded", + "fieldName" : "added" + }, + { + "type" : "doubleLast", + "name" : "lastAdded", + "fieldName" : "added" + }, + { + "type" : "longFirst", + "name" : "firstCount", + "fieldName" : "count" + }, + { + "type" : "longLast", + "name" : "lastCount", + "fieldName" : "count" } ], "dimension": "page", @@ -498,6 +598,10 @@ { "added": 61739.0, "count": 852, + "firstCount": 1, + "lastCount": 1, + "firstAdded": 12.0, + "lastAdded": 129.0, "page": "2013", "delta": 35313.0, "variation": 88165.0, @@ -508,6 +612,10 @@ { "added": 28288.0, "count": 513, + "firstCount": 1, + "lastCount": 1, + "firstAdded": 29.0, + "lastAdded": 37.0, "page": "Gérard_Depardieu", "delta": 7027.0, "variation": 49549.0, @@ -518,6 +626,10 @@ { "added": 10951.0, "count": 459, + "firstCount": 1, + "lastCount": 1, + "firstAdded": 29.0, + "lastAdded": 35.0, "page": "Zichyújfalu", "delta": 9030.0, "variation": 12872.0, @@ -570,6 +682,26 @@ "type": "hyperUnique", "fieldName": "unique_users", "name": "unique_users" + }, + { + "type" : "doubleFirst", + "name" : "firstAdded", + "fieldName" : "added" + }, + { + "type" : "doubleLast", + "name" : "lastAdded", + "fieldName" : "added" + }, + { + "type" : "longFirst", + "name" : 
"firstCount", + "fieldName" : "count" + }, + { + "type" : "longLast", + "name" : "lastCount", + "fieldName" : "count" } ], "postAggregations": [ @@ -619,6 +751,10 @@ { "added": 151409.0, "count": 1770, + "firstCount": 9, + "lastCount": 9, + "firstAdded": 1612.0, + "lastAdded": 560.0, "page": "User:Cyde/List_of_candidates_for_speedy_deletion/Subpage", "delta": 670.0, "variation": 302148.0, @@ -630,6 +766,10 @@ { "added": 519152.0, "count": 1700, + "firstCount": 1, + "lastCount": 5, + "firstAdded": 0.0, + "lastAdded": 2399.0, "page": "Wikipedia:Vandalismusmeldung", "delta": -5446.0, "variation": 1043750.0, @@ -641,6 +781,10 @@ { "added": 1812960.0, "count": 1697, + "firstCount": 2, + "lastCount": 3, + "firstAdded": 462.0, + "lastAdded": 1871.0, "page": "Wikipedia:Administrators'_noticeboard/Incidents", "delta": 770071.0, "variation": 2855849.0, @@ -865,7 +1009,7 @@ ] }, { - "description": "groupBy, two aggs, namespace + robot dim, postAggs", + "description": "groupBy, six aggs, namespace + robot dim, postAggs", "query": { "queryType": "groupBy", "dataSource": "wikipedia_editstream", @@ -880,6 +1024,26 @@ "type": "longSum", "fieldName": "count", "name": "count" + }, + { + "type" : "doubleFirst", + "name" : "firstAdded", + "fieldName" : "added" + }, + { + "type" : "doubleLast", + "name" : "lastAdded", + "fieldName" : "added" + }, + { + "type" : "longFirst", + "name" : "firstCount", + "fieldName" : "count" + }, + { + "type" : "longLast", + "name" : "lastCount", + "fieldName" : "count" } ], "postAggregations": [ @@ -920,6 +1084,10 @@ "event": { "sumOfRowsAndCount": 2268154.0, "count": 1286354, + "firstCount": 1, + "lastCount": 1, + "firstAdded": 70.0, + "lastAdded": 210.0, "robot": "0", "rows": 981800, "namespace": "article" @@ -931,6 +1099,10 @@ "event": { "sumOfRowsAndCount": 1385233.0, "count": 693711, + "firstCount": 1, + "lastCount": 1, + "firstAdded": 39.0, + "lastAdded": 0.0, "robot": "1", "rows": 691522, "namespace": "article" @@ -942,6 +1114,10 @@ "event": { 
"sumOfRowsAndCount": 878393.0, "count": 492643, + "firstCount": 2, + "lastCount": 1, + "firstAdded": 431.0, + "lastAdded": 43.0, "robot": "0", "rows": 385750, "namespace": "wikipedia"