[7.x] Add time_series_dimension and time_series_metric mapping parameters #78265

Merged
merged 9 commits on Sep 27, 2021
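Editor's note: the diff below threads the two new parameters through the numeric mappers, docs, REST tests and index settings. As a rough illustration of how they look from a user's point of view (this is not part of the PR's diff), the sketch below builds a mapping body of the same shape as the new YAML REST test near the end, programmatically with XContentBuilder. The `cpu_usage` field name, the scaling factor, and the 7.x package locations are assumptions for the example only.

```java
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class TimeSeriesMappingSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical mapping using the two parameters added by this PR.
        XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
                .startObject("properties")
                    .startObject("metricset")
                        .field("type", "keyword")
                        .field("time_series_dimension", true)   // marks the field as a dimension
                    .endObject()
                    .startObject("cpu_usage")                    // assumed field name
                        .field("type", "scaled_float")
                        .field("scaling_factor", 100)
                        .field("time_series_metric", "gauge")    // marks the field as a gauge metric
                    .endObject()
                .endObject()
            .endObject();
        System.out.println(Strings.toString(mapping));
    }
}
```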
@@ -83,7 +83,7 @@ public class ScriptScoreBenchmark {
private final Map<String, MappedFieldType> fieldTypes = org.elasticsearch.core.Map.ofEntries(
org.elasticsearch.core.Map.entry(
"n",
new NumberFieldType("n", NumberType.LONG, false, false, true, true, null, org.elasticsearch.core.Map.of(), null)
new NumberFieldType("n", NumberType.LONG, false, false, true, true, null, org.elasticsearch.core.Map.of(), null, false, null)
)
);
private final IndexFieldDataCache fieldDataCache = new IndexFieldDataCache.None();
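The only change in this benchmark hunk is that the `NumberFieldType` constructor call gains two trailing arguments. Read together with the rest of the PR, they appear to be the dimension flag and the metric type, both left at their defaults here. A minimal sketch of the two variants under that assumption (argument meaning and 7.x package locations are assumed, not stated in this hunk):

```java
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.mapper.TimeSeriesParams;

public class NumberFieldTypeSketch {
    public static void main(String[] args) {
        // As in the benchmark: the two appended arguments are read as (isDimension, metricType),
        // here false and null, i.e. neither a dimension nor a metric.
        NumberFieldType plain = new NumberFieldType(
            "n", NumberType.LONG, false, false, true, true, null,
            org.elasticsearch.core.Map.of(), null, false, null);

        // Hypothetical variant of the same field marked as a gauge metric.
        NumberFieldType gauge = new NumberFieldType(
            "n", NumberType.LONG, false, false, true, true, null,
            org.elasticsearch.core.Map.of(), null, false, TimeSeriesParams.MetricType.gauge);

        System.out.println(plain.name() + " / " + gauge.name());
    }
}
```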
1 change: 0 additions & 1 deletion docs/reference/mapping/types/keyword.asciidoc
@@ -171,4 +171,3 @@ Dimension fields have the following constraints:
include::constant-keyword.asciidoc[]

include::wildcard.asciidoc[]

@@ -85,6 +85,13 @@ public static class Builder extends FieldMapper.Builder {

private final Parameter<Map<String, String>> meta = Parameter.metaParam();

/**
 * Parameter that marks this field as a time series metric and defines its metric type.
 * For numeric fields, the supported metric types are gauge and counter.
 */
private final Parameter<TimeSeriesParams.MetricType> metric;

public Builder(String name, Settings settings) {
this(name, IGNORE_MALFORMED_SETTING.get(settings), COERCE_SETTING.get(settings));
}
@@ -95,6 +102,18 @@ public Builder(String name, boolean ignoreMalformedByDefault, boolean coerceByDe
= Parameter.explicitBoolParam("ignore_malformed", true, m -> toType(m).ignoreMalformed, ignoreMalformedByDefault);
this.coerce
= Parameter.explicitBoolParam("coerce", true, m -> toType(m).coerce, coerceByDefault);

this.metric = TimeSeriesParams.metricParam(
m -> toType(m).metricType,
TimeSeriesParams.MetricType.gauge,
TimeSeriesParams.MetricType.counter
).addValidator(v -> {
if (v != null && hasDocValues.getValue() == false) {
throw new IllegalArgumentException(
"Field [" + TimeSeriesParams.TIME_SERIES_METRIC_PARAM + "] requires that [" + hasDocValues.name + "] is true"
);
}
});
}

Builder scalingFactor(double scalingFactor) {
@@ -107,15 +126,28 @@ Builder nullValue(double nullValue) {
return this;
}

public Builder metric(TimeSeriesParams.MetricType metric) {
this.metric.setValue(metric);
return this;
}

@Override
protected List<Parameter<?>> getParameters() {
return Arrays.asList(indexed, hasDocValues, stored, ignoreMalformed, meta, scalingFactor, coerce, nullValue);
return Arrays.asList(indexed, hasDocValues, stored, ignoreMalformed, meta, scalingFactor, coerce, nullValue, metric);
}

@Override
public ScaledFloatFieldMapper build(MapperBuilderContext context) {
ScaledFloatFieldType type = new ScaledFloatFieldType(context.buildFullName(name), indexed.getValue(), stored.getValue(),
hasDocValues.getValue(), meta.getValue(), scalingFactor.getValue(), nullValue.getValue());
ScaledFloatFieldType type = new ScaledFloatFieldType(
context.buildFullName(name),
indexed.getValue(),
stored.getValue(),
hasDocValues.getValue(),
meta.getValue(),
scalingFactor.getValue(),
nullValue.getValue(),
metric.getValue()
);
return new ScaledFloatFieldMapper(name, type, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
}
@@ -126,16 +158,20 @@ public static final class ScaledFloatFieldType extends SimpleMappedFieldType {

private final double scalingFactor;
private final Double nullValue;
private final TimeSeriesParams.MetricType metricType;


public ScaledFloatFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues,
Map<String, String> meta, double scalingFactor, Double nullValue) {
Map<String, String> meta, double scalingFactor, Double nullValue,
TimeSeriesParams.MetricType metricType) {
super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta);
this.scalingFactor = scalingFactor;
this.nullValue = nullValue;
this.metricType = metricType;
}

public ScaledFloatFieldType(String name, double scalingFactor) {
this(name, true, false, true, Collections.emptyMap(), scalingFactor, null);
this(name, true, false, true, Collections.emptyMap(), scalingFactor, null, null);
}

public double getScalingFactor() {
@@ -266,6 +302,14 @@ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
private double scale(Object input) {
return new BigDecimal(Double.toString(parse(input))).multiply(BigDecimal.valueOf(scalingFactor)).doubleValue();
}

/**
 * If the field is a time series metric field, returns its metric type.
 * @return the metric type, or null if the field is not a time series metric
 */
public TimeSeriesParams.MetricType getMetricType() {
return metricType;
}
}

private final Explicit<Boolean> ignoreMalformed;
@@ -278,6 +322,7 @@ private double scale(Object input) {

private final boolean ignoreMalformedByDefault;
private final boolean coerceByDefault;
private final TimeSeriesParams.MetricType metricType;

private ScaledFloatFieldMapper(
String simpleName,
@@ -295,6 +340,7 @@ private ScaledFloatFieldMapper(
this.coerce = builder.coerce.getValue();
this.ignoreMalformedByDefault = builder.ignoreMalformed.getDefaultValue().value();
this.coerceByDefault = builder.coerce.getDefaultValue().value();
this.metricType = builder.metric.getValue();
}

boolean coerce() {
@@ -317,12 +363,11 @@ protected String contentType() {

@Override
public FieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName(), ignoreMalformedByDefault, coerceByDefault).init(this);
return new Builder(simpleName(), ignoreMalformedByDefault, coerceByDefault).metric(metricType).init(this);
}

@Override
protected void parseCreateField(DocumentParserContext context) throws IOException {

XContentParser parser = context.parser();
Object value;
Number numericValue = null;
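Taken together, the ScaledFloatFieldMapper changes above add one optional `metric` parameter to the builder, thread it through the new eight-argument `ScaledFloatFieldType` constructor, and expose it via `getMetricType()`. A minimal sketch of that round trip, mirroring the constructor call used in `ScaledFloatFieldTypeTests` further down (the field name and scaling factor are illustrative; 7.x package locations are assumed):

```java
import java.util.Collections;

import org.elasticsearch.index.mapper.ScaledFloatFieldMapper.ScaledFloatFieldType;
import org.elasticsearch.index.mapper.TimeSeriesParams;

public class ScaledFloatMetricSketch {
    public static void main(String[] args) {
        // New constructor added in this PR:
        // (name, indexed, stored, hasDocValues, meta, scalingFactor, nullValue, metricType).
        ScaledFloatFieldType gauge = new ScaledFloatFieldType(
            "cpu_usage",                       // hypothetical field name
            true,                              // indexed
            false,                             // stored
            true,                              // doc_values (required for a metric)
            Collections.emptyMap(),            // meta
            100.0,                             // scaling_factor
            null,                              // null_value
            TimeSeriesParams.MetricType.gauge  // time_series_metric
        );

        // The metric type is exposed so that consumers such as aggregations can read it back.
        System.out.println(gauge.name() + " -> " + gauge.getMetricType()); // cpu_usage -> gauge
    }
}
```

Note that the doc_values requirement is enforced by the parameter validator in the Builder (exercised by testMetricAndDocvalues below), not by this constructor.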
@@ -75,9 +75,27 @@ public TokenCountFieldMapper build(MapperBuilderContext context) {

static class TokenCountFieldType extends NumberFieldMapper.NumberFieldType {

TokenCountFieldType(String name, boolean isSearchable, boolean isStored,
boolean hasDocValues, Number nullValue, Map<String, String> meta) {
super(name, NumberFieldMapper.NumberType.INTEGER, isSearchable, isStored, hasDocValues, false, nullValue, meta, null);
TokenCountFieldType(
String name,
boolean isSearchable,
boolean isStored,
boolean hasDocValues,
Number nullValue,
Map<String, String> meta
) {
super(
name,
NumberFieldMapper.NumberType.INTEGER,
isSearchable,
isStored,
hasDocValues,
false,
nullValue,
meta,
null,
false,
null
);
}

@Override
@@ -272,6 +272,47 @@ public void testRejectIndexOptions() {
assertWarnings("Parameter [index_options] has no effect on type [scaled_float] and will be removed in future");
}

public void testMetricType() throws IOException {
// Test default setting
MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b)));
ScaledFloatFieldMapper.ScaledFloatFieldType ft = (ScaledFloatFieldMapper.ScaledFloatFieldType) mapperService.fieldType("field");
assertNull(ft.getMetricType());

assertMetricType("gauge", ScaledFloatFieldMapper.ScaledFloatFieldType::getMetricType);
assertMetricType("counter", ScaledFloatFieldMapper.ScaledFloatFieldType::getMetricType);

{
// Test invalid metric type for this field type
Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
minimalMapping(b);
b.field("time_series_metric", "histogram");
})));
assertThat(
e.getCause().getMessage(),
containsString("Unknown value [histogram] for field [time_series_metric] - accepted values are [gauge, counter]")
);
}
{
// Test a metric type that does not exist for any field type
Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
minimalMapping(b);
b.field("time_series_metric", "unknown");
})));
assertThat(
e.getCause().getMessage(),
containsString("Unknown value [unknown] for field [time_series_metric] - accepted values are [gauge, counter]")
);
}
}

public void testMetricAndDocvalues() {
Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
minimalMapping(b);
b.field("time_series_metric", "counter").field("doc_values", false);
})));
assertThat(e.getCause().getMessage(), containsString("Field [time_series_metric] requires that [doc_values] is true"));
}

@Override
protected void randomFetchTestFieldConfig(XContentBuilder b) throws IOException {
// Large floats are a terrible idea but the round trip should still work no matter how badly you configure the field
@@ -54,7 +54,15 @@ public void testRangeQuery() throws IOException {
// this test checks that searching scaled floats yields the same results as
// searching doubles that are rounded to the closest half float
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(
"scaled_float", true, false, false, Collections.emptyMap(), 0.1 + randomDouble() * 100, null);
"scaled_float",
true,
false,
false,
Collections.emptyMap(),
0.1 + randomDouble() * 100,
null,
null
);
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
final int numDocs = 1000;
@@ -18,12 +18,14 @@ enable:
type: date
metricset:
type: keyword
time_series_dimension: true
k8s:
properties:
pod:
properties:
uid:
type: keyword
time_series_dimension: true
name:
type: keyword
ip:
@@ -0,0 +1,50 @@
add time series mappings:
- skip:
version: " - 7.99.99"
reason: introduced in 8.0.0 to be backported to 7.16.0

- do:
indices.create:
index: test_index
body:
settings:
index:
mode: time_series
number_of_replicas: 0
number_of_shards: 2
mappings:
properties:
"@timestamp":
type: date
metricset:
type: keyword
time_series_dimension: true
k8s:
properties:
pod:
properties:
availability_zone:
type: short
time_series_dimension: true
uid:
type: keyword
time_series_dimension: true
name:
type: keyword
ip:
type: ip
time_series_dimension: true
network:
properties:
tx:
type: long
time_series_metric: counter
rx:
type: integer
time_series_metric: gauge
packets_dropped:
type: long
time_series_metric: gauge
latency:
type: double
time_series_metric: gauge
@@ -149,6 +149,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING,
MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING,
MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING,
MapperService.INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING,
MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING,
BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING,
IndexModule.INDEX_STORE_TYPE_SETTING,
12 changes: 12 additions & 0 deletions server/src/main/java/org/elasticsearch/index/IndexSettings.java
@@ -35,6 +35,7 @@
import java.util.function.Function;

import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING;
import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING;
import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING;
import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING;
import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING;
@@ -452,6 +453,7 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) {
private volatile long mappingTotalFieldsLimit;
private volatile long mappingDepthLimit;
private volatile long mappingFieldNameLengthLimit;
private volatile long mappingDimensionFieldsLimit;

/**
* The maximum number of refresh listeners allows on this shard.
@@ -579,6 +581,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti
mappingTotalFieldsLimit = scopedSettings.get(INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING);
mappingDepthLimit = scopedSettings.get(INDEX_MAPPING_DEPTH_LIMIT_SETTING);
mappingFieldNameLengthLimit = scopedSettings.get(INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING);
mappingDimensionFieldsLimit = scopedSettings.get(INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING);

scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, mergePolicyConfig::setNoCFSRatio);
scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_DELETES_PCT_ALLOWED_SETTING,
@@ -637,6 +640,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti
scopedSettings.addSettingsUpdateConsumer(INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING, this::setMappingTotalFieldsLimit);
scopedSettings.addSettingsUpdateConsumer(INDEX_MAPPING_DEPTH_LIMIT_SETTING, this::setMappingDepthLimit);
scopedSettings.addSettingsUpdateConsumer(INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING, this::setMappingFieldNameLengthLimit);
scopedSettings.addSettingsUpdateConsumer(INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING, this::setMappingDimensionFieldsLimit);
}

private void setSearchIdleAfter(TimeValue searchIdleAfter) { this.searchIdleAfter = searchIdleAfter; }
@@ -1173,4 +1177,12 @@ public long getMappingFieldNameLengthLimit() {
private void setMappingFieldNameLengthLimit(long value) {
this.mappingFieldNameLengthLimit = value;
}

public long getMappingDimensionFieldsLimit() {
return mappingDimensionFieldsLimit;
}

private void setMappingDimensionFieldsLimit(long value) {
this.mappingDimensionFieldsLimit = value;
}
}
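IndexSettings now registers the new dimension-fields limit alongside the existing mapping limits and exposes it through getMappingDimensionFieldsLimit(). The sketch below shows one way such a limit could be consulted; it is not the PR's actual enforcement code (which lives in the mapping validation path, not shown in this diff), and the error message is made up for the example:

```java
import org.elasticsearch.index.IndexSettings;

public class DimensionFieldsLimitSketch {
    /**
     * Hypothetical check of a count of fields marked with time_series_dimension
     * against the per-index limit read from the new IndexSettings getter.
     */
    static void checkDimensionFieldLimit(IndexSettings indexSettings, long dimensionFieldCount) {
        long limit = indexSettings.getMappingDimensionFieldsLimit();
        if (dimensionFieldCount > limit) {
            throw new IllegalArgumentException(
                "Number of fields marked as [time_series_dimension] [" + dimensionFieldCount
                    + "] exceeds the limit [" + limit + "]");
        }
    }
}
```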