Skip to content

Commit

Permalink
remove size from default analysisTypes list for segmentMetadata query
Browse files Browse the repository at this point in the history
  • Loading branch information
himanshug authored and dgolitsyn committed Feb 14, 2017
1 parent 9c63930 commit fe6d84e
Show file tree
Hide file tree
Showing 9 changed files with 30 additions and 29 deletions.
2 changes: 1 addition & 1 deletion docs/content/querying/segmentmetadataquery.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ There are several main parts to a segment metadata query:
|toInclude|A JSON Object representing what columns should be included in the result. Defaults to "all".|no|
|merge|Merge all individual segment metadata results into a single result|no|
|context|See [Context](../querying/query-context.html)|no|
|analysisTypes|A list of Strings specifying what column properties (e.g. cardinality, size) should be calculated and returned in the result. Defaults to ["cardinality", "size", "interval", "minmax"]. See section [analysisTypes](#analysistypes) for more details.|no|
|analysisTypes|A list of Strings specifying what column properties (e.g. cardinality, size) should be calculated and returned in the result. Defaults to ["cardinality", "interval", "minmax"]. See section [analysisTypes](#analysistypes) for more details.|no|
|lenientAggregatorMerge|If true, and if the "aggregators" analysisType is enabled, aggregators will be merged leniently. See below for details.|no|

The format of the result is:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
"columns": {
"location": {
"type": "STRING",
"size": 10140,
"size": 0,
"hasMultipleValues": false,
"minValue": "location_1",
"maxValue": "location_5",
Expand All @@ -43,7 +43,7 @@
},
"__time": {
"type": "LONG",
"size": 10140,
"size": 0,
"hasMultipleValues": false,
"minValue": null,
"maxValue": null,
Expand All @@ -52,15 +52,15 @@
},
"product": {
"type": "STRING",
"size": 9531,
"size": 0,
"hasMultipleValues": false,
"minValue": "product_1",
"maxValue": "product_9",
"cardinality": 15,
"errorMessage": null
}
},
"size": 34881,
"size": 0,
"numRows": 1014,
"aggregators": null,
"timestampSpec": null,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -598,14 +598,14 @@
"has_links": {
"type": "STRING",
"hasMultipleValues": false,
"size": 7773438,
"size": 0,
"cardinality": 2,
"minValue":"No",
"maxValue":"Yes",
"errorMessage": null
}
},
"size": 747056474,
"size": 0,
"numRows": 3702583,
"aggregators": null,
"timestampSpec": null,
Expand All @@ -619,14 +619,14 @@
"has_links": {
"type": "STRING",
"hasMultipleValues": false,
"size": 7901000,
"size": 0,
"cardinality": 2,
"minValue":"No",
"maxValue":"Yes",
"errorMessage": null
}
},
"size": 755796690,
"size": 0,
"numRows": 3743002,
"aggregators": null,
"timestampSpec": null,
Expand All @@ -640,14 +640,14 @@
"has_links": {
"type": "STRING",
"hasMultipleValues": false,
"size": 7405654,
"size": 0,
"cardinality": 2,
"minValue":"No",
"maxValue":"Yes",
"errorMessage": null
}
},
"size": 706893542,
"size": 0,
"numRows":3502959,
"aggregators": null,
"timestampSpec": null,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1048,7 +1048,7 @@
"country_name": {
"type": "STRING",
"hasMultipleValues": false,
"size": 41922148,
"size": 0,
"cardinality": 208,
"minValue":"",
"maxValue":"mmx._unknown",
Expand All @@ -1057,14 +1057,14 @@
"language": {
"type": "STRING",
"hasMultipleValues": false,
"size": 8924222,
"size": 0,
"cardinality": 36,
"minValue":"ar",
"maxValue":"zh",
"errorMessage": null
}
},
"size": 902457341,
"size": 0,
"numRows": 4462111,
"aggregators": null,
"timestampSpec": null,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,6 @@ public byte[] getCacheKey()

public static final EnumSet<AnalysisType> DEFAULT_ANALYSIS_TYPES = EnumSet.of(
AnalysisType.CARDINALITY,
AnalysisType.SIZE,
AnalysisType.INTERVAL,
AnalysisType.MINMAX
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,11 +91,7 @@ private void testIncrementalWorksHelper(EnumSet<SegmentMetadataQuery.AnalysisTyp
final ColumnAnalysis columnAnalysis = columns.get(metric);

Assert.assertEquals(metric, ValueType.FLOAT.name(), columnAnalysis.getType());
if (analyses == null) {
Assert.assertTrue(metric, columnAnalysis.getSize() > 0);
} else {
Assert.assertEquals(metric, 0, columnAnalysis.getSize());
}
Assert.assertEquals(metric, 0, columnAnalysis.getSize());
Assert.assertNull(metric, columnAnalysis.getCardinality());
}
}
Expand Down Expand Up @@ -131,12 +127,11 @@ private void testMappedWorksHelper(EnumSet<SegmentMetadataQuery.AnalysisType> an
Assert.assertNull(columnAnalysis);
} else {
Assert.assertEquals(dimension, ValueType.STRING.name(), columnAnalysis.getType());
Assert.assertEquals(dimension, 0, columnAnalysis.getSize());
if (analyses == null) {
Assert.assertTrue(dimension, columnAnalysis.getSize() > 0);
Assert.assertTrue(dimension, columnAnalysis.getCardinality() > 0);
} else {
Assert.assertEquals(dimension, 0, columnAnalysis.getCardinality().longValue());
Assert.assertEquals(dimension, 0, columnAnalysis.getSize());
}
}
}
Expand All @@ -145,11 +140,7 @@ private void testMappedWorksHelper(EnumSet<SegmentMetadataQuery.AnalysisType> an
final ColumnAnalysis columnAnalysis = columns.get(metric);

Assert.assertEquals(metric, ValueType.FLOAT.name(), columnAnalysis.getType());
if (analyses == null) {
Assert.assertTrue(metric, columnAnalysis.getSize() > 0);
} else {
Assert.assertEquals(metric, 0, columnAnalysis.getSize());
}
Assert.assertEquals(metric, 0, columnAnalysis.getSize());
Assert.assertNull(metric, columnAnalysis.getCardinality());
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ public void testCacheStrategy() throws Exception
new SegmentMetadataQueryQueryToolChest(null).getCacheStrategy(query);

// Test cache key generation
byte[] expectedKey = {0x04, 0x01, (byte) 0xFF, 0x00, 0x01, 0x02, 0x04};
byte[] expectedKey = {0x04, 0x01, (byte) 0xFF, 0x00, 0x02, 0x04};
byte[] actualKey = strategy.computeCacheKey(query);
Assert.assertArrayEquals(expectedKey, actualKey);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,12 @@ public SegmentMetadataQueryTest(
.dataSource("testing")
.intervals("2013/2014")
.toInclude(new ListColumnIncluderator(Arrays.asList("__time", "index", "placement")))
.analysisTypes(null)
.analysisTypes(
SegmentMetadataQuery.AnalysisType.CARDINALITY,
SegmentMetadataQuery.AnalysisType.SIZE,
SegmentMetadataQuery.AnalysisType.INTERVAL,
SegmentMetadataQuery.AnalysisType.MINMAX
)
.merge(true)
.build();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,12 @@ public void testSegmentMetadataUnionQuery()
.dataSource(QueryRunnerTestHelper.unionDataSource)
.intervals(QueryRunnerTestHelper.fullOnInterval)
.toInclude(new ListColumnIncluderator(Lists.newArrayList("placement")))
.analysisTypes(
SegmentMetadataQuery.AnalysisType.CARDINALITY,
SegmentMetadataQuery.AnalysisType.SIZE,
SegmentMetadataQuery.AnalysisType.INTERVAL,
SegmentMetadataQuery.AnalysisType.MINMAX
)
.build();
List result = Sequences.toList(runner.run(query, Maps.newHashMap()), Lists.<SegmentAnalysis>newArrayList());
TestHelper.assertExpectedObjects(ImmutableList.of(expected), result, "failed SegmentMetadata union query");
Expand Down

0 comments on commit fe6d84e

Please sign in to comment.