This repository has been archived by the owner on Jun 26, 2024. It is now read-only.

chore: use filter in entities query for selections as well #201

Merged: 6 commits on Jun 15, 2024
Changes from 5 commits
@@ -1,6 +1,8 @@
package org.hypertrace.gateway.service.common;

import static java.util.Collections.emptyMap;
import static java.util.function.Predicate.not;
import static org.hypertrace.core.attribute.service.v1.AttributeSource.QS;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
@@ -58,6 +60,7 @@ public class ExpressionContext {
private ImmutableMap<String, List<Expression>> sourceToFilterExpressionMap;
private ImmutableMap<String, Set<String>> sourceToFilterAttributeMap;
private ImmutableMap<String, Set<String>> filterAttributeToSourceMap;
private Map<AttributeSource, Filter> sourceToFilterMap;

// and filter
private boolean isAndFilter;
@@ -92,6 +95,9 @@ public ExpressionContext(
buildSourceToGroupByExpressionMaps();

this.isAndFilter = gatewayServiceConfig.isEntityAndFilterEnabled() && isAndFilter(filter);
// build the source-to-filter map only if the filter is purely an AND filter
this.sourceToFilterMap =
isAndFilter(filter) ? buildSourceToAndFilterMap(filter) : Collections.emptyMap();
}

public Map<String, List<Expression>> getSourceToSelectionExpressionMap() {
@@ -106,6 +112,10 @@ public void setSourceToSelectionExpressionMap(
.build();
}

public Map<AttributeSource, Filter> getSourceToFilterMap() {
return sourceToFilterMap;
}

public Map<String, Set<String>> getSourceToSelectionAttributeMap() {
return sourceToSelectionAttributeMap;
}
@@ -620,6 +630,45 @@ private static Set<String> getIntersectingSourceSets(
.orElse(Collections.emptySet());
}

private Map<AttributeSource, Filter> buildSourceToAndFilterMap(Filter filter) {
Operator operator = filter.getOperator();
if (operator == Operator.AND) {
return filter.getChildFilterList().stream()
.map(this::buildSourceToAndFilterMap)
.flatMap(map -> map.entrySet().stream())
.collect(
Collectors.toUnmodifiableMap(
Map.Entry::getKey,
Map.Entry::getValue,
(value1, value2) ->
Filter.newBuilder()
.setOperator(Operator.AND)
.addChildFilter(value1)
.addChildFilter(value2)
.build()));

} else if (operator == Operator.OR) {
return Collections.emptyMap();
} else {
List<AttributeSource> attributeSources = getAttributeSources(filter.getLhs());
if (attributeSources.isEmpty()) {
return emptyMap();
}

return attributeSources.contains(QS)
? Map.of(QS, filter)
: Map.of(attributeSources.get(0), filter);
}
}
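
A minimal, self-contained sketch of the merge step above, with plain strings standing in for the Filter protos and AttributeSource values: when an AND filter contains two leaf filters that resolve to the same source, the merge function passed to Collectors.toUnmodifiableMap collapses them into a single combined filter for that source. The filter strings here are hypothetical.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class SourceFilterMergeSketch {
  public static void main(String[] args) {
    // hypothetical leaf filters, keyed by the single source that can serve them
    List<Map.Entry<String, String>> leafFilters =
        List.of(
            Map.entry("QS", "duration > 10ms"),
            Map.entry("QS", "numCalls > 5"),
            Map.entry("EDS", "labels contains critical"));

    Map<String, String> sourceToFilter =
        leafFilters.stream()
            .collect(
                Collectors.toUnmodifiableMap(
                    Map.Entry::getKey,
                    Map.Entry::getValue,
                    // two leaf filters for the same source collapse into one combined filter
                    (filter1, filter2) -> "(" + filter1 + " AND " + filter2 + ")"));

    // prints something like {QS=(duration > 10ms AND numCalls > 5), EDS=labels contains critical}
    // (map iteration order is unspecified)
    System.out.println(sourceToFilter);
  }
}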

public List<AttributeSource> getAttributeSources(Expression expression) {
Set<String> attributeIds = ExpressionReader.extractAttributeIds(expression);
return attributeIds.stream()
.map(attributeId -> attributeMetadataMap.get(attributeId).getSourcesList())
.flatMap(Collection::stream)
.collect(Collectors.toUnmodifiableList());
}

@Override
public String toString() {
return "ExpressionContext{"
@@ -1,32 +1,25 @@
package org.hypertrace.gateway.service.entity.query;

import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableList;
import static org.hypertrace.core.attribute.service.v1.AttributeSource.EDS;
import static org.hypertrace.core.attribute.service.v1.AttributeSource.QS;

import com.google.common.annotations.VisibleForTesting;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.hypertrace.core.attribute.service.v1.AttributeMetadata;
import org.hypertrace.core.attribute.service.v1.AttributeSource;
import org.hypertrace.gateway.service.common.ExpressionContext;
import org.hypertrace.gateway.service.common.util.ExpressionReader;
import org.hypertrace.gateway.service.common.util.TimeRangeFilterUtil;
import org.hypertrace.gateway.service.entity.query.visitor.ExecutionContextBuilderVisitor;
import org.hypertrace.gateway.service.entity.query.visitor.FilterOptimizingVisitor;
import org.hypertrace.gateway.service.entity.query.visitor.PrintVisitor;
import org.hypertrace.gateway.service.v1.common.Expression;
import org.hypertrace.gateway.service.v1.common.Filter;
import org.hypertrace.gateway.service.v1.common.Operator;
import org.hypertrace.gateway.service.v1.common.OrderByExpression;
@@ -39,18 +32,11 @@ public class ExecutionTreeBuilder {

private static final Logger LOG = LoggerFactory.getLogger(ExecutionTreeBuilder.class);

private final Map<String, AttributeMetadata> attributeMetadataMap;
private final EntityExecutionContext executionContext;
private final Set<String> sourceSetsIfFilterAndOrderByAreFromSameSourceSets;

public ExecutionTreeBuilder(EntityExecutionContext executionContext) {
this.executionContext = executionContext;
this.attributeMetadataMap =
executionContext
.getAttributeMetadataProvider()
.getAttributesMetadata(
executionContext.getEntitiesRequestContext(),
executionContext.getEntitiesRequest().getEntityType());

this.sourceSetsIfFilterAndOrderByAreFromSameSourceSets =
ExpressionContext.getSourceSetsIfFilterAndOrderByAreFromSameSourceSets(
@@ -132,7 +118,7 @@ public QueryNode build() {

ExecutionTreeUtils.removeDuplicateSelectionAttributes(executionContext, QS.name());

QueryNode filterTree = buildFilterTree(executionContext, entitiesRequest.getFilter());
QueryNode filterTree = buildFilterTreeNode(executionContext, entitiesRequest.getFilter());
if (LOG.isDebugEnabled()) {
LOG.debug("Filter Tree:{}", filterTree.acceptVisitor(new PrintVisitor()));
}
@@ -215,6 +201,8 @@ private QueryNode buildExecutionTreeForEdsFilterAndSelection() {
@VisibleForTesting
QueryNode buildExecutionTree(EntityExecutionContext executionContext, QueryNode filterTree) {
QueryNode rootNode = filterTree;
// set up source filter to be used during selections
// filterTree.acceptVisitor(new SourceFilterVisitor(executionContext));
Contributor: do we need this?

Contributor (author): removed this commented section

// Select attributes from sources in order by but not part of the filter tree
Set<String> attrSourcesForOrderBy = executionContext.getPendingSelectionSourcesForOrderBy();
if (!attrSourcesForOrderBy.isEmpty()) {
@@ -268,8 +256,7 @@ QueryNode buildExecutionTree(EntityExecutionContext executionContext, QueryNode
return rootNode;
}

@VisibleForTesting
QueryNode buildFilterTree(EntityExecutionContext context, Filter filter) {
QueryNode buildFilterTreeNode(EntityExecutionContext context, Filter filter) {
EntitiesRequest entitiesRequest = executionContext.getEntitiesRequest();
// Convert the time range into a filter and set it on the request so that all downstream
// components needn't treat it specially
@@ -281,29 +268,29 @@ QueryNode buildFilterTree(EntityExecutionContext context, Filter filter) {
entitiesRequest.getEndTimeMillis());

boolean isAndFilter = executionContext.getExpressionContext().isAndFilter();
return isAndFilter
? buildAndFilterTree(entitiesRequest)
: buildFilterTree(entitiesRequest, timeRangeFilter);
return isAndFilter ? buildAndFilterTree(context) : buildFilterTree(context, timeRangeFilter);
}

@VisibleForTesting
QueryNode buildFilterTree(EntitiesRequest entitiesRequest, Filter filter) {
QueryNode buildFilterTree(EntityExecutionContext context, Filter filter) {
EntitiesRequest entitiesRequest = context.getEntitiesRequest();
if (filter.equals(Filter.getDefaultInstance())) {
return new NoOpNode();
}
Operator operator = filter.getOperator();
if (operator == Operator.AND) {
return new AndNode(
filter.getChildFilterList().stream()
.map(childFilter -> buildFilterTree(entitiesRequest, childFilter))
.map(childFilter -> buildFilterTree(context, childFilter))
.collect(Collectors.toList()));
} else if (operator == Operator.OR) {
return new OrNode(
filter.getChildFilterList().stream()
.map(childFilter -> buildFilterTree(entitiesRequest, childFilter))
.map(childFilter -> buildFilterTree(context, childFilter))
.collect(Collectors.toList()));
} else {
List<AttributeSource> sources = getAttributeSources(filter.getLhs());
List<AttributeSource> sources =
context.getExpressionContext().getAttributeSources(filter.getLhs());
// if the filter by and order by are from QS, pagination can be pushed down to QS

// There will always be a DataFetcherNode for QS, because the results are always fetched
@@ -319,7 +306,8 @@ QueryNode buildFilterTree(EntitiesRequest entitiesRequest, Filter filter) {
}

// filters and order by on QS, but you can still have selection on EDS
QueryNode buildAndFilterTree(EntitiesRequest entitiesRequest) {
QueryNode buildAndFilterTree(EntityExecutionContext context) {
EntitiesRequest entitiesRequest = context.getEntitiesRequest();
// If the filter by and order by are from QS (and selections are on other sources), pagination
// can be pushed down to QS
// Since the filter and order by are from QS, there won't be any filter on other
@@ -330,7 +318,7 @@ QueryNode buildAndFilterTree(EntitiesRequest entitiesRequest) {
}

Map<AttributeSource, Filter> sourceToAndFilterMap =
new HashMap<>(buildSourceToAndFilterMap(entitiesRequest.getFilter()));
Map.copyOf(context.getExpressionContext().getSourceToFilterMap());
Contributor: returns an unmodifiable map. The map is being modified on L328

Contributor (author): That's correct. Moved it back to HashMap
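
For reference, a minimal sketch of the point raised above: Map.copyOf returns an unmodifiable map, so a later put or remove throws, while copying into a new HashMap keeps the map mutable.

import java.util.HashMap;
import java.util.Map;

public class UnmodifiableMapSketch {
  public static void main(String[] args) {
    Map<String, String> source = Map.of("QS", "someFilter");

    Map<String, String> unmodifiable = Map.copyOf(source);
    try {
      unmodifiable.remove("QS"); // throws UnsupportedOperationException
    } catch (UnsupportedOperationException e) {
      System.out.println("Map.copyOf result cannot be modified");
    }

    Map<String, String> modifiable = new HashMap<>(source);
    modifiable.remove("QS"); // fine: the HashMap copy is mutable
    System.out.println(modifiable); // prints {}
  }
}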


// qs node as the pivot node to fetch time range data
QueryNode qsNode =
@@ -387,37 +375,6 @@ QueryNode buildAndFilterTree(EntitiesRequest entitiesRequest) {
}
}

private Map<AttributeSource, Filter> buildSourceToAndFilterMap(Filter filter) {
Operator operator = filter.getOperator();
if (operator == Operator.AND) {
return filter.getChildFilterList().stream()
.map(this::buildSourceToAndFilterMap)
.flatMap(map -> map.entrySet().stream())
.collect(
Collectors.toUnmodifiableMap(
Entry::getKey,
Entry::getValue,
(value1, value2) ->
Filter.newBuilder()
.setOperator(Operator.AND)
.addChildFilter(value1)
.addChildFilter(value2)
.build()));

} else if (operator == Operator.OR) {
return Collections.emptyMap();
} else {
List<AttributeSource> attributeSources = getAttributeSources(filter.getLhs());
if (attributeSources.isEmpty()) {
return emptyMap();
}

return attributeSources.contains(QS)
? Map.of(QS, filter)
: Map.of(attributeSources.get(0), filter);
}
}

private QueryNode checkAndAddSortAndPaginationNode(
QueryNode childNode, EntityExecutionContext executionContext) {
EntitiesRequest entitiesRequest = executionContext.getEntitiesRequest();
@@ -479,12 +436,4 @@ private QueryNode createQsDataFetcherNodeWithLimitAndOffset(EntitiesRequest enti
private QueryNode createPaginateOnlyNode(QueryNode queryNode, EntitiesRequest entitiesRequest) {
return new PaginateOnlyNode(queryNode, entitiesRequest.getLimit(), entitiesRequest.getOffset());
}

public List<AttributeSource> getAttributeSources(Expression expression) {
Set<String> attributeIds = ExpressionReader.extractAttributeIds(expression);
return attributeIds.stream()
.map(attributeId -> attributeMetadataMap.get(attributeId).getSourcesList())
.flatMap(Collection::stream)
.collect(Collectors.toUnmodifiableList());
}
}
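
As a side note, a simplified, self-contained sketch of the buildFilterTree recursion above, using hypothetical stand-in types rather than the gateway-service Filter proto and QueryNode classes: AND and OR filters become composite nodes over their recursively built children, and a leaf filter becomes a data fetch against one of its attribute's sources.

import java.util.List;
import java.util.stream.Collectors;

public class FilterTreeSketch {
  // hypothetical stand-ins for the gateway-service Filter proto
  interface Filter {}
  record And(List<Filter> children) implements Filter {}
  record Or(List<Filter> children) implements Filter {}
  record Leaf(String source, String predicate) implements Filter {}

  // hypothetical stand-ins for the QueryNode hierarchy
  interface QueryNode {}
  record AndNode(List<QueryNode> children) implements QueryNode {}
  record OrNode(List<QueryNode> children) implements QueryNode {}
  record DataFetcherNode(String source, String predicate) implements QueryNode {}

  static QueryNode buildFilterTree(Filter filter) {
    if (filter instanceof And and) {
      // composite AND filter -> AndNode over recursively built children
      return new AndNode(
          and.children().stream().map(FilterTreeSketch::buildFilterTree).collect(Collectors.toList()));
    }
    if (filter instanceof Or or) {
      // composite OR filter -> OrNode over recursively built children
      return new OrNode(
          or.children().stream().map(FilterTreeSketch::buildFilterTree).collect(Collectors.toList()));
    }
    // leaf filter -> fetch from one of the attribute's sources
    Leaf leaf = (Leaf) filter;
    return new DataFetcherNode(leaf.source(), leaf.predicate());
  }

  public static void main(String[] args) {
    Filter filter =
        new And(
            List.of(
                new Leaf("QS", "duration > 10ms"),
                new Or(
                    List.of(
                        new Leaf("EDS", "labels contains critical"),
                        new Leaf("QS", "numCalls > 5")))));
    System.out.println(buildFilterTree(filter));
  }
}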
@@ -10,11 +10,13 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.hypertrace.core.attribute.service.v1.AttributeSource;
import org.hypertrace.gateway.service.common.datafetcher.EntityFetcherResponse;
import org.hypertrace.gateway.service.common.datafetcher.EntityResponse;
import org.hypertrace.gateway.service.common.datafetcher.IEntityFetcher;
@@ -265,7 +267,7 @@ public EntityResponse visit(SelectionNode selectionNode) {
.getExpressionContext()
.getSourceToSelectionExpressionMap()
.get(source))
.setFilter(filter)
.setFilter(addSourceFilters(executionContext, source, filter))
.build();
IEntityFetcher entityFetcher = queryHandlerRegistry.getEntityFetcher(source);
EntitiesRequestContext context =
@@ -295,7 +297,7 @@ public EntityResponse visit(SelectionNode selectionNode) {
.getExpressionContext()
.getSourceToMetricExpressionMap()
.get(source))
.setFilter(filter)
.setFilter(addSourceFilters(executionContext, source, filter))
.build();
IEntityFetcher entityFetcher = queryHandlerRegistry.getEntityFetcher(source);
EntitiesRequestContext context =
@@ -325,7 +327,7 @@ public EntityResponse visit(SelectionNode selectionNode) {
.getExpressionContext()
.getSourceToTimeAggregationMap()
.get(source))
.setFilter(filter)
.setFilter(addSourceFilters(executionContext, source, filter))
.build();
IEntityFetcher entityFetcher = queryHandlerRegistry.getEntityFetcher(source);
EntitiesRequestContext requestContext =
@@ -355,6 +357,25 @@ public EntityResponse visit(SelectionNode selectionNode) {
}
}

private Filter addSourceFilters(
EntityExecutionContext executionContext, String source, Filter filter) {
Optional<Filter> sourceFilterOptional =
Optional.ofNullable(
executionContext
.getExpressionContext()
.getSourceToFilterMap()
.get(AttributeSource.valueOf(source)));
return sourceFilterOptional
.map(
sourceFilter ->
Filter.newBuilder()
.setOperator(Operator.AND)
.addChildFilter(filter)
.addChildFilter(sourceFilter)
.build())
.orElse(filter);
}
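
A minimal sketch of the null-safe pattern in addSourceFilters above, with plain strings standing in for the Filter protos: look up the per-source filter pushed down via the expression context, AND it with the incoming filter when present, and otherwise return the incoming filter unchanged. The map contents and filter strings are hypothetical.

import java.util.Map;
import java.util.Optional;

public class AddSourceFiltersSketch {
  public static void main(String[] args) {
    // hypothetical source-to-filter map built from an AND-only filter
    Map<String, String> sourceToFilterMap = Map.of("QS", "duration > 10ms");

    // source has a pushed-down filter -> combined with AND
    System.out.println(addSourceFilters(sourceToFilterMap, "QS", "id IN (a, b)"));
    // no pushed-down filter for this source -> incoming filter unchanged
    System.out.println(addSourceFilters(sourceToFilterMap, "EDS", "id IN (a, b)"));
  }

  static String addSourceFilters(
      Map<String, String> sourceToFilterMap, String source, String filter) {
    return Optional.ofNullable(sourceToFilterMap.get(source))
        .map(sourceFilter -> "(" + filter + " AND " + sourceFilter + ")")
        .orElse(filter);
  }
}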

Filter constructFilterFromChildNodesResult(EntityFetcherResponse result) {
if (result.isEmpty()) {
return Filter.getDefaultInstance();
@@ -19,7 +19,6 @@
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@@ -39,6 +38,7 @@
import org.hypertrace.gateway.service.common.config.ScopeFilterConfigs;
import org.hypertrace.gateway.service.common.util.QueryServiceClient;
import org.hypertrace.gateway.service.entity.config.EntityIdColumnsConfig;
import org.hypertrace.gateway.service.entity.config.LogConfig;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.params.ParameterizedTest;
@@ -82,14 +82,19 @@ public static void setUp() throws IOException {
+ " }\n"
+ " ]\n"
+ " }\n"
+ "]";
+ "]\n"
+ "entity.service.log.config = {\n"
+ " query.threshold.millis = 1500\n"
+ "}\n";
Config config = ConfigFactory.parseString(scopeFiltersConfig);
scopeFilterConfigs = new ScopeFilterConfigs(config);
entityIdColumnsConfig = new EntityIdColumnsConfig(Collections.emptyMap());
entityIdColumnsConfig = new EntityIdColumnsConfig(Map.of("BACKEND", "id"));
gatewayServiceConfig = mock(GatewayServiceConfig.class);
when(gatewayServiceConfig.getEntityIdColumnsConfig()).thenReturn(entityIdColumnsConfig);
when(gatewayServiceConfig.getScopeFilterConfigs()).thenReturn(scopeFilterConfigs);
entityTypesProvider = mock(EntityTypesProvider.class);
LogConfig logConfig = new LogConfig(config);
when(gatewayServiceConfig.getLogConfig()).thenReturn(logConfig);
}
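
A small standalone sketch, independent of LogConfig's internals (which are not shown in this diff), that the added HOCON block parses with Typesafe Config and exposes the query threshold; the lookup path here is assumed from the keys in the string above.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class LogConfigParseSketch {
  public static void main(String[] args) {
    Config config =
        ConfigFactory.parseString(
            "entity.service.log.config = {\n" + "  query.threshold.millis = 1500\n" + "}\n");
    // reads the nested value added for LogConfig in the test setup
    long thresholdMillis = config.getLong("entity.service.log.config.query.threshold.millis");
    System.out.println(thresholdMillis); // 1500
  }
}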

private static Reader readResourceFile(String fileName) {
@@ -24,13 +24,13 @@
import org.hypertrace.gateway.service.v1.entity.EntitiesRequest;
import org.junit.jupiter.api.Test;

public class EntityServiceAndGatewayServiceConverterTest extends AbstractGatewayServiceTest {
class EntityServiceAndGatewayServiceConverterTest extends AbstractGatewayServiceTest {

@Test
public void testAddBetweenFilter() {
void testAddBetweenFilter() {
int startTimeMillis = 1;
int endTimeMillis = 2;
String timestamp = "lastActivity";
String timestamp = "startTime";
String timestampAttributeName = BACKEND.name() + "." + timestamp;

Expression.Builder expectedStartTimeConstant =