Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(metrics): add metrics around search caching #6255

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo
public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn,
@Nonnull RecommendationRequestContext requestContext) {
SearchRequest searchRequest = buildSearchRequest(userUrn);
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyViewed").time()) {
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlySearched").time()) {
final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT);
// extract results
ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
package com.linkedin.metadata.search.cache;

import com.codahale.metrics.Timer;
import com.linkedin.metadata.query.SearchFlags;
import com.linkedin.metadata.search.SearchEntity;
import com.linkedin.metadata.search.SearchEntityArray;
import com.linkedin.metadata.search.SearchResult;
import com.linkedin.metadata.utils.metrics.MetricUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
Expand Down Expand Up @@ -42,57 +44,66 @@ public static class QueryPagination {
 * This lets us have batches that return a variable number of results (we have no idea which batch the "from"/"size" page corresponds to)
*/
/**
 * Assembles one page of search results ([from, from + size)) by walking fixed-size
 * cached batches in order until the requested window is covered.
 *
 * <p>Batches are fetched via {@code getBatch(batchId)}; each batch normally holds
 * {@code batchSize} entities, so iteration stops early when a batch comes back
 * short or empty (no further results exist). The whole call is timed under the
 * "getSearchResults" metric.
 *
 * @param from zero-based offset of the first entity to return
 * @param size maximum number of entities to return
 * @return a {@link SearchResult} holding the requested slice, with metadata and
 *         total count taken from the last batch fetched
 */
public SearchResult getSearchResults(int from, int size) {
  try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getSearchResults").time()) {
    int resultsSoFar = 0;
    int batchId = 0;
    boolean foundStart = false;
    List<SearchEntity> resultEntities = new ArrayList<>();
    SearchResult batchedResult;
    // Use do-while to make sure we run at least one batch to fetch metadata
    do {
      batchedResult = getBatch(batchId);
      int currentBatchSize = batchedResult.getEntities().size();
      // If the number of results in this batch is 0, no need to continue
      if (currentBatchSize == 0) {
        break;
      }
      if (resultsSoFar + currentBatchSize > from) {
        // First overlapping batch starts mid-batch at (from - resultsSoFar);
        // subsequent batches are consumed from their beginning.
        int startInBatch = foundStart ? 0 : from - resultsSoFar;
        int endInBatch = Math.min(currentBatchSize, startInBatch + size - resultEntities.size());
        resultEntities.addAll(batchedResult.getEntities().subList(startInBatch, endInBatch));
        foundStart = true;
      }
      // If current batch is smaller than the requested batch size, the next batch will return empty.
      if (currentBatchSize < batchSize) {
        break;
      }
      resultsSoFar += currentBatchSize;
      batchId++;
    } while (resultsSoFar < from + size);
    return new SearchResult().setEntities(new SearchEntityArray(resultEntities))
        .setMetadata(batchedResult.getMetadata())
        .setFrom(from)
        .setPageSize(size)
        .setNumEntities(batchedResult.getNumEntities());
  }
}

/** Builds the pagination window for a batch: batch N starts at offset N * batchSize. */
private QueryPagination getBatchQuerySize(int batchId) {
  final int offset = batchId * batchSize;
  return new QueryPagination(offset, batchSize);
}

private SearchResult getBatch(int batchId) {
QueryPagination batch = getBatchQuerySize(batchId);
SearchResult result;
if (enableCache()) {
K cacheKey = cacheKeyGenerator.apply(batch);
result = cache.get(cacheKey, SearchResult.class);
if (result == null) {
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getBatch").time()) {
QueryPagination batch = getBatchQuerySize(batchId);
SearchResult result;
if (enableCache()) {
Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "getBatch_cache_access").time();
K cacheKey = cacheKeyGenerator.apply(batch);
result = cache.get(cacheKey, SearchResult.class);
cacheAccess.stop();
if (result == null) {
Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "getBatch_cache_miss").time();
result = searcher.apply(batch);
cache.put(cacheKey, result);
cacheMiss.stop();
MetricUtils.counter(this.getClass(), "getBatch_cache_miss_count").inc();
}
} else {
result = searcher.apply(batch);
cache.put(cacheKey, result);
}
} else {
result = searcher.apply(batch);
return result;
}
return result;
}

private boolean enableCache() {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package com.linkedin.metadata.search.client;

import com.codahale.metrics.Timer;
import com.linkedin.metadata.browse.BrowseResult;
import com.linkedin.metadata.query.AutoCompleteResult;
import com.linkedin.metadata.query.SearchFlags;
Expand All @@ -8,6 +9,7 @@
import com.linkedin.metadata.search.EntitySearchService;
import com.linkedin.metadata.search.SearchResult;
import com.linkedin.metadata.search.cache.CacheableSearcher;
import com.linkedin.metadata.utils.metrics.MetricUtils;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
Expand Down Expand Up @@ -127,31 +129,26 @@ public AutoCompleteResult getCachedAutoCompleteResults(
@Nullable Filter filters,
int limit,
@Nullable SearchFlags flags) {
Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME);
AutoCompleteResult result;
if (enableCache(flags)) {
Object cacheKey = Quintet.with(entityName, input, field, filters, limit);
result = cache.get(cacheKey, AutoCompleteResult.class);
if (result == null) {
result = getRawAutoCompleteResults(
entityName,
input,
field,
filters,
limit
);
cache.put(cacheKey, result);
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults").time()) {
Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME);
AutoCompleteResult result;
if (enableCache(flags)) {
Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "autocomplete_cache_access").time();
Object cacheKey = Quintet.with(entityName, input, field, filters, limit);
result = cache.get(cacheKey, AutoCompleteResult.class);
cacheAccess.stop();
if (result == null) {
Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "autocomplete_cache_miss").time();
result = getRawAutoCompleteResults(entityName, input, field, filters, limit);
cache.put(cacheKey, result);
cacheMiss.stop();
MetricUtils.counter(this.getClass(), "autocomplete_cache_miss_count").inc();
}
} else {
result = getRawAutoCompleteResults(entityName, input, field, filters, limit);
}
} else {
result = getRawAutoCompleteResults(
entityName,
input,
field,
filters,
limit
);
return result;
}
return result;
}

/**
Expand All @@ -164,31 +161,26 @@ public BrowseResult getCachedBrowseResults(
int from,
int size,
@Nullable SearchFlags flags) {
Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_BROWSE_CACHE_NAME);
BrowseResult result;
if (enableCache(flags)) {
Object cacheKey = Quintet.with(entityName, path, filters, from, size);
result = cache.get(cacheKey, BrowseResult.class);
if (result == null) {
result = getRawBrowseResults(
entityName,
path,
filters,
from,
size
);
cache.put(cacheKey, result);
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedBrowseResults").time()) {
Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_BROWSE_CACHE_NAME);
BrowseResult result;
if (enableCache(flags)) {
Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "browse_cache_access").time();
Object cacheKey = Quintet.with(entityName, path, filters, from, size);
result = cache.get(cacheKey, BrowseResult.class);
cacheAccess.stop();
if (result == null) {
Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "browse_cache_miss").time();
result = getRawBrowseResults(entityName, path, filters, from, size);
cache.put(cacheKey, result);
cacheMiss.stop();
MetricUtils.counter(this.getClass(), "browse_cache_miss_count").inc();
}
} else {
result = getRawBrowseResults(entityName, path, filters, from, size);
}
} else {
result = getRawBrowseResults(
entityName,
path,
filters,
from,
size
);
return result;
}
return result;
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ public long docCount(@Nonnull String entityName) {
@WithSpan
private SearchResult executeAndExtract(@Nonnull EntitySpec entitySpec, @Nonnull SearchRequest searchRequest, @Nullable Filter filter, int from,
int size) {
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esSearch").time()) {
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "executeAndExtract_search").time()) {
final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
// extract results, validated against document model as well
return SearchRequestHandler.getBuilder(entitySpec).extractResult(searchResponse, filter, from, size);
Expand Down Expand Up @@ -168,7 +168,7 @@ public Map<String, Long> aggregateByValue(@Nullable String entityName, @Nonnull
}
searchRequest.indices(indexName);

try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esSearch").time()) {
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "aggregateByValue_search").time()) {
final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
// extract results, validated against document model as well
return SearchRequestHandler.extractTermAggregations(searchResponse, field);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ public List<EnvelopedAspect> getAspectValues(@Nonnull final Urn urn, @Nonnull St

log.debug("Search request is: " + searchRequest);
SearchHits hits;
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esSearch").time()) {
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "searchAspectValues_search").time()) {
final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT);
hits = searchResponse.getHits();
} catch (Exception e) {
Expand Down