Commit 81db349
feat(cache): add hazelcast distributed cache option (datahub-project#6645)
Co-authored-by: Aseem Bansal <[email protected]>
1 parent 860fee2
Showing 12 changed files with 316 additions and 73 deletions.
metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java (17 changes: 15 additions & 2 deletions)
```diff
@@ -1,11 +1,24 @@
 package com.linkedin.metadata.search.cache;
 
 import com.linkedin.metadata.graph.EntityLineageResult;
+import java.io.Serializable;
 import lombok.Data;
 
+import static com.datahub.util.RecordUtils.*;
+import static com.linkedin.metadata.search.utils.GZIPUtil.*;
+
 
 @Data
-public class CachedEntityLineageResult {
-  private final EntityLineageResult entityLineageResult;
+public class CachedEntityLineageResult implements Serializable {
+  private final byte[] entityLineageResult;
   private final long timestamp;
+
+  public CachedEntityLineageResult(EntityLineageResult lineageResult, long timestamp) {
+    this.entityLineageResult = gzipCompress(toJsonString(lineageResult));
+    this.timestamp = timestamp;
+  }
+
+  public EntityLineageResult getEntityLineageResult() {
+    return toRecordTemplate(EntityLineageResult.class, gzipDecompress(entityLineageResult));
+  }
 }
```
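The wrapper now stores the lineage result as gzipped JSON bytes, making it `Serializable` so it can move between Hazelcast members. A minimal sketch of the round trip this enables; the fluent setters assume the usual Pegasus-generated accessors on `EntityLineageResult`, and the snippet is illustrative, not part of the commit:

```java
import com.linkedin.metadata.graph.EntityLineageResult;
import com.linkedin.metadata.graph.LineageRelationshipArray;
import com.linkedin.metadata.search.cache.CachedEntityLineageResult;

public class CachedLineageRoundTrip {
  public static void main(String[] args) {
    // Build a trivial lineage result (Pegasus-generated setters assumed).
    EntityLineageResult lineage = new EntityLineageResult()
        .setStart(0)
        .setCount(0)
        .setTotal(0)
        .setRelationships(new LineageRelationshipArray());

    // The constructor compresses toJsonString(lineage) into the byte[] field,
    // so the cached object is Serializable and cluster-friendly.
    CachedEntityLineageResult cached =
        new CachedEntityLineageResult(lineage, System.currentTimeMillis());

    // The getter decompresses and re-parses, returning an equal copy
    // rather than the original object reference.
    System.out.println(cached.getEntityLineageResult().equals(lineage)); // true
  }
}
```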
metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java (new file, 48 additions & 0 deletions)
```java
package com.linkedin.metadata.search.utils;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;


public class GZIPUtil {
  private GZIPUtil() { }

  public static String gzipDecompress(byte[] gzipped) {
    String unzipped;
    try (ByteArrayInputStream bis = new ByteArrayInputStream(gzipped);
        GZIPInputStream gis = new GZIPInputStream(bis);
        ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
      byte[] buffer = new byte[1024];
      int len;
      while ((len = gis.read(buffer)) != -1) {
        bos.write(buffer, 0, len);
      }
      unzipped = bos.toString(StandardCharsets.UTF_8);
    } catch (IOException ie) {
      throw new IllegalStateException("Error while unzipping value.", ie);
    }
    return unzipped;
  }

  public static byte[] gzipCompress(String unzipped) {
    byte[] gzipped;
    try (ByteArrayInputStream bis = new ByteArrayInputStream(unzipped.getBytes(StandardCharsets.UTF_8));
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        GZIPOutputStream gzipOutputStream = new GZIPOutputStream(bos)) {
      byte[] buffer = new byte[1024];
      int len;
      while ((len = bis.read(buffer)) != -1) {
        gzipOutputStream.write(buffer, 0, len);
      }
      gzipOutputStream.finish();
      gzipped = bos.toByteArray();
    } catch (IOException ie) {
      throw new IllegalStateException("Error while gzipping value: " + unzipped);
    }
    return gzipped;
  }
}
```
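A quick round-trip sketch of the new utility (illustrative only, not part of the commit): compress a JSON string, decompress it, and recover the identical string. Note that gzip's header overhead means tiny payloads can actually grow; the savings show up on large serialized lineage graphs.

```java
import com.linkedin.metadata.search.utils.GZIPUtil;

public class GzipRoundTrip {
  public static void main(String[] args) {
    String json = "{\"start\":0,\"count\":0,\"total\":42,\"relationships\":[]}";

    byte[] packed = GZIPUtil.gzipCompress(json);     // UTF-8 bytes -> gzip stream
    String restored = GZIPUtil.gzipDecompress(packed);

    // Lossless round trip; on short inputs packed.length may exceed the input size.
    System.out.println(json.equals(restored));       // true
    System.out.println(json.length() + " chars -> " + packed.length + " bytes");
  }
}
```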
metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java (new file, 74 additions & 0 deletions)
```java
package com.linkedin.gms.factory.common;

import com.github.benmanes.caffeine.cache.Caffeine;
import com.hazelcast.config.Config;
import com.hazelcast.config.EvictionConfig;
import com.hazelcast.config.EvictionPolicy;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.MaxSizePolicy;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.spring.cache.HazelcastCacheManager;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.cache.CacheManager;
import org.springframework.cache.caffeine.CaffeineCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;


@Configuration
public class CacheConfig {

  @Value("${CACHE_TTL_SECONDS:600}")
  private int cacheTtlSeconds;

  @Value("${CACHE_MAX_SIZE:10000}")
  private int cacheMaxSize;

  @Value("${searchService.cache.hazelcast.serviceName:hazelcast-service}")
  private String hazelcastServiceName;

  @Bean
  @ConditionalOnProperty(name = "searchService.cacheImplementation", havingValue = "caffeine")
  public CacheManager caffeineCacheManager() {
    CaffeineCacheManager cacheManager = new CaffeineCacheManager();
    cacheManager.setCaffeine(caffeineCacheBuilder());
    return cacheManager;
  }

  private Caffeine<Object, Object> caffeineCacheBuilder() {
    return Caffeine.newBuilder()
        .initialCapacity(100)
        .maximumSize(cacheMaxSize)
        .expireAfterAccess(cacheTtlSeconds, TimeUnit.SECONDS)
        .recordStats();
  }

  @Bean
  @ConditionalOnProperty(name = "searchService.cacheImplementation", havingValue = "hazelcast")
  public CacheManager hazelcastCacheManager() {
    Config config = new Config();
    // TODO: This setting is equivalent to expireAfterAccess, refreshes timer after a get, put, containsKey etc.
    //       Is this behavior what we actually desire? Should we change it now?
    MapConfig mapConfig = new MapConfig().setMaxIdleSeconds(cacheTtlSeconds);

    EvictionConfig evictionConfig = new EvictionConfig()
        .setMaxSizePolicy(MaxSizePolicy.PER_NODE)
        .setSize(cacheMaxSize)
        .setEvictionPolicy(EvictionPolicy.LFU);
    mapConfig.setEvictionConfig(evictionConfig);
    mapConfig.setName("default");
    config.addMapConfig(mapConfig);

    config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false);
    config.getNetworkConfig().getJoin().getKubernetesConfig().setEnabled(true)
        .setProperty("service-dns", hazelcastServiceName);


    HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance(config);

    return new HazelcastCacheManager(hazelcastInstance);
  }
}
```
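The two `@Bean` methods are mutually exclusive via `@ConditionalOnProperty`, so callers only ever see Spring's `CacheManager` abstraction. A hedged sketch of a consumer (hypothetical class and cache name, not part of the commit) that works unchanged whether `searchService.cacheImplementation` is set to `caffeine` or `hazelcast`:

```java
import org.springframework.cache.Cache;
import org.springframework.cache.CacheManager;
import org.springframework.stereotype.Component;

// Hypothetical consumer: the injected CacheManager is whichever bean the
// searchService.cacheImplementation property activated (caffeine or hazelcast).
@Component
public class LineageCacheClient {

  private final Cache cache;

  public LineageCacheClient(CacheManager cacheManager) {
    // With the Hazelcast manager this lazily creates a distributed map that
    // inherits the "default" MapConfig above (max-idle TTL, per-node LFU).
    // "searchLineageCache" is an illustrative name, not one from the commit.
    this.cache = cacheManager.getCache("searchLineageCache");
  }

  public void put(String key, Object value) {
    cache.put(key, value);
  }

  public Object get(String key) {
    Cache.ValueWrapper wrapper = cache.get(key);
    return wrapper == null ? null : wrapper.get();
  }
}
```

Because the Hazelcast members discover each other through Kubernetes DNS (`service-dns` pointing at the configured service name), multiple GMS replicas share one logical cache, which is the point of the new option; Caffeine remains the single-node default path.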