Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Kafka Streams SASL and SSL config #7417

Merged
merged 2 commits into from
Jun 24, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,18 +1,17 @@
package io.quarkus.kafka.streams.deployment;

import static io.quarkus.kafka.streams.runtime.KafkaStreamsPropertiesUtil.buildKafkaStreamsProperties;

import java.io.IOException;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes.ByteArraySerde;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.DefaultProductionExceptionHandler;
import org.apache.kafka.streams.errors.LogAndFailExceptionHandler;
import org.apache.kafka.streams.processor.DefaultPartitionGrouper;
import org.apache.kafka.streams.processor.FailOnInvalidTimestamp;
import org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;
import org.rocksdb.RocksDBException;
import org.rocksdb.Status;
import org.rocksdb.util.Environment;
Expand All @@ -31,17 +30,13 @@
import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem;
import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem;
import io.quarkus.deployment.pkg.NativeConfig;
import io.quarkus.kafka.streams.runtime.HotReplacementInterceptor;
import io.quarkus.kafka.streams.runtime.KafkaStreamsRecorder;
import io.quarkus.kafka.streams.runtime.KafkaStreamsRuntimeConfig;
import io.quarkus.kafka.streams.runtime.KafkaStreamsTopologyManager;
import io.quarkus.runtime.LaunchMode;
import io.quarkus.smallrye.health.deployment.spi.HealthBuildItem;

class KafkaStreamsProcessor {

private static final String STREAMS_OPTION_PREFIX = "kafka-streams.";

@BuildStep
void build(BuildProducer<FeatureBuildItem> feature,
BuildProducer<ReflectiveClassBuildItem> reflectiveClasses,
Expand Down Expand Up @@ -150,43 +145,6 @@ BeanContainerListenerBuildItem processBuildTimeConfig(KafkaStreamsRecorder recor
return new BeanContainerListenerBuildItem(recorder.configure(kafkaStreamsProperties));
}

private Properties buildKafkaStreamsProperties(LaunchMode launchMode) {
Config config = ConfigProvider.getConfig();
Properties kafkaStreamsProperties = new Properties();
for (String property : config.getPropertyNames()) {
if (isKafkaStreamsProperty(property)) {
includeKafkaStreamsProperty(config, kafkaStreamsProperties, property);
}
}

if (launchMode == LaunchMode.DEVELOPMENT) {
addHotReplacementInterceptor(kafkaStreamsProperties);
}

return kafkaStreamsProperties;
}

private void addHotReplacementInterceptor(Properties kafkaStreamsProperties) {
String interceptorConfig = HotReplacementInterceptor.class.getName();
Object originalInterceptorConfig = kafkaStreamsProperties
.get(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG));

if (originalInterceptorConfig != null) {
interceptorConfig = interceptorConfig + "," + originalInterceptorConfig;
}

kafkaStreamsProperties.put(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG), interceptorConfig);
}

private boolean isKafkaStreamsProperty(String property) {
return property.startsWith(STREAMS_OPTION_PREFIX);
}

private void includeKafkaStreamsProperty(Config config, Properties kafkaStreamsProperties, String property) {
kafkaStreamsProperties.setProperty(property.substring(STREAMS_OPTION_PREFIX.length()),
config.getValue(property, String.class));
}

@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
void configureAndLoadRocksDb(KafkaStreamsRecorder recorder, KafkaStreamsRuntimeConfig runtimeConfig) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
package io.quarkus.kafka.streams.runtime;

import java.util.Optional;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.streams.StreamsConfig;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;

import io.quarkus.runtime.LaunchMode;

/**
 * Static helper that collects Kafka Streams configuration properties from the
 * MicroProfile {@link Config}.
 * <p>
 * Properties are recognized by prefix ({@code kafka-streams.} for application-level
 * options, {@code quarkus.kafka-streams.} for Quarkus-level options); the prefix is
 * stripped before the property is copied into the returned {@link Properties}.
 */
public class KafkaStreamsPropertiesUtil {

    private static final String STREAMS_OPTION_PREFIX = "kafka-streams.";
    private static final String QUARKUS_STREAMS_OPTION_PREFIX = "quarkus." + STREAMS_OPTION_PREFIX;

    private KafkaStreamsPropertiesUtil() {
        // utility class with only static members — not meant to be instantiated
    }

    private static boolean isKafkaStreamsProperty(String prefix, String property) {
        return property.startsWith(prefix);
    }

    /**
     * Copies {@code property} (minus {@code prefix}) into {@code kafkaStreamsProperties},
     * skipping it entirely when the config source has no value for it.
     */
    private static void includeKafkaStreamsProperty(Config config, Properties kafkaStreamsProperties, String prefix,
            String property) {
        Optional<String> value = config.getOptionalValue(property, String.class);
        if (value.isPresent()) {
            kafkaStreamsProperties.setProperty(property.substring(prefix.length()), value.get());
        }
    }

    /**
     * Prepends the dev-mode {@link HotReplacementInterceptor} to the consumer
     * interceptor list, preserving any interceptors the user already configured.
     */
    private static void addHotReplacementInterceptor(Properties kafkaStreamsProperties) {
        String interceptorConfig = HotReplacementInterceptor.class.getName();
        Object originalInterceptorConfig = kafkaStreamsProperties
                .get(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG));

        if (originalInterceptorConfig != null) {
            interceptorConfig = interceptorConfig + "," + originalInterceptorConfig;
        }

        kafkaStreamsProperties.put(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG), interceptorConfig);
    }

    /**
     * Scans all known config property names and returns those starting with
     * {@code prefix}, with the prefix stripped.
     */
    private static Properties kafkaStreamsProperties(String prefix) {
        Properties kafkaStreamsProperties = new Properties();
        Config config = ConfigProvider.getConfig();
        for (String property : config.getPropertyNames()) {
            if (isKafkaStreamsProperty(prefix, property)) {
                includeKafkaStreamsProperty(config, kafkaStreamsProperties, prefix, property);
            }
        }

        return kafkaStreamsProperties;
    }

    /**
     * @return properties configured with the application-level {@code kafka-streams.} prefix
     */
    public static Properties appKafkaStreamsProperties() {
        return kafkaStreamsProperties(STREAMS_OPTION_PREFIX);
    }

    /**
     * @return properties configured with the {@code quarkus.kafka-streams.} prefix
     */
    public static Properties quarkusKafkaStreamsProperties() {
        return kafkaStreamsProperties(QUARKUS_STREAMS_OPTION_PREFIX);
    }

    /**
     * Builds the application-level Kafka Streams properties, additionally wiring in
     * the hot-replacement interceptor when running in dev mode.
     *
     * @param launchMode current launch mode; {@link LaunchMode#DEVELOPMENT} enables hot replacement
     * @return the assembled Kafka Streams properties
     */
    public static Properties buildKafkaStreamsProperties(LaunchMode launchMode) {
        Properties kafkaStreamsProperties = appKafkaStreamsProperties();

        if (launchMode == LaunchMode.DEVELOPMENT) {
            addHotReplacementInterceptor(kafkaStreamsProperties);
        }

        return kafkaStreamsProperties;
    }

}
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import org.rocksdb.RocksDB;

import io.quarkus.arc.Arc;
import io.quarkus.arc.runtime.BeanContainer;
import io.quarkus.arc.runtime.BeanContainerListener;
import io.quarkus.runtime.annotations.Recorder;

Expand All @@ -20,9 +21,13 @@ public void configureRuntimeProperties(KafkaStreamsRuntimeConfig runtimeConfig)
}

public BeanContainerListener configure(Properties properties) {
return container -> {
KafkaStreamsTopologyManager instance = container.instance(KafkaStreamsTopologyManager.class);
instance.configure(properties);
return new BeanContainerListener() {

@Override
public void created(BeanContainer container) {
KafkaStreamsTopologyManager instance = container.instance(KafkaStreamsTopologyManager.class);
instance.configure(properties);
}
};
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,53 @@ public class KafkaStreamsRuntimeConfig {
@ConfigItem
public List<String> topics;

/**
* The schema registry key.
*
* e.g. to diff between different registry impls / instances
* as they have this registry url under different property key.
*
* Red Hat / Apicurio - apicurio.registry.url
* Confluent - schema.registry.url
*/
@ConfigItem(defaultValue = "schema.registry.url")
public String schemaRegistryKey;

/**
* The schema registry url.
*/
@ConfigItem
public Optional<String> schemaRegistryUrl;

/**
* The security protocol to use
* See https://docs.confluent.io/current/streams/developer-guide/security.html#security-example
*/
@ConfigItem(name = "security.protocol")
public Optional<String> securityProtocol;

/**
* The SASL JAAS config.
*/
public SaslConfig sasl;

/**
* Kafka SSL config
*/
public SslConfig ssl;

@Override
public String toString() {
return "KafkaStreamsRuntimeConfig [applicationId=" + applicationId + ", bootstrapServers=" + bootstrapServers
+ ", applicationServer=" + applicationServer + ", topics=" + topics + "]";
return "KafkaStreamsRuntimeConfig{" +
"applicationId='" + applicationId + '\'' +
", bootstrapServers=" + bootstrapServers +
", applicationServer=" + applicationServer +
", topics=" + topics +
", schemaRegistryKey='" + schemaRegistryKey + '\'' +
", schemaRegistryUrl=" + schemaRegistryUrl +
", sasl=" + sasl +
", ssl=" + ssl +
'}';
}

public List<String> getTrimmedTopics() {
Expand Down
Loading