diff --git a/.travis/build.sh b/.travis/build.sh index a286ae98..45e0b697 100755 --- a/.travis/build.sh +++ b/.travis/build.sh @@ -68,24 +68,24 @@ elif [[ "$arch" != 'ppc64le' ]]; then exitIfError clearDockerEnv - mvn -e -V -B clean install -f testsuite -Pkafka-3_2_3 + mvn -e -V -B clean install -f testsuite -Pkafka-3_2_3 -DfailIfNoTests=false -Dtest=\!KeycloakRaftAuthorizationTests EXIT=$? exitIfError # Excluded by default to not exceed Travis job timeout if [ "SKIP_DISABLED" == "false" ]; then clearDockerEnv - mvn -e -V -B clean install -f testsuite -Pkafka-3_1_2 + mvn -e -V -B clean install -f testsuite -Pkafka-3_1_2 -DfailIfNoTests=false -Dtest=\!KeycloakRaftAuthorizationTests EXIT=$? exitIfError clearDockerEnv - mvn -e -V -B clean install -f testsuite -Pkafka-3_0_0 + mvn -e -V -B clean install -f testsuite -Pkafka-3_0_0 -DfailIfNoTests=false -Dtest=\!KeycloakRaftAuthorizationTests EXIT=$? exitIfError clearDockerEnv - mvn -e -V -B clean install -f testsuite -Pkafka-2_8_1 + mvn -e -V -B clean install -f testsuite -Pkafka-2_8_1 -DfailIfNoTests=false -Dtest=\!KeycloakRaftAuthorizationTests EXIT=$? exitIfError fi diff --git a/examples/docker/kafka-oauth-strimzi/compose-kraft.yml b/examples/docker/kafka-oauth-strimzi/compose-kraft.yml new file mode 100644 index 00000000..5d817551 --- /dev/null +++ b/examples/docker/kafka-oauth-strimzi/compose-kraft.yml @@ -0,0 +1,81 @@ +version: '3.5' + +services: + + #################################### KAFKA BROKER #################################### + kafka: + image: strimzi/example-kafka + build: kafka-oauth-strimzi/kafka/target + container_name: kafka + ports: + - 9091:9091 + - 9092:9092 + + # javaagent debug port + #- 5005:5005 + command: + - /bin/bash + - -c + - cd /opt/kafka && ./start.sh --kraft + + environment: + + # Java Debug + #KAFKA_DEBUG: y + #DEBUG_SUSPEND_FLAG: y + #JAVA_DEBUG_PORT: 5005 + + # + # KAFKA Configuration + # + LOG_DIR: /home/kafka/logs + KAFKA_PROCESS_ROLES: "broker,controller" + KAFKA_NODE_ID: "1" + KAFKA_CONTROLLER_QUORUM_VOTERS: "1@kafka:9091" + KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER + KAFKA_SASL_MECHANISM_CONTROLLER_PROTOCOL: PLAIN + + KAFKA_LISTENERS: "CONTROLLER://kafka:9091,CLIENT://kafka:9092" + KAFKA_ADVERTISED_LISTENERS: "CLIENT://kafka:9092" + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "CONTROLLER:SASL_PLAINTEXT,CLIENT:SASL_PLAINTEXT" + + KAFKA_INTER_BROKER_LISTENER_NAME: CLIENT + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: OAUTHBEARER + + KAFKA_PRINCIPAL_BUILDER_CLASS: "io.strimzi.kafka.oauth.server.OAuthKafkaPrincipalBuilder" + + KAFKA_LISTENER_NAME_CONTROLLER_SASL_ENABLED_MECHANISMS: PLAIN + KAFKA_LISTENER_NAME_CONTROLLER_PLAIN_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin-password\" user_admin=\"admin-password\" user_bobby=\"bobby-secret\" ;" + + KAFKA_LISTENER_NAME_CLIENT_SASL_ENABLED_MECHANISMS: OAUTHBEARER + KAFKA_LISTENER_NAME_CLIENT_OAUTHBEARER_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;" + KAFKA_LISTENER_NAME_CLIENT_OAUTHBEARER_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler + KAFKA_LISTENER_NAME_CLIENT_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS: io.strimzi.kafka.oauth.server.JaasServerOauthValidatorCallbackHandler + + KAFKA_SUPER_USERS: "User:admin,User:service-account-kafka-broker" + + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + + + # + # Strimzi OAuth Configuration + # + + # Authentication config + OAUTH_CLIENT_ID: 
"kafka-broker" + OAUTH_CLIENT_SECRET: "kafka-broker-secret" + OAUTH_TOKEN_ENDPOINT_URI: "http://${KEYCLOAK_HOST:-keycloak}:8080/auth/realms/${REALM:-demo}/protocol/openid-connect/token" + + # Validation config + OAUTH_VALID_ISSUER_URI: "http://${KEYCLOAK_HOST:-keycloak}:8080/auth/realms/${REALM:-demo}" + OAUTH_JWKS_ENDPOINT_URI: "http://${KEYCLOAK_HOST:-keycloak}:8080/auth/realms/${REALM:-demo}/protocol/openid-connect/certs" + #OAUTH_INTROSPECTION_ENDPOINT_URI: "http://${KEYCLOAK_HOST:-keycloak}:8080/auth/realms/${REALM:-demo}/protocol/openid-connect/token/introspect" + + + # username extraction from JWT token claim + OAUTH_USERNAME_CLAIM: preferred_username + OAUTH_CONNECT_TIMEOUT_SECONDS: "20" + + # For start.sh script to know where the keycloak is listening + KEYCLOAK_HOST: ${KEYCLOAK_HOST:-keycloak} + REALM: ${REALM:-demo} diff --git a/examples/docker/kafka-oauth-strimzi/kafka/simple_kafka_config.sh b/examples/docker/kafka-oauth-strimzi/kafka/simple_kafka_config.sh index 2ae04f26..38787ea3 100755 --- a/examples/docker/kafka-oauth-strimzi/kafka/simple_kafka_config.sh +++ b/examples/docker/kafka-oauth-strimzi/kafka/simple_kafka_config.sh @@ -52,29 +52,57 @@ done # # Generate output # -echo "#" -echo "# strimzi.properties" -echo "#" -echo broker.id=`pop_value broker.id 0` -echo num.network.threads=`pop_value num.network.threads 3` -echo num.io.threads=`pop_value num.io.threads 8` -echo socket.send.buffer.bytes=`pop_value socket.send.buffer.bytes 102400` -echo socket.receive.buffer.bytes=`pop_value socket.receive.buffer.bytes 102400` -echo socket.request.max.bytes=`pop_value socket.request.max.bytes 104857600` -echo log.dirs=`pop_value log.dirs /tmp/kafka-logs` -echo num.partitions=`pop_value num.partitions 1` -echo num.recovery.threads.per.data.dir=`pop_value num.recovery.threads.per.data.dir 1` -echo offsets.topic.replication.factor=`pop_value offsets.topic.replication.factor 1` -echo transaction.state.log.replication.factor=`pop_value transaction.state.log.replication.factor 1` -echo transaction.state.log.min.isr=`pop_value transaction.state.log.min.isr 1` -echo log.retention.hours=`pop_value log.retention.hours 168` -echo log.segment.bytes=`pop_value log.segment.bytes 1073741824` -echo log.retention.check.interval.ms=`pop_value log.retention.check.interval.ms 300000` -echo zookeeper.connect=`pop_value zookeeper.connect localhost:2181` -echo zookeeper.connection.timeout.ms=`pop_value zookeeper.connection.timeout.ms 6000` -echo group.initial.rebalance.delay.ms=`pop_value group.initial.rebalance.delay.ms 0` +if [[ "$1" == "--kraft" ]]; then + # + # Output kraft version of server.properties + # + echo "#" + echo "# strimzi.properties (kraft)" + echo "#" + echo process.roles=`pop_value process.roles broker,controller` + echo node.id=`pop_value node.id 1` + echo num.network.threads=`pop_value num.network.threads 3` + echo num.io.threads=`pop_value num.io.threads 8` + echo socket.send.buffer.bytes=`pop_value socket.send.buffer.bytes 102400` + echo socket.receive.buffer.bytes=`pop_value socket.receive.buffer.bytes 102400` + echo socket.request.max.bytes=`pop_value socket.request.max.bytes 104857600` + echo log.dirs=`pop_value log.dirs /tmp/kraft-combined-logs` + echo num.partitions=`pop_value num.partitions 1` + echo num.recovery.threads.per.data.dir=`pop_value num.recovery.threads.per.data.dir 1` + echo offsets.topic.replication.factor=`pop_value offsets.topic.replication.factor 1` + echo transaction.state.log.replication.factor=`pop_value transaction.state.log.replication.factor 1` + echo 
transaction.state.log.min.isr=`pop_value transaction.state.log.min.isr 1` + echo log.retention.hours=`pop_value log.retention.hours 168` + echo log.segment.bytes=`pop_value log.segment.bytes 1073741824` + echo log.retention.check.interval.ms=`pop_value log.retention.check.interval.ms 300000` + +elif [[ "$1" == "" ]]; then + echo "#" + echo "# strimzi.properties" + echo "#" + + echo broker.id=`pop_value broker.id 0` + echo num.network.threads=`pop_value num.network.threads 3` + echo num.io.threads=`pop_value num.io.threads 8` + echo socket.send.buffer.bytes=`pop_value socket.send.buffer.bytes 102400` + echo socket.receive.buffer.bytes=`pop_value socket.receive.buffer.bytes 102400` + echo socket.request.max.bytes=`pop_value socket.request.max.bytes 104857600` + echo log.dirs=`pop_value log.dirs /tmp/kafka-logs` + echo num.partitions=`pop_value num.partitions 1` + echo num.recovery.threads.per.data.dir=`pop_value num.recovery.threads.per.data.dir 1` + echo offsets.topic.replication.factor=`pop_value offsets.topic.replication.factor 1` + echo transaction.state.log.replication.factor=`pop_value transaction.state.log.replication.factor 1` + echo transaction.state.log.min.isr=`pop_value transaction.state.log.min.isr 1` + echo log.retention.hours=`pop_value log.retention.hours 168` + echo log.segment.bytes=`pop_value log.segment.bytes 1073741824` + echo log.retention.check.interval.ms=`pop_value log.retention.check.interval.ms 300000` + echo group.initial.rebalance.delay.ms=`pop_value group.initial.rebalance.delay.ms 0` +else + echo "Unsupported argument: $1" + exit 1 +fi # # Add what remains of KAFKA_* env vars # diff --git a/examples/docker/kafka-oauth-strimzi/kafka/start.sh b/examples/docker/kafka-oauth-strimzi/kafka/start.sh index 12d1a4b2..9c483c8c 100755 --- a/examples/docker/kafka-oauth-strimzi/kafka/start.sh +++ b/examples/docker/kafka-oauth-strimzi/kafka/start.sh @@ -14,12 +14,17 @@ wait_for_url "$URI/realms/${REALM:-demo}" "Waiting for realm '${REALM}' to be av if [ "$SERVER_PROPERTIES_FILE" == "" ]; then echo "Generating a new strimzi.properties file using ENV vars" - ./simple_kafka_config.sh | tee /tmp/strimzi.properties + ./simple_kafka_config.sh $1 | tee /tmp/strimzi.properties else echo "Using provided server.properties file: $SERVER_PROPERTIES_FILE" cp $SERVER_PROPERTIES_FILE /tmp/strimzi.properties fi +if [[ "$1" == "--kraft" ]]; then + KAFKA_CLUSTER_ID="$(/opt/kafka/bin/kafka-storage.sh random-uuid)" + /opt/kafka/bin/kafka-storage.sh format -t $KAFKA_CLUSTER_ID -c /tmp/strimzi.properties +fi + # add Strimzi kafka-oauth-* jars and their dependencies to classpath export CLASSPATH="/opt/kafka/libs/strimzi/*:$CLASSPATH" diff --git a/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/BearerTokenWithPayload.java b/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/BearerTokenWithPayload.java index 56820f14..d07437c0 100644 --- a/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/BearerTokenWithPayload.java +++ b/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/BearerTokenWithPayload.java @@ -4,6 +4,7 @@ */ package io.strimzi.kafka.oauth.common; +import com.fasterxml.jackson.databind.JsonNode; import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken; import com.fasterxml.jackson.databind.node.ObjectNode; import java.util.Set; @@ -24,18 +25,18 @@ public interface BearerTokenWithPayload extends OAuthBearerToken { /** - * Get the usage dependent object previously associated with this instance by calling {@link BearerTokenWithPayload#setPayload(Object)} 
+ * Get the usage dependent object previously associated with this instance by calling {@link BearerTokenWithPayload#setPayload(com.fasterxml.jackson.databind.JsonNode)} * * @return The associated object */ - Object getPayload(); + JsonNode getPayload(); /** * Associate a usage dependent object with this instance * * @param payload The object to associate with this instance */ - void setPayload(Object payload); + void setPayload(JsonNode payload); /** * Get groups associated with this token (principal). diff --git a/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/Config.java b/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/Config.java index 13d00513..dd6995a8 100644 --- a/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/Config.java +++ b/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/Config.java @@ -84,6 +84,8 @@ public class Config { private Map defaults; + Config delegate; + /** * Use this construtor if you only want to lookup configuration in system properties and env * without any default configuration. @@ -104,6 +106,15 @@ public Config(Properties p) { )); } + /** + * Use this constructor if you want to wrap another Config object and override some functionality that way + * + * @param delegate The Config object to delegate to + */ + public Config(Config delegate) { + this.delegate = delegate; + } + /** * Use this constructor to provide default values in case some configuration is not set through system properties or ENV. * @@ -140,6 +151,9 @@ public void validate() {} * @return Configuration value for specified key */ public String getValue(String key, String fallback) { + if (delegate != null) { + return delegate.getValue(key, fallback); + } // try system properties first String result = System.getProperty(key, null); @@ -174,7 +188,7 @@ public String getValue(String key, String fallback) { * @param key Config key * @return Config value */ - public String getValue(String key) { + public final String getValue(String key) { return getValue(key, null); } @@ -185,7 +199,7 @@ public String getValue(String key) { * @param fallback Fallback value * @return Config value */ - public int getValueAsInt(String key, int fallback) { + public final int getValueAsInt(String key, int fallback) { String result = getValue(key); return result != null ? Integer.parseInt(result) : fallback; } @@ -197,7 +211,7 @@ public int getValueAsInt(String key, int fallback) { * @param fallback Fallback value * @return Config value */ - public long getValueAsLong(String key, long fallback) { + public final long getValueAsLong(String key, long fallback) { String result = getValue(key); return result != null ? Long.parseLong(result) : fallback; } @@ -211,7 +225,7 @@ public long getValueAsLong(String key, long fallback) { * @param fallback Fallback value * @return Config value */ - public boolean getValueAsBoolean(String key, boolean fallback) { + public final boolean getValueAsBoolean(String key, boolean fallback) { String result = getValue(key); try { return result != null ? 
isTrue(result) : fallback; @@ -226,7 +240,7 @@ public boolean getValueAsBoolean(String key, boolean fallback) { * @param key Config key * @return Config value */ - public URI getValueAsURI(String key) { + public final URI getValueAsURI(String key) { String result = getValue(key); try { return URI.create(result); diff --git a/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/JSONUtil.java b/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/JSONUtil.java index a4a81561..0eb9b3d7 100644 --- a/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/JSONUtil.java +++ b/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/JSONUtil.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import java.io.ByteArrayOutputStream; @@ -14,6 +15,7 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -166,4 +168,27 @@ public static List asListOfString(JsonNode arrayOrString, String delimit return result; } + + /** + * Set an array attribute on a JSON object to a collection of Strings + * + * @param object The target JSON object + * @param attrName An attribute name + * @param elements The collection of strings + * @return Newly create ArrayNode + */ + public static ArrayNode setArrayOfStringsIfNotNull(JsonNode object, String attrName, Collection elements) { + if (elements == null) { + return null; + } + if (!(object instanceof ObjectNode)) { + throw new IllegalArgumentException("Unexpected JSON Node type (not ObjectNode): " + object.getClass()); + } + + ArrayNode list = ((ObjectNode) object).putArray(attrName); + for (String g: elements) { + list.add(g); + } + return list; + } } diff --git a/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/TokenInfo.java b/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/TokenInfo.java index 4d254072..aa6fdcb1 100644 --- a/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/TokenInfo.java +++ b/oauth-common/src/main/java/io/strimzi/kafka/oauth/common/TokenInfo.java @@ -105,14 +105,38 @@ public TokenInfo(JsonNode payload, String token, String principal, Set g * @param expiresAtMs The token's `expires at` time in millis */ public TokenInfo(String token, String scope, String principal, Set groups, long issuedAtMs, long expiresAtMs) { + this(token, + Collections.unmodifiableSet(new HashSet<>(Arrays.asList(scope != null ? 
scope.split(" ") : new String[0]))), + principal, + groups, + issuedAtMs, + expiresAtMs, + null); + } + + /** + * + * @param token The raw access token + * @param scopes The list of scopes + * @param principal The extracted user ID + * @param groups A set of groups extracted from JWT token or authorization server's inspect endpoint response + * @param issuedAtMs The token's `issued at` time in millis + * @param expiresAtMs The token's `expires at` time in millis + * @param payload The body of the JWT token or composed from authorization server's introspection endpoint response + */ + @SuppressFBWarnings("EI_EXPOSE_REP2") + // See: https://spotbugs.readthedocs.io/en/stable/bugDescriptions.html#ei2-may-expose-internal-representation-by-incorporating-reference-to-mutable-object-ei-expose-rep2 + public TokenInfo(String token, Set scopes, String principal, Set groups, long issuedAtMs, long expiresAtMs, JsonNode payload) { this.token = token; this.principal = principal; this.groups = groups != null ? Collections.unmodifiableSet(groups) : null; this.issuedAt = issuedAtMs; this.expiresAt = expiresAtMs; - - String[] parsedScopes = scope != null ? scope.split(" ") : new String[0]; - scopes = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(parsedScopes))); + this.scopes = scopes; + if (payload != null && !(payload instanceof ObjectNode)) { + throw new IllegalArgumentException("Unexpected JSON Node type (not ObjectNode): " + payload.getClass()); + } + this.payload = (ObjectNode) payload; } /** diff --git a/oauth-keycloak-authorizer/pom.xml b/oauth-keycloak-authorizer/pom.xml index c84a57f8..f8374af1 100644 --- a/oauth-keycloak-authorizer/pom.xml +++ b/oauth-keycloak-authorizer/pom.xml @@ -54,6 +54,11 @@ kafka-metadata provided + + com.github.spotbugs + spotbugs-annotations + provided + junit junit diff --git a/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/AuthzConfig.java b/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/AuthzConfig.java index a0933a93..17e6fdca 100644 --- a/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/AuthzConfig.java +++ b/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/AuthzConfig.java @@ -111,4 +111,8 @@ public class AuthzConfig extends Config { AuthzConfig(Properties p) { super(p); } + + AuthzConfig(Config config) { + super(config); + } } diff --git a/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/KeycloakAuthorizer.java b/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/KeycloakAuthorizer.java new file mode 100644 index 00000000..738fc42f --- /dev/null +++ b/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/KeycloakAuthorizer.java @@ -0,0 +1,175 @@ +/* + * Copyright 2017-2023, Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */
+package io.strimzi.kafka.oauth.server.authorizer;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import io.strimzi.kafka.oauth.common.Config;
+import io.strimzi.kafka.oauth.common.ConfigException;
+import io.strimzi.kafka.oauth.server.OAuthKafkaPrincipalBuilder;
+import org.apache.kafka.common.Uuid;
+import org.apache.kafka.metadata.authorizer.AclMutator;
+import org.apache.kafka.metadata.authorizer.ClusterMetadataAuthorizer;
+import org.apache.kafka.metadata.authorizer.StandardAcl;
+import org.apache.kafka.metadata.authorizer.StandardAuthorizer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+/**
+ * The Keycloak authorizer implementation that supports KRaft mode and delegates to
+ * org.apache.kafka.metadata.authorizer.StandardAuthorizer if strimzi.authorization.delegate.to.kafka.acl
+ * is set to true.
+ * <p>
+ * This authorizer auto-detects whether the broker runs in KRaft mode or not based on the presence and value of process.roles config option.
+ * When in KRaft mode the authorizer relies on strimzi.authorization.reuse.grants behaviour, and automatically enables this mode.
+ * <p>
+ * KeycloakAuthorizer works in conjunction with JaasServerOauthValidatorCallbackHandler, and requires
+ * {@link OAuthKafkaPrincipalBuilder} to be configured as 'principal.builder.class' in 'server.properties' file.
+ * <p>
+ * To install this authorizer in Kafka, specify the following in your 'server.properties':
+ * <pre>
+ *     authorizer.class.name=io.strimzi.kafka.oauth.server.authorizer.KeycloakAuthorizer
+ *     principal.builder.class=io.strimzi.kafka.oauth.server.OAuthKafkaPrincipalBuilder
+ * </pre>
+ * The functionality of this authorizer is mostly inherited from KeycloakRBACAuthorizer. + * For more configuration options see README.md and {@link io.strimzi.kafka.oauth.server.authorizer.KeycloakRBACAuthorizer}. + */ +public class KeycloakAuthorizer extends KeycloakRBACAuthorizer implements ClusterMetadataAuthorizer { + + static final Logger log = LoggerFactory.getLogger(KeycloakAuthorizer.class); + + private ClusterMetadataAuthorizer kraftAuthorizer; + + private boolean isKRaft; + private AclMutator mutator; + + @Override + AuthzConfig convertToAuthzConfig(Map configs) { + AuthzConfig superConfig = super.convertToAuthzConfig(configs); + isKRaft = detectKRaft(configs); + if (isKRaft) { + log.debug("Detected Kraft mode ('process.roles' configured)"); + return new AuthzConfigWithForcedReuseGrants(superConfig); + } + return superConfig; + } + + boolean detectKRaft(Map configs) { + // auto-detect KRAFT mode + Object prop = configs.get("process.roles"); + String processRoles = prop != null ? String.valueOf(prop) : null; + return processRoles != null && processRoles.length() > 0; + } + + @Override + void setupDelegateAuthorizer(Map configs) { + if (isKRaft) { + try { + kraftAuthorizer = new StandardAuthorizer(); + setDelegate(kraftAuthorizer); + log.debug("Using StandardAuthorizer (Kraft based) as a delegate"); + } catch (Exception e) { + throw new ConfigException("Kraft mode detected ('process.roles' configured), but failed to instantiate org.apache.kafka.metadata.authorizer.StandardAuthorizer", e); + } + } + super.setupDelegateAuthorizer(configs); + } + + /** + * Set the mutator object which should be used for creating and deleting ACLs. + */ + @SuppressFBWarnings("EI_EXPOSE_REP2") + // See https://spotbugs.readthedocs.io/en/stable/bugDescriptions.html#ei2-may-expose-internal-representation-by-incorporating-reference-to-mutable-object-ei-expose-rep2 + public void setAclMutator(AclMutator aclMutator) { + if (kraftAuthorizer != null) { + kraftAuthorizer.setAclMutator(aclMutator); + } + this.mutator = aclMutator; + } + + /** + * Get the mutator object which should be used for creating and deleting ACLs. + * + * @throws org.apache.kafka.common.errors.NotControllerException + * If the aclMutator was not set. + */ + @SuppressFBWarnings("EI_EXPOSE_REP") + // See https://spotbugs.readthedocs.io/en/stable/bugDescriptions.html#ei-may-expose-internal-representation-by-returning-reference-to-mutable-object-ei-expose-rep + public AclMutator aclMutatorOrException() { + if (kraftAuthorizer != null) { + return kraftAuthorizer.aclMutatorOrException(); + } + return mutator; + } + + /** + * Complete the initial load of the cluster metadata authorizer, so that all + * principals can use it. + */ + public void completeInitialLoad() { + if (kraftAuthorizer != null) { + kraftAuthorizer.completeInitialLoad(); + } + } + + /** + * Complete the initial load of the cluster metadata authorizer with an exception, + * indicating that the loading process has failed. + */ + public void completeInitialLoad(Exception e) { + if (kraftAuthorizer != null) { + kraftAuthorizer.completeInitialLoad(e); + } + log.error("Failed to load authorizer cluster metadata", e); + } + + /** + * Load the ACLs in the given map. Anything not in the map will be removed. + * The authorizer will also wait for this initial snapshot load to complete when + * coming up. + */ + public void loadSnapshot(Map acls) { + if (kraftAuthorizer != null) { + kraftAuthorizer.loadSnapshot(acls); + } + } + + /** + * Add a new ACL. 
Any ACL with the same ID will be replaced. + */ + public void addAcl(Uuid id, StandardAcl acl) { + if (kraftAuthorizer == null) { + throw new UnsupportedOperationException("StandardAuthorizer ACL delegation not enabled"); + } + kraftAuthorizer.addAcl(id, acl); + } + + /** + * Remove the ACL with the given ID. + */ + public void removeAcl(Uuid id) { + if (kraftAuthorizer == null) { + throw new UnsupportedOperationException("StandardAuthorizer ACL delegation not enabled"); + } + kraftAuthorizer.removeAcl(id); + } + + private static class AuthzConfigWithForcedReuseGrants extends AuthzConfig { + AuthzConfigWithForcedReuseGrants(Config superConfig) { + super(superConfig); + } + + @Override + public String getValue(String key, String fallback) { + if (AuthzConfig.STRIMZI_AUTHORIZATION_REUSE_GRANTS.equals(key)) { + log.debug("Configuration option '" + AuthzConfig.STRIMZI_AUTHORIZATION_REUSE_GRANTS + "' forced to 'true'"); + return "true"; + } + return super.getValue(key, fallback); + } + } +} diff --git a/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/KeycloakRBACAuthorizer.java b/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/KeycloakRBACAuthorizer.java index 18447161..19bb6f7f 100644 --- a/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/KeycloakRBACAuthorizer.java +++ b/oauth-keycloak-authorizer/src/main/java/io/strimzi/kafka/oauth/server/authorizer/KeycloakRBACAuthorizer.java @@ -31,7 +31,6 @@ import org.apache.kafka.common.acl.AclBindingFilter; import org.apache.kafka.common.resource.ResourcePattern; import org.apache.kafka.common.security.auth.KafkaPrincipal; -import org.apache.kafka.metadata.authorizer.StandardAuthorizer; import org.apache.kafka.server.authorizer.AclCreateResult; import org.apache.kafka.server.authorizer.AclDeleteResult; import org.apache.kafka.server.authorizer.Action; @@ -203,7 +202,7 @@ public class KeycloakRBACAuthorizer implements Authorizer { @Override public void configure(Map configs) { - AuthzConfig config = convertToCommonConfig(configs); + AuthzConfig config = convertToAuthzConfig(configs); String pbclass = (String) configs.get("principal.builder.class"); if (!PRINCIPAL_BUILDER_CLASS.equals(pbclass) && !DEPRECATED_PRINCIPAL_BUILDER_CLASS.equals(pbclass)) { @@ -284,25 +283,39 @@ public void configure(Map configs) { } } - private void setupDelegateAuthorizer(Map configs) { - // auto-detect KRAFT mode - Object prop = configs.get("process.roles"); - String processRoles = prop != null ? String.valueOf(prop) : null; - if (processRoles != null && processRoles.length() > 0) { - try { - log.debug("Detected Kraft mode ('process.roles' configured)"); - delegate = new StandardAuthorizer(); - log.debug("Using StandardAuthorizer (Kraft based) as a delegate"); - } catch (Exception e) { - throw new ConfigException("Kraft mode detected ('process.roles' configured), but failed to instantiate org.apache.kafka.metadata.authorizer.StandardAuthorizer", e); - } - } + /** + * The subclass can override this method to provide configuration overrides + * + * @param configs Original configuration map passed in by Kafka broker + * @return AuthzConfig instance + */ + AuthzConfig convertToAuthzConfig(Map configs) { + return convertToCommonConfig(configs); + } + + /** + * This method is only called if delegateToKafkaACL is enabled. + * It is responsible for instantiating the Authorizer delegate instance. 
+ * + * @param configs The configuration that may be used to decide which delegate class to instantiate + */ + void setupDelegateAuthorizer(Map configs) { if (delegate == null) { log.debug("Using AclAuthorizer (ZooKeeper based) as a delegate"); delegate = new AclAuthorizer(); } } + + /** + * Allows setting the delegate by a subclass overriding {@link #setupDelegateAuthorizer(Map)} method. + * + * @param delegate An authorizer instantiated as a delegate + */ + void setDelegate(Authorizer delegate) { + this.delegate = delegate; + } + private void configureHttpTimeouts(AuthzConfig config) { connectTimeoutSeconds = ConfigUtil.getTimeoutConfigWithFallbackLookup(config, AuthzConfig.STRIMZI_AUTHORIZATION_CONNECT_TIMEOUT_SECONDS, ClientConfig.OAUTH_CONNECT_TIMEOUT_SECONDS); readTimeoutSeconds = ConfigUtil.getTimeoutConfigWithFallbackLookup(config, AuthzConfig.STRIMZI_AUTHORIZATION_READ_TIMEOUT_SECONDS, ClientConfig.OAUTH_READ_TIMEOUT_SECONDS); @@ -475,6 +488,7 @@ public List authorize(AuthorizableRequestContext requestCon if (!(principal instanceof OAuthKafkaPrincipal)) { // If user wasn't authenticated over OAuth, and simple ACL delegation is enabled // we delegate to simple ACL + result = delegateIfRequested(requestContext, actions, null); addAuthzMetricSuccessTime(startTime); @@ -486,16 +500,14 @@ public List authorize(AuthorizableRequestContext requestCon // If not, fetch authorization grants and store them in the token // - OAuthKafkaPrincipal jwtPrincipal = (OAuthKafkaPrincipal) principal; - - BearerTokenWithPayload token = jwtPrincipal.getJwt(); + BearerTokenWithPayload token = ((OAuthKafkaPrincipal) principal).getJwt(); if (denyIfTokenInvalid(token)) { addAuthzMetricSuccessTime(startTime); return Collections.nCopies(actions.size(), AuthorizationResult.DENIED); } - grants = (JsonNode) token.getPayload(); + grants = token.getPayload(); if (grants == null) { log.debug("No grants yet for user: {}", principal); @@ -658,7 +670,7 @@ private static JsonNode lookupGrantsInExistingSessions(BearerTokenWithPayload to BearerTokenWithPayload existing = sessions.findFirst(t -> t.value().equals(token.value()) && t.getPayload() != null ); - return existing != null ? (JsonNode) existing.getPayload() : null; + return existing != null ? 
existing.getPayload() : null; } static List validateScopes(List scopes) { @@ -866,8 +878,8 @@ private void refreshGrants() { refreshed = t; continue; } - Object oldGrants = t.getPayload(); - Object newGrants = refreshed.getPayload(); + JsonNode oldGrants = t.getPayload(); + JsonNode newGrants = refreshed.getPayload(); if (newGrants == null) { newGrants = JSONUtil.newObjectNode(); } @@ -992,5 +1004,4 @@ private void addGrantsHttpMetricErrorTime(Throwable e, long startTimeMs) { metrics.addTime(grantsSensorKeyProducer.errorKey(e), System.currentTimeMillis() - startTimeMs); } } - } diff --git a/oauth-server-plain/src/test/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalBuilderTest.java b/oauth-server-plain/src/test/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalBuilderTest.java index 5fc21f44..360225c2 100644 --- a/oauth-server-plain/src/test/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalBuilderTest.java +++ b/oauth-server-plain/src/test/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalBuilderTest.java @@ -5,6 +5,7 @@ package io.strimzi.kafka.oauth.server; import io.strimzi.kafka.oauth.common.BearerTokenWithPayload; +import io.strimzi.kafka.oauth.common.JSONUtil; import io.strimzi.kafka.oauth.services.Credentials; import io.strimzi.kafka.oauth.services.Principals; import io.strimzi.kafka.oauth.services.Services; @@ -34,7 +35,7 @@ public void testPreviousStoredPrincipalIsReused() { // Simulate authentication using OAuth over PLAIN BearerTokenWithPayload token = mock(BearerTokenWithPayload.class); - when(token.getPayload()).thenReturn("jwttoken"); + when(token.getPayload()).thenReturn(JSONUtil.asJson("{}")); OAuthKafkaPrincipal authenticatedPrincipal = new OAuthKafkaPrincipal(KafkaPrincipal.USER_TYPE, USERNAME, token); credentials.storeCredentials(USERNAME, authenticatedPrincipal); diff --git a/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/BearerTokenWithGrants.java b/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/BearerTokenWithGrants.java new file mode 100644 index 00000000..52ee23c9 --- /dev/null +++ b/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/BearerTokenWithGrants.java @@ -0,0 +1,142 @@ +/* + * Copyright 2017-2023, Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */ +package io.strimzi.kafka.oauth.server; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.strimzi.kafka.oauth.common.BearerTokenWithPayload; +import io.strimzi.kafka.oauth.common.JSONUtil; +import io.strimzi.kafka.oauth.common.TimeUtil; +import io.strimzi.kafka.oauth.common.TokenInfo; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; + +class BearerTokenWithGrants implements BearerTokenWithPayload { + + private final TokenInfo ti; + private volatile JsonNode payload; + + BearerTokenWithGrants(TokenInfo ti) { + if (ti == null) { + throw new IllegalArgumentException("TokenInfo == null"); + } + this.ti = ti; + } + + @Override + public synchronized JsonNode getPayload() { + return payload; + } + + @Override + public synchronized void setPayload(JsonNode value) { + payload = value; + } + + @Override + public Set getGroups() { + return ti.groups(); + } + + @Override + public ObjectNode getJSON() { + return ti.payload(); + } + + @Override + public String value() { + return ti.token(); + } + + @Override + public Set scope() { + return ti.scope(); + } + + @Override + public long lifetimeMs() { + return ti.expiresAtMs(); + } + + @Override + public String principalName() { + return ti.principal(); + } + + @Override + public Long startTimeMs() { + return ti.issuedAtMs(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BearerTokenWithGrants that = (BearerTokenWithGrants) o; + return Objects.equals(ti, that.ti); + } + + @Override + public int hashCode() { + return Objects.hash(ti); + } + + @Override + public String toString() { + return "BearerTokenWithPayloadImpl (principalName: " + ti.principal() + ", groups: " + ti.groups() + ", lifetimeMs: " + + ti.expiresAtMs() + " [" + TimeUtil.formatIsoDateTimeUTC(ti.expiresAtMs()) + " UTC], startTimeMs: " + + ti.issuedAtMs() + " [" + TimeUtil.formatIsoDateTimeUTC(ti.issuedAtMs()) + " UTC], scope: " + ti.scope() + ")"; + } + + static class Serde { + + private static final String TOKEN = "t"; + private static final String SCOPES = "sc"; + private static final String GROUPS = "g"; + private static final String PRINCIPAL = "n"; + private static final String START_TIME = "st"; + private static final String EXPIRY_TIME = "e"; + private static final String TOKEN_CLAIMS = "j"; + private static final String EXTRA_PAYLOAD = "p"; + + + public byte[] serialize(BearerTokenWithGrants token) throws IOException { + ObjectNode object = JSONUtil.newObjectNode(); + object.put(PRINCIPAL, token.principalName()); + JSONUtil.setArrayOfStringsIfNotNull(object, GROUPS, token.getGroups()); + JSONUtil.setArrayOfStringsIfNotNull(object, SCOPES, token.scope()); + object.put(TOKEN, token.value()); + object.put(START_TIME, token.startTimeMs()); + object.put(EXPIRY_TIME, token.lifetimeMs()); + object.set(TOKEN_CLAIMS, token.getJSON()); + object.set(EXTRA_PAYLOAD, token.getPayload()); + return JSONUtil.MAPPER.writeValueAsBytes(object); + } + + public BearerTokenWithGrants deserialize(byte[] bytes) throws IOException { + ObjectNode object = JSONUtil.MAPPER.readValue(bytes, ObjectNode.class); + JsonNode groups = object.get(GROUPS); + JsonNode scopes = object.get(SCOPES); + JsonNode json = object.get(TOKEN_CLAIMS); + JsonNode payload = object.get(EXTRA_PAYLOAD); + BearerTokenWithGrants result = new BearerTokenWithGrants( + new TokenInfo(object.get(TOKEN).asText(), + 
scopes != null && scopes.isArray() ? new HashSet<>(JSONUtil.asListOfString(scopes, ",")) : null, + object.get(PRINCIPAL).asText(), + groups != null && groups.isArray() ? new HashSet<>(JSONUtil.asListOfString(groups, ",")) : null, + object.get(START_TIME).asLong(), + object.get(EXPIRY_TIME).asLong(), + json.isNull() ? null : json)); + + if (!payload.isNull()) { + result.setPayload(payload); + } + return result; + } + } +} diff --git a/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/JaasServerOauthValidatorCallbackHandler.java b/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/JaasServerOauthValidatorCallbackHandler.java index f082f9f1..505d419c 100644 --- a/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/JaasServerOauthValidatorCallbackHandler.java +++ b/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/JaasServerOauthValidatorCallbackHandler.java @@ -4,14 +4,11 @@ */ package io.strimzi.kafka.oauth.server; -import com.fasterxml.jackson.databind.node.ObjectNode; import io.strimzi.kafka.oauth.common.Config; import io.strimzi.kafka.oauth.common.ConfigException; import io.strimzi.kafka.oauth.common.ConfigUtil; -import io.strimzi.kafka.oauth.common.BearerTokenWithPayload; import io.strimzi.kafka.oauth.common.IOUtil; import io.strimzi.kafka.oauth.common.PrincipalExtractor; -import io.strimzi.kafka.oauth.common.TimeUtil; import io.strimzi.kafka.oauth.jsonpath.JsonPathFilterQuery; import io.strimzi.kafka.oauth.metrics.IntrospectValidationSensorKeyProducer; import io.strimzi.kafka.oauth.metrics.JwksValidationSensorKeyProducer; @@ -40,9 +37,7 @@ import java.net.URI; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Properties; -import java.util.Set; import java.util.function.Supplier; import static io.strimzi.kafka.oauth.common.DeprecationUtil.isAccessTokenJwt; @@ -605,7 +600,7 @@ private void handleCallback(OAuthBearerValidatorCallback callback) { debugLogToken(token); TokenInfo ti = validateToken(token); - callback.token(new BearerTokenWithPayloadImpl(ti)); + callback.token(new BearerTokenWithGrants(ti)); if (log.isDebugEnabled()) { log.debug("Set validated token on callback: " + callback.token()); } @@ -732,84 +727,6 @@ public int getReadTimeout() { return readTimeout; } - static class BearerTokenWithPayloadImpl implements BearerTokenWithPayload { - - private final TokenInfo ti; - private volatile Object payload; - - BearerTokenWithPayloadImpl(TokenInfo ti) { - if (ti == null) { - throw new IllegalArgumentException("TokenInfo == null"); - } - this.ti = ti; - } - - @Override - public synchronized Object getPayload() { - return payload; - } - - @Override - public synchronized void setPayload(Object value) { - payload = value; - } - - @Override - public Set getGroups() { - return ti.groups(); - } - - @Override - public ObjectNode getJSON() { - return ti.payload(); - } - - @Override - public String value() { - return ti.token(); - } - - @Override - public Set scope() { - return ti.scope(); - } - - @Override - public long lifetimeMs() { - return ti.expiresAtMs(); - } - - @Override - public String principalName() { - return ti.principal(); - } - - @Override - public Long startTimeMs() { - return ti.issuedAtMs(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - BearerTokenWithPayloadImpl that = (BearerTokenWithPayloadImpl) o; - return Objects.equals(ti, that.ti); - } - - @Override - public int hashCode() { - return Objects.hash(ti); - } 
- - @Override - public String toString() { - return "BearerTokenWithPayloadImpl (principalName: " + ti.principal() + ", groups: " + ti.groups() + ", lifetimeMs: " + - ti.expiresAtMs() + " [" + TimeUtil.formatIsoDateTimeUTC(ti.expiresAtMs()) + " UTC], startTimeMs: " + - ti.issuedAtMs() + " [" + TimeUtil.formatIsoDateTimeUTC(ti.issuedAtMs()) + " UTC], scope: " + ti.scope() + ")"; - } - } - protected String getConfigId() { if (validator == null) { throw new IllegalStateException("This method can only be invoked after the validator was configured"); diff --git a/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalBuilder.java b/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalBuilder.java index 6882f649..91831c9b 100644 --- a/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalBuilder.java +++ b/oauth-server/src/main/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalBuilder.java @@ -9,6 +9,11 @@ import io.strimzi.kafka.oauth.services.Services; import org.apache.kafka.common.Configurable; import org.apache.kafka.common.config.internals.BrokerSecurityConfigs; +import org.apache.kafka.common.errors.SerializationException; +import org.apache.kafka.common.message.DefaultPrincipalData; +import org.apache.kafka.common.protocol.ByteBufferAccessor; +import org.apache.kafka.common.protocol.MessageUtil; +import org.apache.kafka.common.protocol.types.RawTaggedField; import org.apache.kafka.common.security.auth.AuthenticationContext; import org.apache.kafka.common.security.auth.KafkaPrincipal; import org.apache.kafka.common.security.auth.SaslAuthenticationContext; @@ -18,9 +23,11 @@ import org.apache.kafka.common.security.plain.internals.PlainSaslServer; import javax.security.sasl.SaslServer; +import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.nio.ByteBuffer; import java.security.PrivilegedAction; import java.util.Collections; import java.util.List; @@ -157,4 +164,51 @@ public KafkaPrincipal build(AuthenticationContext context) { return super.build(context); } + + + @Override + public byte[] serialize(KafkaPrincipal principal) { + if (principal instanceof OAuthKafkaPrincipal) { + DefaultPrincipalData data = new DefaultPrincipalData() + .setType(principal.getPrincipalType()) + .setName(principal.getName()) + .setTokenAuthenticated(principal.tokenAuthenticated()); + BearerTokenWithPayload token = ((OAuthKafkaPrincipal) principal).getJwt(); + if (token instanceof BearerTokenWithGrants) { + try { + data.unknownTaggedFields().add(new RawTaggedField(575, new BearerTokenWithGrants.Serde().serialize((BearerTokenWithGrants) token))); + } catch (IOException e) { + throw new SerializationException("Failed to serialize OAuthKafkaPrincipal", e); + } + } + + return MessageUtil.toVersionPrefixedBytes(DefaultPrincipalData.HIGHEST_SUPPORTED_VERSION, data); + } + return super.serialize(principal); + } + + @Override + public KafkaPrincipal deserialize(byte[] bytes) { + ByteBuffer buffer = ByteBuffer.wrap(bytes); + short version = buffer.getShort(); + if (version < DefaultPrincipalData.LOWEST_SUPPORTED_VERSION || version > DefaultPrincipalData.HIGHEST_SUPPORTED_VERSION) { + throw new SerializationException("Invalid principal data version " + version); + } + + DefaultPrincipalData data = new DefaultPrincipalData(new ByteBufferAccessor(buffer), version); + List unknownFields = data.unknownTaggedFields(); + if (unknownFields.size() > 0) { + 
RawTaggedField field = unknownFields.get(0); + if (field.tag() == 575) { + try { + OAuthKafkaPrincipal result = new OAuthKafkaPrincipal(data.type(), data.name(), new BearerTokenWithGrants.Serde().deserialize(field.data())); + result.tokenAuthenticated(data.tokenAuthenticated()); + return result; + } catch (IOException e) { + throw new SerializationException("Failed to de-serialize OAuthKafkaPrincipal", e); + } + } + } + return new KafkaPrincipal(data.type(), data.name(), data.tokenAuthenticated()); + } } diff --git a/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/MockBearerTokenWithPayload.java b/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/MockBearerTokenWithPayload.java index 8058ab51..12519d82 100644 --- a/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/MockBearerTokenWithPayload.java +++ b/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/MockBearerTokenWithPayload.java @@ -4,6 +4,7 @@ */ package io.strimzi.kafka.oauth.server; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import io.strimzi.kafka.oauth.common.BearerTokenWithPayload; @@ -20,9 +21,9 @@ public class MockBearerTokenWithPayload implements BearerTokenWithPayload { private final long lifetime; private final Set scopes; private final String token; - private Object payload; + private JsonNode payload; - MockBearerTokenWithPayload(String principalName, Set groups, long createTime, long lifetime, String scope, String token, Object payload) { + MockBearerTokenWithPayload(String principalName, Set groups, long createTime, long lifetime, String scope, String token, JsonNode payload) { this.principalName = principalName; this.groups = groups; this.createTime = createTime; @@ -38,12 +39,12 @@ public class MockBearerTokenWithPayload implements BearerTokenWithPayload { } @Override - public Object getPayload() { + public JsonNode getPayload() { return payload; } @Override - public void setPayload(Object payload) { + public void setPayload(JsonNode payload) { this.payload = payload; } diff --git a/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalTest.java b/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalTest.java index 3128338f..90738e1d 100644 --- a/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalTest.java +++ b/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/OAuthKafkaPrincipalTest.java @@ -22,7 +22,7 @@ public class OAuthKafkaPrincipalTest { public void testEquals() { BearerTokenWithPayload token = new MockBearerTokenWithPayload("service-account-my-client", new HashSet<>(Arrays.asList("group1", "group2")), - System.currentTimeMillis(), System.currentTimeMillis() + 60000, null, "BEARER-TOKEN-9823eh982u", "Whatever"); + System.currentTimeMillis(), System.currentTimeMillis() + 60000, null, "BEARER-TOKEN-9823eh982u", JSONUtil.asJson("{}")); OAuthKafkaPrincipal principal = new OAuthKafkaPrincipal("User", "service-account-my-client", token); @@ -72,7 +72,7 @@ public void testJwtPrincipal() throws IOException { JsonNode parsed = JSONUtil.readJSON(json, JsonNode.class); TokenInfo tki = new TokenInfo(parsed, rawToken, "bob"); - BearerTokenWithPayload jwt = new JaasServerOauthValidatorCallbackHandler.BearerTokenWithPayloadImpl(tki); + BearerTokenWithPayload jwt = new BearerTokenWithGrants(tki); OAuthKafkaPrincipal principalJwt = new OAuthKafkaPrincipal("User", "bob", jwt); Assert.assertEquals("Can access parsed JWT", parsed, principalJwt.getJwt().getJSON()); 
diff --git a/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/OAuthSessionAuthorizerTest.java b/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/OAuthSessionAuthorizerTest.java index 880b666e..6eccbf34 100644 --- a/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/OAuthSessionAuthorizerTest.java +++ b/oauth-server/src/test/java/io/strimzi/kafka/oauth/server/OAuthSessionAuthorizerTest.java @@ -106,7 +106,7 @@ private void testOAuthUserWithDelegate(Authorizer authorizer, MockAuthorizer del TokenInfo tokenInfo = new TokenInfo("accesstoken123", null, "User:bob", new HashSet<>(Arrays.asList("group1", "group2")), System.currentTimeMillis() - 100000, System.currentTimeMillis() + 100000); - BearerTokenWithPayload token = new JaasServerOauthValidatorCallbackHandler.BearerTokenWithPayloadImpl(tokenInfo); + BearerTokenWithPayload token = new BearerTokenWithGrants(tokenInfo); AuthorizableRequestContext ctx = requestContext(new OAuthKafkaPrincipal("User", "bob", token)); @@ -136,7 +136,7 @@ public void testOAuthUserWithExpiredTokenWithDelegate(Authorizer authorizer, Moc TokenInfo tokenInfo = new TokenInfo("accesstoken234", null, "User:bob", null, System.currentTimeMillis() - 200000, System.currentTimeMillis() - 100000); - BearerTokenWithPayload token = new JaasServerOauthValidatorCallbackHandler.BearerTokenWithPayloadImpl(tokenInfo); + BearerTokenWithPayload token = new BearerTokenWithGrants(tokenInfo); AuthorizableRequestContext ctx = requestContext(new OAuthKafkaPrincipal("User", "bob", token)); @@ -229,7 +229,7 @@ private void testOAuthUserWithoutDelegate(Authorizer authorizer) throws Exceptio TokenInfo tokenInfo = new TokenInfo("accesstoken123", null, "User:bob", null, System.currentTimeMillis() - 100000, System.currentTimeMillis() + 100000); - BearerTokenWithPayload token = new JaasServerOauthValidatorCallbackHandler.BearerTokenWithPayloadImpl(tokenInfo); + BearerTokenWithPayload token = new BearerTokenWithGrants(tokenInfo); AuthorizableRequestContext ctx = requestContext(new OAuthKafkaPrincipal("User", "bob", token)); @@ -253,7 +253,7 @@ private void testOAuthUserWithExpiredTokenWithoutDelegate(Authorizer authorizer) TokenInfo tokenInfo = new TokenInfo("accesstoken234", null, "User:bob", null, System.currentTimeMillis() - 200000, System.currentTimeMillis() - 100000); - BearerTokenWithPayload token = new JaasServerOauthValidatorCallbackHandler.BearerTokenWithPayloadImpl(tokenInfo); + BearerTokenWithPayload token = new BearerTokenWithGrants(tokenInfo); OAuthKafkaPrincipal principal = new OAuthKafkaPrincipal("User", "bob", token); List actions = Collections.singletonList( diff --git a/testsuite/common/src/main/java/io/strimzi/testsuite/oauth/common/TestUtil.java b/testsuite/common/src/main/java/io/strimzi/testsuite/oauth/common/TestUtil.java index cbe98677..3895afc3 100644 --- a/testsuite/common/src/main/java/io/strimzi/testsuite/oauth/common/TestUtil.java +++ b/testsuite/common/src/main/java/io/strimzi/testsuite/oauth/common/TestUtil.java @@ -11,6 +11,8 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.TimeoutException; +import java.util.function.Supplier; import java.util.regex.Pattern; public class TestUtil { @@ -55,4 +57,19 @@ public static List getContainerLogsForString(String containerName, Strin throw new RuntimeException("Failed to get '" + containerName + "' log", e); } } + + public static void waitForCondition(Supplier condition, int loopPauseMs, int timeoutSeconds) throws TimeoutException, 
InterruptedException { + long startTime = System.currentTimeMillis(); + boolean done; + do { + done = condition.get(); + if (!done) { + // Condition not met + if (System.currentTimeMillis() + loopPauseMs - startTime >= timeoutSeconds * 1000L) { + throw new TimeoutException("Condition not met in " + timeoutSeconds + " seconds"); + } + Thread.sleep(loopPauseMs); + } + } while (!done); + } } diff --git a/testsuite/docker/kafka/Dockerfile b/testsuite/docker/kafka/Dockerfile index 2fd56f24..066bca3b 100644 --- a/testsuite/docker/kafka/Dockerfile +++ b/testsuite/docker/kafka/Dockerfile @@ -1,4 +1,4 @@ -FROM quay.io/strimzi/kafka:0.33.2-kafka-3.4.0 +FROM quay.io/strimzi/kafka:0.34.0-kafka-3.4.0 USER root RUN rm -rf /opt/kafka/libs/bcpkix* /opt/kafka/libs/bcprov* /opt/kafka/libs/keycloak* diff --git a/testsuite/docker/kafka/config/log4j.properties b/testsuite/docker/kafka/config/log4j.properties index 085f5f61..f8a1a7d2 100644 --- a/testsuite/docker/kafka/config/log4j.properties +++ b/testsuite/docker/kafka/config/log4j.properties @@ -64,6 +64,7 @@ log4j.logger.org.apache.zookeeper=INFO # Change the two lines below to adjust the general broker logging level (output to server.log and stdout) log4j.logger.kafka=INFO log4j.logger.org.apache.kafka=INFO +#log4j.logger.kafka.server.metadata.BrokerMetadataListener=DEBUG # Control Strimzi OAuth logging log4j.logger.io.strimzi=TRACE diff --git a/testsuite/docker/kafka/scripts/simple_kafka_config.sh b/testsuite/docker/kafka/scripts/simple_kafka_config.sh index 2ae04f26..38787ea3 100755 --- a/testsuite/docker/kafka/scripts/simple_kafka_config.sh +++ b/testsuite/docker/kafka/scripts/simple_kafka_config.sh @@ -52,29 +52,57 @@ done # # Generate output # -echo "#" -echo "# strimzi.properties" -echo "#" -echo broker.id=`pop_value broker.id 0` -echo num.network.threads=`pop_value num.network.threads 3` -echo num.io.threads=`pop_value num.io.threads 8` -echo socket.send.buffer.bytes=`pop_value socket.send.buffer.bytes 102400` -echo socket.receive.buffer.bytes=`pop_value socket.receive.buffer.bytes 102400` -echo socket.request.max.bytes=`pop_value socket.request.max.bytes 104857600` -echo log.dirs=`pop_value log.dirs /tmp/kafka-logs` -echo num.partitions=`pop_value num.partitions 1` -echo num.recovery.threads.per.data.dir=`pop_value num.recovery.threads.per.data.dir 1` -echo offsets.topic.replication.factor=`pop_value offsets.topic.replication.factor 1` -echo transaction.state.log.replication.factor=`pop_value transaction.state.log.replication.factor 1` -echo transaction.state.log.min.isr=`pop_value transaction.state.log.min.isr 1` -echo log.retention.hours=`pop_value log.retention.hours 168` -echo log.segment.bytes=`pop_value log.segment.bytes 1073741824` -echo log.retention.check.interval.ms=`pop_value log.retention.check.interval.ms 300000` -echo zookeeper.connect=`pop_value zookeeper.connect localhost:2181` -echo zookeeper.connection.timeout.ms=`pop_value zookeeper.connection.timeout.ms 6000` -echo group.initial.rebalance.delay.ms=`pop_value group.initial.rebalance.delay.ms 0` +if [[ "$1" == "--kraft" ]]; then + # + # Output kraft version of server.properties + # + echo "#" + echo "# strimzi.properties (kraft)" + echo "#" + echo process.roles=`pop_value process.roles broker,controller` + echo node.id=`pop_value node.id 1` + echo num.network.threads=`pop_value num.network.threads 3` + echo num.io.threads=`pop_value num.io.threads 8` + echo socket.send.buffer.bytes=`pop_value socket.send.buffer.bytes 102400` + echo socket.receive.buffer.bytes=`pop_value 
socket.receive.buffer.bytes 102400` + echo socket.request.max.bytes=`pop_value socket.request.max.bytes 104857600` + echo log.dirs=`pop_value log.dirs /tmp/kraft-combined-logs` + echo num.partitions=`pop_value num.partitions 1` + echo num.recovery.threads.per.data.dir=`pop_value num.recovery.threads.per.data.dir 1` + echo offsets.topic.replication.factor=`pop_value offsets.topic.replication.factor 1` + echo transaction.state.log.replication.factor=`pop_value transaction.state.log.replication.factor 1` + echo transaction.state.log.min.isr=`pop_value transaction.state.log.min.isr 1` + echo log.retention.hours=`pop_value log.retention.hours 168` + echo log.segment.bytes=`pop_value log.segment.bytes 1073741824` + echo log.retention.check.interval.ms=`pop_value log.retention.check.interval.ms 300000` + +elif [[ "$1" == "" ]]; then + echo "#" + echo "# strimzi.properties" + echo "#" + + echo broker.id=`pop_value broker.id 0` + echo num.network.threads=`pop_value num.network.threads 3` + echo num.io.threads=`pop_value num.io.threads 8` + echo socket.send.buffer.bytes=`pop_value socket.send.buffer.bytes 102400` + echo socket.receive.buffer.bytes=`pop_value socket.receive.buffer.bytes 102400` + echo socket.request.max.bytes=`pop_value socket.request.max.bytes 104857600` + echo log.dirs=`pop_value log.dirs /tmp/kafka-logs` + echo num.partitions=`pop_value num.partitions 1` + echo num.recovery.threads.per.data.dir=`pop_value num.recovery.threads.per.data.dir 1` + echo offsets.topic.replication.factor=`pop_value offsets.topic.replication.factor 1` + echo transaction.state.log.replication.factor=`pop_value transaction.state.log.replication.factor 1` + echo transaction.state.log.min.isr=`pop_value transaction.state.log.min.isr 1` + echo log.retention.hours=`pop_value log.retention.hours 168` + echo log.segment.bytes=`pop_value log.segment.bytes 1073741824` + echo log.retention.check.interval.ms=`pop_value log.retention.check.interval.ms 300000` + echo group.initial.rebalance.delay.ms=`pop_value group.initial.rebalance.delay.ms 0` +else + echo "Unsupported argument: $1" + exit 1 +fi # # Add what remains of KAFKA_* env vars # diff --git a/testsuite/docker/kafka/scripts/start.sh b/testsuite/docker/kafka/scripts/start.sh index 3722f759..9668c5c2 100755 --- a/testsuite/docker/kafka/scripts/start.sh +++ b/testsuite/docker/kafka/scripts/start.sh @@ -12,14 +12,35 @@ wait_for_url $URI "Waiting for Keycloak to start" wait_for_url "$URI/realms/${REALM:-demo}" "Waiting for realm '${REALM}' to be available" -./simple_kafka_config.sh | tee /tmp/strimzi.properties +[ "$KAFKA_ZOOKEEPER_CONNECT" == "" ] && KAFKA_ZOOKEEPER_CONNECT=localhost:2181 +[ "$KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS" == "" ] && KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS=6000 -# Add 'admin' user -KAFKA_DEBUG= /opt/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config 'SCRAM-SHA-512=[password=admin-secret]' --entity-type users --entity-name admin +./simple_kafka_config.sh $1 | tee /tmp/strimzi.properties -# Add 'alice' user -KAFKA_DEBUG= /opt/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config 'SCRAM-SHA-512=[password=alice-secret]' --entity-type users --entity-name alice +echo "Config created" + +KAFKA_DEBUG_PASSED=$KAFKA_DEBUG +unset KAFKA_DEBUG + +# add extra jars to classpath +export CLASSPATH="/opt/kafka/libs/strimzi/*:$CLASSPATH" +echo "CLASSPATH=$CLASSPATH" + +if [[ "$1" == "--kraft" ]]; then + KAFKA_CLUSTER_ID="$(/opt/kafka/bin/kafka-storage.sh random-uuid)" + /opt/kafka/bin/kafka-storage.sh format -t 
$KAFKA_CLUSTER_ID -c /tmp/strimzi.properties + echo "Initialised kafka storage for KRaft" +else + # Add 'admin' user + /opt/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config 'SCRAM-SHA-512=[password=admin-secret]' --entity-type users --entity-name admin + # Add 'alice' user + /opt/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config 'SCRAM-SHA-512=[password=alice-secret]' --entity-type users --entity-name alice + + echo "Added user secrets for SCRAM" +fi + +export KAFKA_DEBUG=$KAFKA_DEBUG_PASSED # set log dir to writable directory @@ -33,10 +54,6 @@ if [ "$KAFKA_LOG4J_OPTS" == "" ]; then fi echo "KAFKA_LOG4J_OPTS=$KAFKA_LOG4J_OPTS" -# add extra jars to classpath -export CLASSPATH="/opt/kafka/libs/strimzi/*:$CLASSPATH" -echo "CLASSPATH=$CLASSPATH" - # Prometheus JMX agent config if [ "$PROMETHEUS_AGENT_CONFIG" == "" ]; then diff --git a/testsuite/docker/kafka/scripts/start_no_wait.sh b/testsuite/docker/kafka/scripts/start_no_wait.sh index d432fc49..1c82d751 100755 --- a/testsuite/docker/kafka/scripts/start_no_wait.sh +++ b/testsuite/docker/kafka/scripts/start_no_wait.sh @@ -1,6 +1,9 @@ #!/bin/bash set -e +[ "$KAFKA_ZOOKEEPER_CONNECT" == "" ] && KAFKA_ZOOKEEPER_CONNECT=localhost:2181 +[ "$KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS" == "" ] && KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS=6000 + ./simple_kafka_config.sh | tee /tmp/strimzi.properties # set log dir to writable directory diff --git a/testsuite/docker/kafka/scripts/start_with_hydra.sh b/testsuite/docker/kafka/scripts/start_with_hydra.sh index c315f176..e6b69ca1 100755 --- a/testsuite/docker/kafka/scripts/start_with_hydra.sh +++ b/testsuite/docker/kafka/scripts/start_with_hydra.sh @@ -11,6 +11,10 @@ URI="https://hydra-jwt:4455/clients" wait_for_url $URI "Waiting for Hydra JWT admin REST to start" wait_for_url $URI/kafka-broker "Waiting for kafka-broker client to be available" + +[ "$KAFKA_ZOOKEEPER_CONNECT" == "" ] && KAFKA_ZOOKEEPER_CONNECT=localhost:2181 +[ "$KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS" == "" ] && KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS=6000 + ./simple_kafka_config.sh | tee /tmp/strimzi.properties # set log dir to writable directory diff --git a/testsuite/docker/keycloak/realms/kafka-authz-realm.json b/testsuite/docker/keycloak/realms/kafka-authz-realm.json index 2f3532e9..b9390285 100644 --- a/testsuite/docker/keycloak/realms/kafka-authz-realm.json +++ b/testsuite/docker/keycloak/realms/kafka-authz-realm.json @@ -529,6 +529,9 @@ "attributes": {}, "uris": [], "scopes": [ + { + "name": "Create" + }, { "name": "DescribeConfigs" }, @@ -612,6 +615,9 @@ "attributes" : { }, "uris" : [ ], "scopes": [ + { + "name": "Create" + }, { "name": "DescribeConfigs" }, diff --git a/testsuite/keycloak-authz-kraft-tests/docker-compose.yml b/testsuite/keycloak-authz-kraft-tests/docker-compose.yml new file mode 100644 index 00000000..31991b79 --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/docker-compose.yml @@ -0,0 +1,153 @@ +version: '3' + +services: + keycloak: + image: quay.io/keycloak/keycloak:19.0.3-legacy + ports: + - "8080:8080" + - "8443:8443" + volumes: + - ${PWD}/../docker/keycloak/realms:/opt/jboss/keycloak/realms + + entrypoint: "" + + command: + - /bin/bash + - -c + - cd /opt/jboss && /opt/jboss/tools/docker-entrypoint.sh -Dkeycloak.profile.feature.upload_scripts=enabled -b 0.0.0.0 + + environment: + - KEYCLOAK_USER=admin + - KEYCLOAK_PASSWORD=admin + - KEYCLOAK_HTTPS_PORT=8443 + - PROXY_ADDRESS_FORWARDING=true + - KEYCLOAK_IMPORT=/opt/jboss/keycloak/realms/kafka-authz-realm.json + + 
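 +  # Kafka broker for the KRaft tests: a single node with combined 'broker,controller' roles, started via ./start.sh --kraft (descriptive comment; the service definition follows unchanged)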
kafka: + image: ${KAFKA_DOCKER_IMAGE} + ports: + - "9091:9091" + - "9092:9092" + - "9093:9093" + - "9094:9094" + - "9095:9095" + - "9096:9096" + - "9100:9100" + + # Prometheus JMX Exporter + - "9404:9404" + + # javaagent debug port + - "5006:5006" + volumes: + - ${PWD}/../docker/target/kafka/libs:/opt/kafka/libs/strimzi + - ${PWD}/../docker/kafka/config:/opt/kafka/config/strimzi + - ${PWD}/../docker/kafka/scripts:/opt/kafka/strimzi + command: + - /bin/bash + - -c + - cd /opt/kafka/strimzi && ./start.sh --kraft + environment: + + #- KAFKA_DEBUG=y + #- DEBUG_SUSPEND_FLAG=y + #- JAVA_DEBUG_PORT=*:5006 + + # KRaft properties + - KAFKA_PROCESS_ROLES=broker,controller + - KAFKA_NODE_ID=1 + - KAFKA_CONTROLLER_QUORUM_VOTERS=1@kafka:9091 + - KAFKA_CONTROLLER_LISTENER_NAMES=CONTROLLER + - KAFKA_SASL_MECHANISM_CONTROLLER_PROTOCOL=PLAIN + + - KAFKA_LISTENERS=CONTROLLER://kafka:9091,JWT://kafka:9092,INTROSPECT://kafka:9093,JWTPLAIN://kafka:9094,INTROSPECTPLAIN://kafka:9095,JWTREFRESH://kafka:9096,PLAIN://kafka:9100 + - KAFKA_ADVERTISED_LISTENERS=JWT://kafka:9092,INTROSPECT://kafka:9093,JWTPLAIN://kafka:9094,INTROSPECTPLAIN://kafka:9095,JWTREFRESH://kafka:9096,PLAIN://kafka:9100 + - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:SASL_PLAINTEXT,JWT:SASL_PLAINTEXT,INTROSPECT:SASL_PLAINTEXT,JWTPLAIN:SASL_PLAINTEXT,INTROSPECTPLAIN:SASL_PLAINTEXT,JWTREFRESH:SASL_PLAINTEXT,PLAIN:SASL_PLAINTEXT + + - KAFKA_INTER_BROKER_LISTENER_NAME=JWT + - KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL=OAUTHBEARER + + - KAFKA_PRINCIPAL_BUILDER_CLASS=io.strimzi.kafka.oauth.server.OAuthKafkaPrincipalBuilder + + # Common settings for all the listeners + # username extraction from JWT token claim + - OAUTH_USERNAME_CLAIM=preferred_username + - OAUTH_CONNECT_TIMEOUT_SECONDS=20 + + - OAUTH_ENABLE_METRICS=true + + # Configuration of individual listeners + - KAFKA_LISTENER_NAME_CONTROLLER_SASL_ENABLED_MECHANISMS=PLAIN + - KAFKA_LISTENER_NAME_CONTROLLER_PLAIN_SASL_JAAS_CONFIG=org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin-password\" user_admin=\"admin-password\" user_bobby=\"bobby-secret\" ; + + - KAFKA_LISTENER_NAME_JWT_SASL_ENABLED_MECHANISMS=OAUTHBEARER + - KAFKA_LISTENER_NAME_JWT_OAUTHBEARER_SASL_JAAS_CONFIG=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required oauth.jwks.endpoint.uri=\"http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/certs\" oauth.valid.issuer.uri=\"http://keycloak:8080/auth/realms/kafka-authz\" oauth.token.endpoint.uri=\"http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token\" oauth.client.id=\"kafka\" oauth.client.secret=\"kafka-secret\" oauth.groups.claim=\"$$.realm_access.roles\" ; + - KAFKA_LISTENER_NAME_JWT_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS=io.strimzi.kafka.oauth.server.JaasServerOauthValidatorCallbackHandler + - KAFKA_LISTENER_NAME_JWT_OAUTHBEARER_SASL_LOGIN_CALLBACK_HANDLER_CLASS=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler + + - KAFKA_LISTENER_NAME_INTROSPECT_SASL_ENABLED_MECHANISMS=OAUTHBEARER + - KAFKA_LISTENER_NAME_INTROSPECT_OAUTHBEARER_SASL_JAAS_CONFIG=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required oauth.introspection.endpoint.uri=\"http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token/introspect\" oauth.valid.issuer.uri=\"http://keycloak:8080/auth/realms/kafka-authz\" oauth.client.id=\"kafka\" oauth.client.secret=\"kafka-secret\" unsecuredLoginStringClaim_sub=\"admin\" ; + - 
KAFKA_LISTENER_NAME_INTROSPECT_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS=io.strimzi.kafka.oauth.server.JaasServerOauthValidatorCallbackHandler + + - KAFKA_LISTENER_NAME_JWTPLAIN_SASL_ENABLED_MECHANISMS=PLAIN + - KAFKA_LISTENER_NAME_JWTPLAIN_PLAIN_SASL_JAAS_CONFIG=org.apache.kafka.common.security.plain.PlainLoginModule required oauth.jwks.endpoint.uri=\"http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/certs\" oauth.valid.issuer.uri=\"http://keycloak:8080/auth/realms/kafka-authz\" oauth.token.endpoint.uri=\"http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token\" oauth.client.id=\"kafka\" oauth.client.secret=\"kafka-secret\" unsecuredLoginStringClaim_sub=\"admin\" ; + - KAFKA_LISTENER_NAME_JWTPLAIN_PLAIN_SASL_SERVER_CALLBACK_HANDLER_CLASS=io.strimzi.kafka.oauth.server.plain.JaasServerOauthOverPlainValidatorCallbackHandler + + - KAFKA_LISTENER_NAME_INTROSPECTPLAIN_SASL_ENABLED_MECHANISMS=PLAIN + - KAFKA_LISTENER_NAME_INTROSPECTPLAIN_PLAIN_SASL_JAAS_CONFIG=org.apache.kafka.common.security.plain.PlainLoginModule required oauth.introspection.endpoint.uri=\"http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token/introspect\" oauth.valid.issuer.uri=\"http://keycloak:8080/auth/realms/kafka-authz\" oauth.token.endpoint.uri=\"http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token\" oauth.client.id=\"kafka\" oauth.client.secret=\"kafka-secret\" unsecuredLoginStringClaim_sub=\"admin\" ; + - KAFKA_LISTENER_NAME_INTROSPECTPLAIN_PLAIN_SASL_SERVER_CALLBACK_HANDLER_CLASS=io.strimzi.kafka.oauth.server.plain.JaasServerOauthOverPlainValidatorCallbackHandler + + - KAFKA_LISTENER_NAME_JWTREFRESH_SASL_ENABLED_MECHANISMS=OAUTHBEARER + - KAFKA_LISTENER_NAME_JWTREFRESH_OAUTHBEARER_SASL_JAAS_CONFIG=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required oauth.jwks.endpoint.uri=\"http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/certs\" oauth.valid.issuer.uri=\"http://keycloak:8080/auth/realms/kafka-authz\" oauth.token.endpoint.uri=\"http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token\" oauth.client.id=\"kafka\" oauth.client.secret=\"kafka-secret\" oauth.jwks.refresh.min.pause.seconds=\"2\" unsecuredLoginStringClaim_sub=\"admin\" ; + - KAFKA_LISTENER_NAME_JWTREFRESH_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS=io.strimzi.kafka.oauth.server.JaasServerOauthValidatorCallbackHandler + # Enable re-authentication + - KAFKA_LISTENER_NAME_JWTREFRESH_OAUTHBEARER_CONNECTIONS_MAX_REAUTH_MS=3600000 + + - KAFKA_LISTENER_NAME_PLAIN_SASL_ENABLED_MECHANISMS=PLAIN + - KAFKA_LISTENER_NAME_PLAIN_PLAIN_SASL_JAAS_CONFIG=org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin-password\" user_admin=\"admin-password\" user_bobby=\"bobby-secret\" ; + + + # Authorizer configuration + - KAFKA_AUTHORIZER_CLASS_NAME=io.strimzi.kafka.oauth.server.authorizer.KeycloakAuthorizer + + - KAFKA_STRIMZI_AUTHORIZATION_TOKEN_ENDPOINT_URI=http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token + - KAFKA_STRIMZI_AUTHORIZATION_CLIENT_ID=kafka + - KAFKA_STRIMZI_AUTHORIZATION_CLIENT_SECRET=kafka-secret + - KAFKA_STRIMZI_AUTHORIZATION_KAFKA_CLUSTER_NAME=my-cluster + - KAFKA_STRIMZI_AUTHORIZATION_DELEGATE_TO_KAFKA_ACL=true + - KAFKA_STRIMZI_AUTHORIZATION_READ_TIMEOUT_SECONDS=45 + + # Parameters controlling the refreshing of grants + - KAFKA_STRIMZI_AUTHORIZATION_GRANTS_REFRESH_POOL_SIZE=4 + + # Any change to permissions will be reflected within 10 seconds 
+ # Has to be set to 10 seconds for keycloak-authz*-tests/**/RefreshTest + - KAFKA_STRIMZI_AUTHORIZATION_GRANTS_REFRESH_PERIOD_SECONDS=10 + + # If a grants fetch fails, immediately perform one retry + - KAFKA_STRIMZI_AUTHORIZATION_HTTP_RETRIES=1 + + # Use grants fetched for another session if available + - KAFKA_STRIMZI_AUTHORIZATION_REUSE_GRANTS=true + + - KAFKA_STRIMZI_AUTHORIZATION_ENABLE_METRICS=true + + - KAFKA_SUPER_USERS=User:admin;User:service-account-kafka + + # Other configuration + - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 + + # For start.sh script to know where the keycloak is listening + - KEYCLOAK_HOST=${KEYCLOAK_HOST:-keycloak} + - REALM=${REALM:-kafka-authz} + + kafka-acls: + image: ${KAFKA_DOCKER_IMAGE} + links: + - kafka + volumes: + - ${PWD}/../docker/kafka-acls/scripts:/opt/kafka/strimzi + command: + - /bin/bash + - -c + - cd /opt/kafka/strimzi && ./add-acls.sh diff --git a/testsuite/keycloak-authz-kraft-tests/pom.xml b/testsuite/keycloak-authz-kraft-tests/pom.xml new file mode 100644 index 00000000..91af708b --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/pom.xml @@ -0,0 +1,59 @@ + + + + 4.0.0 + + + io.strimzi.oauth.testsuite + kafka-oauth-testsuite + 1.0.0-SNAPSHOT + + + keycloak-authz-kraft-tests + + + + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + + + + + ../.. + + + + + org.testcontainers + testcontainers + test + + + junit + junit + ${version.junit} + + + io.strimzi.oauth.testsuite + common + + + + io.strimzi + kafka-oauth-common + + + io.strimzi + kafka-oauth-client + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-simple + + + \ No newline at end of file diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/BasicTest.java b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/BasicTest.java new file mode 100644 index 00000000..51f936d5 --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/BasicTest.java @@ -0,0 +1,219 @@ +/* + * Copyright 2017-2020, Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */ +package io.strimzi.testsuite.oauth.authz; + +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; + +import java.util.Properties; + +import static java.util.Collections.singletonList; + + +public class BasicTest extends Common { + + public BasicTest(String kafkaBootstrap, boolean oauthOverPlain) { + super(kafkaBootstrap, oauthOverPlain); + } + + public void doTest() throws Exception { + + tokens = authenticateAllActors(); + + testTeamAClientPart1(); + + testTeamBClientPart1(); + + createTopicAsClusterManager(); + + testTeamAClientPart2(); + + testTeamBClientPart2(); + + testClusterManager(); + + cleanup(); + } + + void createTopicAsClusterManager() throws Exception { + + Properties bobAdminProps = buildAdminConfigForAccount(BOB); + AdminClient admin = AdminClient.create(bobAdminProps); + + // + // Create x_* topic + // + admin.createTopics(singletonList(new NewTopic(TOPIC_X, 1, (short) 1))).all().get(); + } + + void testClusterManager() throws Exception { + + Properties bobAdminProps = buildProducerConfigForAccount(BOB); + Producer producer = new KafkaProducer<>(bobAdminProps); + + Properties consumerProps = buildConsumerConfigForAccount(BOB); + Consumer consumer = new KafkaConsumer<>(consumerProps); + + // + // bob should succeed producing to x_* topic + // + produce(producer, TOPIC_X); + + // + // bob should succeed producing to a_* topic + // + produce(producer, TOPIC_A); + + // + // bob should succeed producing to b_* topic + // + produce(producer, TOPIC_B); + + // + // bob should succeed producing to non-existing topic + // + produce(producer, "non-existing-topic"); + + // + // bob should succeed consuming from x_* topic + // + consume(consumer, TOPIC_X); + + // + // bob should succeed consuming from a_* topic + // + consume(consumer, TOPIC_A); + + // + // bob should succeed consuming from b_* topic + // + consume(consumer, TOPIC_B); + + // + // bob should succeed consuming from "non-existing-topic" - which now exists + // + consume(consumer, "non-existing-topic"); + } + + void testTeamAClientPart1() throws Exception { + + Producer teamAProducer = getProducer(TEAM_A_CLIENT); + + // + // team-a-client should fail to produce to b_* topic + // + produceFail(teamAProducer, TOPIC_B); + + // Re-init producer because message to topicB is stuck in the queue, and any subsequent message to another queue + // won't be handled until first message makes it through. 
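 +        // newProducer() closes any previous producer for this client and creates a fresh KafkaProducer (see Common.recycleProducer)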
+ teamAProducer = newProducer(TEAM_A_CLIENT); + + // + // team-a-client should succeed producing to a_* topic + // + produce(teamAProducer, TOPIC_A); + + // + // team-a-client should also fail producing to non-existing x_* topic (fails to create it) + // + produceFail(teamAProducer, TOPIC_X); + + Consumer teamAConsumer = newConsumer(TEAM_A_CLIENT, TOPIC_B); + + // + // team-a-client should fail consuming from b_* topic + // + consumeFail(teamAConsumer, TOPIC_B); + + + // Close and re-init consumer + teamAConsumer = newConsumer(TEAM_A_CLIENT, TOPIC_A); + + // + // team-a-client should succeed consuming from a_* topic + // + consume(teamAConsumer, TOPIC_A); + + // + // team-a-client should fail consuming from x_* topic - it doesn't exist + // + consumeFail(teamAConsumer, TOPIC_X); + } + + void testTeamBClientPart1() throws Exception { + + Producer teamBProducer = getProducer(TEAM_B_CLIENT); + + // + // team-b-client should fail to produce to a_* topic + // + produceFail(teamBProducer, TOPIC_A); + + // Re-init producer because message to topicA is stuck in the queue, and any subsequent message to another queue + // won't be handled until first message makes it through. + teamBProducer = newProducer(TEAM_B_CLIENT); + + // + // team-b-client should succeed producing to b_* topic + // + produce(teamBProducer, TOPIC_B); + + // + // team-b-client should fail to produce to x_* topic + // + produceFail(teamBProducer, TOPIC_X); + + + Consumer teamBConsumer = newConsumer(TEAM_B_CLIENT, TOPIC_A); + + // + // team-b-client should fail consuming from a_* topic + // + consumeFail(teamBConsumer, TOPIC_A); + + // Close and re-init consumer + teamBConsumer = newConsumer(TEAM_B_CLIENT, TOPIC_B); + + // + // team-b-client should succeed consuming from b_* topic + // + consume(teamBConsumer, TOPIC_B); + } + + void testTeamAClientPart2() throws Exception { + + // + // team-a-client should succeed producing to existing x_* topic + // + Producer teamAProducer = newProducer(TEAM_A_CLIENT); + + produce(teamAProducer, TOPIC_X); + + // + // team-a-client should fail reading from x_* topic + // + Consumer teamAConsumer = newConsumer(TEAM_A_CLIENT, TOPIC_A); + consumeFail(teamAConsumer, TOPIC_X); + } + + void testTeamBClientPart2() throws Exception { + // + // team-b-client should succeed consuming from x_* topic + // + Consumer teamBConsumer = newConsumer(TEAM_B_CLIENT, TOPIC_B); + consume(teamBConsumer, TOPIC_X); + + + // + // team-b-client should fail producing to x_* topic + // + Producer teamBProducer = newProducer(TEAM_B_CLIENT); + produceFail(teamBProducer, TOPIC_X); + } +} diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/Common.java b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/Common.java new file mode 100644 index 00000000..3b150eea --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/Common.java @@ -0,0 +1,375 @@ +/* + * Copyright 2017-2020, Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */ +package io.strimzi.testsuite.oauth.authz; + +import com.fasterxml.jackson.databind.JsonNode; +import io.strimzi.kafka.oauth.client.ClientConfig; +import io.strimzi.kafka.oauth.common.HttpUtil; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.errors.TopicAuthorizationException; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.junit.Assert; + +import java.io.IOException; +import java.net.URI; +import java.time.Duration; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +import static io.strimzi.kafka.oauth.common.OAuthAuthenticator.loginWithClientSecret; +import static io.strimzi.kafka.oauth.common.OAuthAuthenticator.urlencode; + +public class Common { + + + static final String HOST = "keycloak"; + static final String REALM = "kafka-authz"; + static final String TOKEN_ENDPOINT_URI = "http://" + HOST + ":8080/auth/realms/" + REALM + "/protocol/openid-connect/token"; + + static final String TEAM_A_CLIENT = "team-a-client"; + static final String TEAM_B_CLIENT = "team-b-client"; + static final String BOB = "bob"; + + static final String TOPIC_A = "a_messages"; + static final String TOPIC_B = "b_messages"; + static final String TOPIC_X = "x_messages"; + + + final String kafkaBootstrap; + + boolean usePlain; + + HashMap tokens; + + Producer teamAProducer; + Consumer teamAConsumer; + + Producer teamBProducer; + Consumer teamBConsumer; + + Common(String kafkaBootstrap, boolean oauthOverPlain) { + this.kafkaBootstrap = kafkaBootstrap; + this.usePlain = oauthOverPlain; + } + + static HashMap authenticateAllActors() throws IOException { + + HashMap tokens = new HashMap<>(); + tokens.put(TEAM_A_CLIENT, loginWithClientSecret(URI.create(TOKEN_ENDPOINT_URI), null, null, + TEAM_A_CLIENT, TEAM_A_CLIENT + "-secret", true, null, null).token()); + tokens.put(TEAM_B_CLIENT, loginWithClientSecret(URI.create(TOKEN_ENDPOINT_URI), null, null, + TEAM_B_CLIENT, TEAM_B_CLIENT + "-secret", true, null, null).token()); + tokens.put(BOB, loginWithUsernamePassword(URI.create(TOKEN_ENDPOINT_URI), + BOB, BOB + "-password", "kafka-cli")); + return tokens; + } + + static void consume(Consumer consumer, String topic) { + TopicPartition partition = new TopicPartition(topic, 0); + consumer.assign(Collections.singletonList(partition)); + + while (consumer.partitionsFor(topic, Duration.ofSeconds(1)).size() == 0) { + System.out.println("No assignment yet for consumer"); + } + + consumer.seekToBeginning(Collections.singletonList(partition)); + ConsumerRecords records = consumer.poll(Duration.ofSeconds(10)); + + Assert.assertTrue("Got message", records.count() >= 1); + } + + static void consumeFail(Consumer consumer, String topic) { + TopicPartition partition = new TopicPartition(topic, 0); + consumer.assign(Collections.singletonList(partition)); + + try { + while 
(consumer.partitionsFor(topic, Duration.ofSeconds(1)).size() == 0) { + System.out.println("No assignment yet for consumer"); + } + + consumer.seekToBeginning(Collections.singletonList(partition)); + consumer.poll(Duration.ofSeconds(1)); + + Assert.fail("Should fail with TopicAuthorizationException"); + } catch (TopicAuthorizationException expected) { + } + } + + static void produce(Producer producer, String topic) throws Exception { + producer.send(new ProducerRecord<>(topic, "The Message")).get(); + } + + static void produceFail(Producer producer, String topic) throws Exception { + try { + produce(producer, topic); + Assert.fail("Should not be able to send message"); + } catch (ExecutionException e) { + // should get authorization exception + Assert.assertTrue("Should fail with TopicAuthorizationException", e.getCause() instanceof TopicAuthorizationException); + } + } + + static String loginWithUsernamePassword(URI tokenEndpointUri, String username, String password, String clientId) throws IOException { + + String body = "grant_type=password&username=" + urlencode(username) + + "&password=" + urlencode(password) + "&client_id=" + urlencode(clientId); + + JsonNode result = HttpUtil.post(tokenEndpointUri, + null, + null, + null, + "application/x-www-form-urlencoded", + body, + JsonNode.class); + + JsonNode token = result.get("access_token"); + if (token == null) { + throw new IllegalStateException("Invalid response from authorization server: no access_token"); + } + return token.asText(); + } + + Producer getProducer(final String name) { + return recycleProducer(name, true); + } + + Producer newProducer(final String name) { + return recycleProducer(name, false); + } + + Producer recycleProducer(final String name, boolean recycle) { + switch (name) { + case TEAM_A_CLIENT: + if (teamAProducer != null) { + if (recycle) { + return teamAProducer; + } else { + teamAProducer.close(); + } + } + break; + case TEAM_B_CLIENT: + if (teamBProducer != null) { + if (recycle) { + return teamBProducer; + } else { + teamBProducer.close(); + } + } + break; + default: + throw new IllegalArgumentException("Unsupported producer: " + name); + } + + Properties producerProps = buildProducerConfigForAccount(name); + Producer producer = new KafkaProducer<>(producerProps); + + if (TEAM_A_CLIENT.equals(name)) { + teamAProducer = producer; + } else { + teamBProducer = producer; + } + return producer; + } + + Consumer newConsumer(final String name, String topic) { + switch (name) { + case TEAM_A_CLIENT: + if (teamAConsumer != null) { + teamAConsumer.close(); + } + break; + case TEAM_B_CLIENT: + if (teamBConsumer != null) { + teamBConsumer.close(); + } + break; + default: + throw new IllegalArgumentException("Unsupported consumer: " + name); + } + + Properties consumerProps = buildConsumerConfigForAccount(name); + consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupFor(topic)); + Consumer consumer = new KafkaConsumer<>(consumerProps); + + if (TEAM_A_CLIENT.equals(name)) { + teamAConsumer = consumer; + } else { + teamBConsumer = consumer; + } + return consumer; + } + + Properties buildAdminConfigForAccount(String name) { + return buildProducerConfigForAccount(name); + } + + Properties buildProducerConfigForAccount(String name) { + return usePlain + ? buildProducerConfigPlain(kafkaBootstrap, buildAuthConfigForPlain(name)) + : buildProducerConfigOAuthBearer(kafkaBootstrap, buildAuthConfigForOAuthBearer(name)); + } + + Properties buildConsumerConfigForAccount(String name) { + return usePlain + ? 
buildConsumerConfigPlain(kafkaBootstrap, buildAuthConfigForPlain(name)) + : buildConsumerConfigOAuthBearer(kafkaBootstrap, buildAuthConfigForOAuthBearer(name)); + } + + Properties buildConsumerConfig(String kafkaBootstrap, boolean usePlain, String clientId, String secret) { + return usePlain ? + buildConsumerConfigPlain(kafkaBootstrap, buildAuthConfigForPlain(clientId, secret)) : + buildConsumerConfigOAuthBearer(kafkaBootstrap, buildAuthConfigForOAuthBearer(clientId)); + } + + Map buildAuthConfigForOAuthBearer(String name) { + Map config = new HashMap<>(); + + String token = tokens.get(name); + Assert.assertNotNull("No token for user: " + name + ". Was the user authenticated?", token); + + config.put(ClientConfig.OAUTH_ACCESS_TOKEN, token); + return config; + } + + Map buildAuthConfigForPlain(String name) { + return name.endsWith("-client") + ? buildAuthConfigForPlain(name, name + "-secret") + : buildAuthConfigForPlain(name, "$accessToken:" + tokens.get(name)); + } + + static Map buildAuthConfigForPlain(String clientId, String secret) { + Map config = new HashMap<>(); + config.put("username", clientId); + config.put("password", secret); + return config; + } + + static String groupFor(String topic) { + return topic + "-group"; + } + + static String getJaasConfigOptionsString(Map options) { + StringBuilder sb = new StringBuilder(); + for (Map.Entry ent: options.entrySet()) { + sb.append(" ").append(ent.getKey()).append("=\"").append(ent.getValue()).append("\""); + } + return sb.toString(); + } + + static Properties buildProducerConfigOAuthBearer(String kafkaBootstrap, Map oauthConfig) { + Properties p = buildCommonConfigOAuthBearer(oauthConfig); + setCommonProducerProperties(kafkaBootstrap, p); + return p; + } + + static void setCommonProducerProperties(String kafkaBootstrap, Properties p) { + p.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrap); + p.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + p.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + p.setProperty(ProducerConfig.ACKS_CONFIG, "all"); + p.setProperty(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "5000"); + p.setProperty(ProducerConfig.RETRIES_CONFIG, "10"); + p.setProperty(ProducerConfig.MAX_BLOCK_MS_CONFIG, "300000"); + } + + static Properties buildConsumerConfigOAuthBearer(String kafkaBootstrap, Map oauthConfig) { + Properties p = buildCommonConfigOAuthBearer(oauthConfig); + setCommonConsumerProperties(kafkaBootstrap, p); + return p; + } + + static Properties buildCommonConfigOAuthBearer(Map oauthConfig) { + String configOptions = getJaasConfigOptionsString(oauthConfig); + + Properties p = new Properties(); + p.setProperty("security.protocol", "SASL_PLAINTEXT"); + p.setProperty("sasl.mechanism", "OAUTHBEARER"); + p.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + configOptions + " ;"); + p.setProperty("sasl.login.callback.handler.class", "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + + return p; + } + + Properties buildProducerConfig(String kafkaBootstrap, boolean usePlain, String clientId, String secret) { + return usePlain ? 
+ buildProducerConfigPlain(kafkaBootstrap, buildAuthConfigForPlain(clientId, secret)) : + buildProducerConfigOAuthBearer(kafkaBootstrap, buildAuthConfigForOAuthBearer(clientId)); + } + + static Properties buildProducerConfigPlain(String kafkaBootstrap, Map plainConfig) { + Properties p = buildCommonConfigPlain(plainConfig); + setCommonProducerProperties(kafkaBootstrap, p); + return p; + } + + static Properties buildProducerConfigScram(String kafkaBootstrap, Map scramConfig) { + Properties p = buildCommonConfigScram(scramConfig); + setCommonProducerProperties(kafkaBootstrap, p); + return p; + } + + static Properties buildConsumerConfigPlain(String kafkaBootstrap, Map plainConfig) { + Properties p = buildCommonConfigPlain(plainConfig); + setCommonConsumerProperties(kafkaBootstrap, p); + return p; + } + + static void setCommonConsumerProperties(String kafkaBootstrap, Properties p) { + p.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrap); + p.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + p.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + p.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "consumer-group"); + p.setProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "10"); + p.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + } + + static Properties buildCommonConfigPlain(Map plainConfig) { + String configOptions = getJaasConfigOptionsString(plainConfig); + + Properties p = new Properties(); + p.setProperty("security.protocol", "SASL_PLAINTEXT"); + p.setProperty("sasl.mechanism", "PLAIN"); + p.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required " + configOptions + " ;"); + return p; + } + + static Properties buildCommonConfigScram(Map scramConfig) { + String configOptions = getJaasConfigOptionsString(scramConfig); + + Properties p = new Properties(); + p.setProperty("security.protocol", "SASL_PLAINTEXT"); + p.setProperty("sasl.mechanism", "SCRAM-SHA-512"); + p.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required " + configOptions + " ;"); + return p; + } + + static AdminClient buildAdminClientForPlain(String kafkaBootstrap, String user) { + Properties adminProps = buildProducerConfigPlain(kafkaBootstrap, buildAuthConfigForPlain(user, user + "-password")); + return AdminClient.create(adminProps); + } + + void cleanup() { + Properties bobAdminProps = buildAdminConfigForAccount(BOB); + AdminClient admin = AdminClient.create(bobAdminProps); + + admin.deleteTopics(Arrays.asList(TOPIC_A, TOPIC_B, TOPIC_X, "non-existing-topic")); + admin.deleteConsumerGroups(Arrays.asList(groupFor(TOPIC_A), groupFor(TOPIC_B), groupFor(TOPIC_X), groupFor("non-existing-topic"))); + } +} diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/ConfigurationTest.java b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/ConfigurationTest.java new file mode 100644 index 00000000..a1ac57b7 --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/ConfigurationTest.java @@ -0,0 +1,51 @@ +/* + * Copyright 2017-2021, Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */ +package io.strimzi.testsuite.oauth.authz; + +import org.junit.Assert; + +import java.util.List; + +import static io.strimzi.testsuite.oauth.common.TestUtil.getContainerLogsForString; + +public class ConfigurationTest { + + private final String kafkaContainer; + + ConfigurationTest(String kafkaContainer) { + this.kafkaContainer = kafkaContainer; + } + + public void doTest() { + // get kafka log and make sure KeycloakRBACAuthorizer has been configured with expected settings + List lines = getContainerLogsForString(kafkaContainer, "Configured KeycloakRBACAuthorizer"); + Assert.assertTrue("Kafka log should contain string: 'KeycloakRBACAuthorizer'", lines.size() > 0); + + String value = getLoggerAttribute(lines, "connectTimeoutSeconds"); + Assert.assertEquals("'connectTimeoutSeconds' should be 20", "20", value); + + value = getLoggerAttribute(lines, "readTimeoutSeconds"); + Assert.assertEquals("'readTimeoutSeconds' should be 45", "45", value); + + value = getLoggerAttribute(lines, "enableMetrics"); + Assert.assertEquals("'enableMetrics' should be true", "true", value); + + value = getLoggerAttribute(lines, "httpRetries"); + Assert.assertEquals("'httpRetries' should be 1", "1", value); + + value = getLoggerAttribute(lines, "reuseGrants"); + Assert.assertEquals("'reuseGrants' should be true", "true", value); + } + + private static String getLoggerAttribute(List lines, String name) { + for (String line: lines) { + if (line.contains(name)) { + String[] keyVal = line.split(":"); + return keyVal[1].trim().split(" ")[0].trim(); + } + } + return null; + } +} diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/FloodTest.java b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/FloodTest.java new file mode 100644 index 00000000..66847432 --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/FloodTest.java @@ -0,0 +1,352 @@ +/* + * Copyright 2017-2021, Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */ +package io.strimzi.testsuite.oauth.authz; + +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.errors.AuthorizationException; +import org.junit.Assert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InterruptedIOException; +import java.net.URI; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicInteger; + +import static io.strimzi.kafka.oauth.common.OAuthAuthenticator.loginWithClientSecret; +import static io.strimzi.testsuite.oauth.common.TestUtil.getContainerLogsForString; + +public class FloodTest extends Common { + + private static final Logger log = LoggerFactory.getLogger(FloodTest.class); + + private final ArrayList threads = new ArrayList<>(); + + private static AtomicInteger startedCount; + + static int sendLimit = 1; + + private final String kafkaContainer; + + FloodTest(String kafkaContainer, String kafkaBootstrap, boolean oauthOverPlain) { + super(kafkaBootstrap, oauthOverPlain); + this.kafkaContainer = kafkaContainer; + } + + public void doTest() throws IOException { + clientCredentialsWithFloodTest(); + } + + + /** + * This test uses the Kafka listener configured with both OAUTHBEARER and PLAIN. + * + * It connects concurrently with multiple producers with different client IDs using the PLAIN mechanism, testing the OAuth over PLAIN functionality. + * With KeycloakRBACAuthorizer configured, any mixup of the credentials between different clients will be caught as + * AuthorizationException would be thrown trying to write to the topic if the user context was mismatched. 
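+ *
+ * Illustrative sketch only (not the exact helper this suite uses - see Common.buildProducerConfigPlain): each
+ * flood client authenticates over SASL/PLAIN with its own client credentials, roughly:
+ *
+ *   Properties p = new Properties();
+ *   p.setProperty("security.protocol", "SASL_PLAINTEXT");
+ *   p.setProperty("sasl.mechanism", "PLAIN");
+ *   p.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required"
+ *       + " username=\"kafka-producer-client-1\" password=\"kafka-producer-client-1-secret\" ;");
+ *
+ * The broker's JaasServerOauthOverPlainValidatorCallbackHandler resolves those credentials against Keycloak, so a
+ * credential mixup surfaces as the AuthorizationException described above.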
+ */ + void clientCredentialsWithFloodTest() throws IOException { + + String producerPrefix = "kafka-producer-client-"; + String consumerPrefix = "kafka-consumer-client-"; + + // 10 parallel producers and consumers + final int clientCount = 10; + + if (!usePlain) { + HashMap tokens = new HashMap<>(); + for (int i = 1; i <= clientCount; i++) { + obtainAndStoreToken(producerPrefix, tokens, i); + obtainAndStoreToken(consumerPrefix, tokens, i); + } + this.tokens = tokens; + } + System.out.println(" ==== Test sending to unauthorized topic"); + // Try write to the mismatched topic - we should get AuthorizationException + try { + sendSingleMessage("kafka-producer-client-1", "kafka-producer-client-1-secret", "messages-2"); + + Assert.fail("Sending to 'messages-2' using 'kafka-producer-client-1' should fail with AuthorizationException"); + } catch (InterruptedException e) { + throw new InterruptedIOException("Interrupted"); + } catch (ExecutionException e) { + Assert.assertTrue("Exception type should be AuthorizationException", e.getCause() instanceof AuthorizationException); + } + + + // Do 5 iterations - each time hitting the broker with 10 parallel requests + for (int run = 0; run < 5; run++) { + System.out.println("\n*** Run " + (run + 1) + "/5\n"); + for (int i = 1; i <= clientCount; i++) { + String topic = "messages-" + i; + + addProducerThread(producerPrefix + i, producerPrefix + i + "-secret", topic); + addConsumerThread(consumerPrefix + i, consumerPrefix + i + "-secret", topic, groupForConsumer(i)); + } + + // Start all threads + startThreads(); + + // Wait for all threads to finish + joinThreads(); + + // Check for errors + checkExceptions(); + + // Prepare for the next run + clearThreads(); + } + + System.out.println(); + System.out.println(" ==== Test flooding a single topic using kafka-producer-client-1 and kafka-consumer-client-1"); + System.out.println(); + + // Now try the same with a single topic + for (int run = 0; run < 5; run++) { + + for (int i = 1; i <= clientCount; i++) { + String topic = "messages-1"; + + addProducerThread(producerPrefix + "1", producerPrefix + "1" + "-secret", topic); + addConsumerThread(consumerPrefix + "1", consumerPrefix + "1" + "-secret", topic, groupForConsumer(1)); + } + + // Start all threads + startThreads(); + + // Wait for all threads to finish + joinThreads(); + + // Check for errors + checkExceptions(); + + // Prepare for the next run + clearThreads(); + } + } + + private int currentFoundExistingGrantsLogCount() { + List lines = getContainerLogsForString(kafkaContainer, "Found existing grants for the token on another session"); + return lines.size(); + } + + private int currentSemaphoreBlockLogCount() { + List lines = getContainerLogsForString(kafkaContainer, "Waiting on another thread to get grants"); + return lines.size(); + } + + private void sendSingleMessage(String clientId, String secret, String topic) throws ExecutionException, InterruptedException { + Properties props = buildProducerConfig(kafkaBootstrap, usePlain, clientId, secret); + KafkaProducer producer = new KafkaProducer<>(props); + + producer.send(new ProducerRecord<>(topic, "Message 0")) + .get(); + } + + private String groupForConsumer(int index) { + return "g" + (index < 10 ? 
index : 0); + } + + private void obtainAndStoreToken(String producerPrefix, HashMap tokens, int i) throws IOException { + String clientId = producerPrefix + i; + String secret = clientId + "-secret"; + + tokens.put(clientId, loginWithClientSecret(URI.create(TOKEN_ENDPOINT_URI), null, null, + clientId, secret, true, null, null).token()); + } + + + public void clearThreads() { + threads.clear(); + } + + public void startThreads() { + startedCount = new AtomicInteger(0); + for (Thread t : threads) { + t.start(); + } + } + + public void joinThreads() { + for (Thread t : threads) { + try { + t.join(); + } catch (InterruptedException e) { + throw new RuntimeException("Interrupted - exiting ..."); + } + } + } + + public void checkExceptions() { + try { + for (Thread t : threads) { + ((ClientJob) t).checkException(); + } + } catch (RuntimeException e) { + throw e; + } catch (Throwable t) { + throw new RuntimeException("Test failed due to: ", t); + } + } + + public void addProducerThread(String clientId, String secret, String topic) { + FloodProducer p = new FloodProducer(clientId, secret, topic); + p.initProducer(); + } + + public void addConsumerThread(String clientId, String secret, String topic, String group) { + FloodConsumer c = new FloodConsumer(clientId, secret, topic, group); + c.initConsumer(); + } + + + static class ClientJob extends Thread { + + final String clientId; + final String secret; + final String topic; + Throwable error; + + ClientJob(String clientId, String secret, String topic) { + this.clientId = clientId; + this.secret = secret; + this.topic = topic; + } + + void checkException() throws Throwable { + if (error != null) { + log.error("Client job error: ", error); + throw error; + } + } + } + + class FloodConsumer extends ClientJob { + + Consumer consumer; + + String group; + + FloodConsumer(String clientId, String secret, String topic, String group) { + super(clientId, secret, topic); + this.group = group; + } + + private void initConsumer() { + Properties props = buildConsumerConfig(kafkaBootstrap, usePlain, clientId, secret); + props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, group); + consumer = new KafkaConsumer<>(props); + setName("FloodConsumer Runner Thread - " + clientId + " - " + threads.size()); + threads.add(this); + } + + public void run() { + int started = startedCount.addAndGet(1); + + try { + while (started < threads.size()) { + Thread.sleep(10); + started = startedCount.get(); + } + + for (int i = 0; i < sendLimit; i++) { + + // This loop ensures some time for topic to be autocreated by producer which has the permissions to create the topic + // Whereas the consumer does not have a permission to create a topic. 
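+                // i.e. retry the consume for up to ~30 seconds (300 attempts, 100 ms apart) before propagating the last failure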
+ for (int triesLeft = 300; triesLeft > 0; triesLeft--) { + try { + consume(consumer, topic); + log.debug("[" + clientId + "] Consumed message from '" + topic + "': Message " + i); + break; + } catch (Throwable t) { + if (triesLeft <= 1) { + throw t; + } + Thread.sleep(100); + } + } + + if (i < sendLimit - 1) { + Thread.sleep(2000); + } + } + } catch (InterruptedException e) { + error = new RuntimeException("Interrupted while consuming!"); + } catch (Throwable t) { + error = t; + } finally { + if (consumer != null) { + consumer.close(); + } + } + if (error != null) { + log.error("[" + clientId + "] failed: ", error); + } + } + } + + class FloodProducer extends ClientJob { + + Producer producer; + + FloodProducer(String clientId, String secret, String topic) { + super(clientId, secret, topic); + } + + private void initProducer() { + Properties props = buildProducerConfig(kafkaBootstrap, usePlain, clientId, secret); + producer = new KafkaProducer<>(props); + setName("FloodProducer Runner Thread - " + clientId + " - " + threads.size()); + threads.add(this); + } + + public void run() { + int started = startedCount.addAndGet(1); + + try { + + while (started < threads.size()) { + Thread.sleep(10); + started = startedCount.get(); + } + + for (int i = 0; i < sendLimit; i++) { + producer.send(new ProducerRecord<>(topic, "Message " + i)) + .get(); + + log.debug("[" + clientId + "] Produced message to '" + topic + "': Message " + i); + + if (i < sendLimit - 1) { + Thread.sleep(2000); + } + } + } catch (InterruptedException e) { + error = new RuntimeException("Interrupted while sending!"); + } catch (ExecutionException e) { + error = new RuntimeException("Failed to send message: ", e); + } catch (Throwable t) { + error = t; + } finally { + if (producer != null) { + producer.close(); + } + } + if (error != null) { + log.error("[" + clientId + "] failed: ", error); + } + } + } + +} diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/KeycloakRaftAuthorizationTests.java b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/KeycloakRaftAuthorizationTests.java new file mode 100644 index 00000000..7bbce089 --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/KeycloakRaftAuthorizationTests.java @@ -0,0 +1,145 @@ +/* + * Copyright 2017-2020, Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */ +package io.strimzi.testsuite.oauth.authz; + +import io.strimzi.testsuite.oauth.common.TestContainersLogCollector; +import io.strimzi.testsuite.oauth.common.TestContainersWatcher; +import io.strimzi.testsuite.oauth.common.TestUtil; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.common.acl.AccessControlEntryFilter; +import org.apache.kafka.common.acl.AclBinding; +import org.apache.kafka.common.acl.AclBindingFilter; +import org.apache.kafka.common.acl.AclOperation; +import org.apache.kafka.common.acl.AclPermissionType; +import org.apache.kafka.common.resource.ResourcePatternFilter; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.wait.strategy.Wait; + +import java.io.File; +import java.time.Duration; +import java.util.Collection; + +/** + * Tests for OAuth authentication using Keycloak + Keycloak Authorization Services based authorization when Kafka runs in KRaft mode + * + * This test assumes there are multiple listeners configured with OAUTHBEARER or PLAIN support, but each configured differently + * - configured with different options, or different realm. + * + * There is KeycloakAuthorizer configured on the Kafka broker. + */ +public class KeycloakRaftAuthorizationTests { + + @ClassRule + public static TestContainersWatcher environment = + new TestContainersWatcher(new File("docker-compose.yml")) + .withServices("keycloak", "kafka", "kafka-acls") + + // ensure kafka has started + .waitingFor("kafka", Wait.forLogMessage(".*started \\(kafka.server.KafkaRaftServer\\).*", 1) + .withStartupTimeout(Duration.ofSeconds(60))); + + // ensure ACLs for user 'alice' have been added + // Moved into test code: waitForACLs() + // Logging has changed, and it would require very verbose logging to possibly detect this from kafka logs + + // ensure a grants fetch request to 'keycloak' has been performed by authorizer's grants refresh job + // In KRaft mode the inter-broker connection doesn't seem to happen, so there are no OAuth authenticated sessions before starting the tests + //.waitingFor("kafka", Wait.forLogMessage(".*after: \\{\\}.*", 1) + // .withStartupTimeout(Duration.ofSeconds(210))); + + @Rule + public TestRule logCollector = new TestContainersLogCollector(environment); + + private static final Logger log = LoggerFactory.getLogger(KeycloakRaftAuthorizationTests.class); + + private static final String JWT_LISTENER = "kafka:9092"; + private static final String INTROSPECT_LISTENER = "kafka:9093"; + private static final String JWTPLAIN_LISTENER = "kafka:9094"; + private static final String INTROSPECTPLAIN_LISTENER = "kafka:9095"; + private static final String JWTREFRESH_LISTENER = "kafka:9096"; + + private static final String PLAIN_LISTENER = "kafka:9100"; + + @Test + public void doTest() throws Exception { + try { + String kafkaContainer = environment.getContainerByServiceName("kafka_1").get().getContainerInfo().getName().substring(1); + + logStart("KeycloakRaftAuthorizationTest :: ConfigurationTest"); + new ConfigurationTest(kafkaContainer).doTest(); + + logStart("KeycloakRaftAuthorizationTest :: MetricsTest"); + MetricsTest.doTest(); + + // Ensure ACLs have been added to Kafka cluster + waitForACLs(); + + logStart("KeycloakRaftAuthorizationTest :: MultiSaslTests"); + new MultiSaslTest(kafkaContainer).doTest(); + + logStart("KeycloakRaftAuthorizationTest :: JwtValidationAuthzTest"); + new 
BasicTest(JWT_LISTENER, false).doTest();
+
+            logStart("KeycloakRaftAuthorizationTest :: IntrospectionValidationAuthzTest");
+            new BasicTest(INTROSPECT_LISTENER, false).doTest();
+
+            logStart("KeycloakRaftAuthorizationTest :: OAuthOverPlain + JwtValidationAuthzTest");
+            new OAuthOverPlainTest(JWTPLAIN_LISTENER, true).doTest();
+
+            logStart("KeycloakRaftAuthorizationTest :: OAuthOverPlain + IntrospectionValidationAuthzTest");
+            new OAuthOverPlainTest(INTROSPECTPLAIN_LISTENER, true).doTest();
+
+            logStart("KeycloakRaftAuthorizationTest :: OAuthOverPlain + FloodTest");
+            new FloodTest(kafkaContainer, JWTPLAIN_LISTENER, true).doTest();
+
+            logStart("KeycloakRaftAuthorizationTest :: JWT FloodTest");
+            new FloodTest(kafkaContainer, JWT_LISTENER, false).doTest();
+
+            logStart("KeycloakRaftAuthorizationTest :: Introspection FloodTest");
+            new FloodTest(kafkaContainer, INTROSPECT_LISTENER, false).doTest();
+
+            // This test has to be the last one - it changes the team-a-client, and team-b-client permissions in Keycloak
+            logStart("KeycloakRaftAuthorizationTest :: JwtValidationAuthzTest + RefreshGrants");
+            new RefreshTest(JWTREFRESH_LISTENER, false).doTest();
+
+        } catch (Throwable e) {
+            log.error("Keycloak Raft Authorization Test failed: ", e);
+            throw e;
+        }
+    }
+
+    private void waitForACLs() throws Exception {
+
+        // Create admin client using user `admin:admin-password` over PLAIN listener (port 9100)
+        AdminClient adminClient = Common.buildAdminClientForPlain(PLAIN_LISTENER, "admin");
+
+        TestUtil.waitForCondition(() -> {
+            try {
+                Collection<AclBinding> result = adminClient.describeAcls(new AclBindingFilter(ResourcePatternFilter.ANY,
+                        new AccessControlEntryFilter("User:alice", null, AclOperation.IDEMPOTENT_WRITE, AclPermissionType.ALLOW))).values().get();
+                for (AclBinding acl : result) {
+                    if (AclOperation.IDEMPOTENT_WRITE.equals(acl.entry().operation())) {
+                        return true;
+                    }
+                }
+                return false;
+
+            } catch (Exception e) {
+                throw new RuntimeException("ACLs for User:alice could not be retrieved: ", e);
+            }
+        }, 500, 210);
+    }
+
+    private void logStart(String msg) {
+        System.out.println();
+        System.out.println("======== " + msg);
+        System.out.println();
+    }
+}
diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MetricsTest.java b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MetricsTest.java
new file mode 100644
index 00000000..004a4cc2
--- /dev/null
+++ b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MetricsTest.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2017-2020, Strimzi authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.strimzi.testsuite.oauth.authz;
+
+import io.strimzi.testsuite.oauth.common.TestMetrics;
+import org.junit.Assert;
+
+import java.math.BigDecimal;
+import java.net.URI;
+
+import static io.strimzi.testsuite.oauth.common.TestMetrics.getPrometheusMetrics;
+
+public class MetricsTest {
+
+    private static final String AUTH_HOST_PORT = "keycloak:8080";
+    private static final String REALM = "kafka-authz";
+    private static final String JWKS_PATH = "/auth/realms/" + REALM + "/protocol/openid-connect/certs";
+
+    public static void doTest() throws Exception {
+
+        TestMetrics metrics = getPrometheusMetrics(URI.create("http://kafka:9404/metrics"));
+        BigDecimal value = metrics.getValueSum("strimzi_oauth_http_requests_count", "kind", "jwks", "host", AUTH_HOST_PORT, "path", JWKS_PATH, "outcome", "success");
+        Assert.assertTrue("strimzi_oauth_http_requests_count for jwks > 0", value.doubleValue() > 0.0);
+
+        value = metrics.getValueSum("strimzi_oauth_http_requests_totaltimems", "kind", "jwks", "host", AUTH_HOST_PORT, "path", JWKS_PATH, "outcome", "success");
+        Assert.assertTrue("strimzi_oauth_http_requests_totaltimems for jwks > 0.0", value.doubleValue() > 0.0);
+
+        // Across all the listeners there should only be 2 client authentication requests - those for the inter-broker connection on the JWT listener
+        value = metrics.getValueSum("strimzi_oauth_authentication_requests_count", "kind", "client-auth", "outcome", "success");
+        Assert.assertEquals("strimzi_oauth_authentication_requests_count for client-auth == 2", 2, value.intValue());
+
+        value = metrics.getValueSum("strimzi_oauth_authentication_requests_totaltimems", "kind", "client-auth", "outcome", "success");
+        Assert.assertTrue("strimzi_oauth_authentication_requests_totaltimems for client-auth > 0.0", value.doubleValue() > 0.0);
+    }
+
+    public static void doTest2() throws Exception {
+
+        final String tokenPath = "/auth/realms/" + REALM + "/protocol/openid-connect/token";
+
+        TestMetrics metrics = getPrometheusMetrics(URI.create("http://kafka:9404/metrics"));
+
+        //// Inter-broker auth triggered the only successful validation request
+        // No inter-broker auth yet at this point right after server startup ???
+        BigDecimal value = metrics.getValueSum("strimzi_oauth_validation_requests_count", "kind", "jwks", "mechanism", "OAUTHBEARER", "outcome", "success");
+        Assert.assertEquals("strimzi_oauth_validation_requests_count for jwks == 1", 1, value.intValue());
+
+        value = metrics.getValueSum("strimzi_oauth_validation_requests_totaltimems", "kind", "jwks", "mechanism", "OAUTHBEARER", "outcome", "success");
+        Assert.assertTrue("strimzi_oauth_validation_requests_totaltimems for jwks > 0.0", value.doubleValue() > 0.0);
+
+        value = metrics.getValueSum("strimzi_oauth_http_requests_count", "kind", "keycloak-authorization", "host", AUTH_HOST_PORT, "path", tokenPath, "outcome", "error");
+        Assert.assertTrue("strimzi_oauth_http_requests_count for keycloak-authorization > 0.0", value.doubleValue() > 0.0);
+    }
+}
diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MultiSaslTest.java b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MultiSaslTest.java
new file mode 100644
index 00000000..3db2bb2b
--- /dev/null
+++ b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MultiSaslTest.java
@@ -0,0 +1,261 @@
+/*
+ * Copyright 2017-2021, Strimzi authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). + */ +package io.strimzi.testsuite.oauth.authz; + +import io.strimzi.kafka.oauth.client.ClientConfig; +import io.strimzi.testsuite.oauth.common.TestMetrics; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.junit.Assert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.math.BigDecimal; +import java.net.URI; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import static io.strimzi.testsuite.oauth.authz.Common.buildProducerConfigOAuthBearer; +import static io.strimzi.testsuite.oauth.authz.Common.buildProducerConfigPlain; +//import static io.strimzi.testsuite.oauth.authz.Common.buildProducerConfigScram; +import static io.strimzi.testsuite.oauth.common.TestMetrics.getPrometheusMetrics; +import static io.strimzi.testsuite.oauth.common.TestUtil.getContainerLogsForString; + +public class MultiSaslTest { + + private static final Logger log = LoggerFactory.getLogger(MultiSaslTest.class); + + private static final String PLAIN_LISTENER = "kafka:9100"; + + // No support for SCRAM in KRaft mode + //private static final String SCRAM_LISTENER = "kafka:9101"; + + private static final String JWT_LISTENER = "kafka:9092"; + private static final String JWTPLAIN_LISTENER = "kafka:9094"; + + private final String kafkaContainer; + + MultiSaslTest(String kafkaContainer) { + this.kafkaContainer = kafkaContainer; + } + + public void doTest() throws Exception { + + // bobby:bobby-secret is defined in docker-compose.yaml in the PLAIN listener configuration (port 9100) + String username = "bobby"; + String password = "bobby-secret"; + + // for metrics + String authHostPort = "keycloak:8080"; + String realm = "kafka-authz"; + String tokenPath = "/auth/realms/" + realm + "/protocol/openid-connect/token"; + + // Producing to PLAIN listener using SASL/PLAIN should succeed. + // The necessary ACLs have been added by 'docker/kafka-acls/scripts/add-acls.sh' + Properties producerProps = producerConfigPlain(PLAIN_LISTENER, username, password); + produceToTopic("KeycloakAuthorizationTest-multiSaslTest-plain", producerProps); + + try { + produceToTopic("KeycloakAuthorizationTest-multiSaslTest-plain-denied", producerProps); + Assert.fail("Should have failed"); + } catch (Exception ignored) { + } + + // No support for SCRAM in KRaft mode + // Producing to SCRAM listener using SASL_SCRAM-SHA-512 should fail. + //producerProps = producerConfigScram(SCRAM_LISTENER, username, password); + //try { + // produceToTopic("KeycloakAuthorizationTest-multiSaslTest-scram", producerProps); + // Assert.fail("Should have failed"); + //} catch (Exception ignored) { + //} + + + // No support for SCRAM in KRaft mode + // alice:alice-secret (User 'alice' was configured for SASL SCRAM in 'docker/kafka/scripts/start.sh') + //username = "alice"; + //password = "alice-secret"; + + // Producing to PLAIN listener using SASL/PLAIN should fail. 
+ // User 'alice' _has not_ been configured for PLAIN in PLAIN listener configuration in 'docker-compose.yml' + //producerProps = producerConfigPlain(PLAIN_LISTENER, username, password); + //try { + // produceToTopic("KeycloakAuthorizationTest-multiSaslTest-plain", producerProps); + // Assert.fail("Should have failed"); + //} catch (Exception ignored) { + //} + + // No support for SCRAM in KRaft mode + // Producing to SCRAM listener using SASL_SCRAM-SHA-512 should succeed. + // The necessary ACLs have been added by 'docker/kafka-acls/scripts/add-acls.sh' + //producerProps = producerConfigScram(SCRAM_LISTENER, username, password); + //produceToTopic("KeycloakAuthorizationTest-multiSaslTest-scram", producerProps); + //try { + // produceToTopic("KeycloakAuthorizationTest-multiSaslTest-scram-denied", producerProps); + // Assert.fail("Should have failed"); + //} catch (Exception ignored) { + //} + + // OAuth authentication should fail + //try { + // Common.loginWithUsernamePassword( + // URI.create("http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token"), + // username, password, "kafka-cli"); + + // Assert.fail("Should have failed"); + //} catch (Exception ignored) { + //} + + + // alice:alice-password + username = "alice"; + password = "alice-password"; + + // Producing to PLAIN listener using SASL/PLAIN should fail. + // User 'alice' was not configured in PLAIN listener jaas configuration (port 9100) in 'docker-compose.yml' + producerProps = producerConfigPlain(PLAIN_LISTENER, username, password); + try { + produceToTopic("KeycloakAuthorizationTest-multiSaslTest-plain", producerProps); + Assert.fail("Should have failed"); + } catch (Exception ignored) { + } + + // No support for SCRAM in KRaft mode + // Producing to SCRAM listener using SASL_SCRAM-SHA-512 should fail. 
+        // User 'alice' was configured for SASL SCRAM in 'docker/kafka/scripts/start.sh' but with a different password
+        //producerProps = producerConfigScram(SCRAM_LISTENER, username, password);
+        //try {
+        //    produceToTopic("KeycloakAuthorizationTest-multiSaslTest-scram", producerProps);
+        //    Assert.fail("Should have failed");
+        //} catch (Exception ignored) {
+        //}
+
+        // Test the grants reuse feature
+        int fetchGrantsCount = currentFetchGrantsLogCount();
+        checkAuthorizationGrantsReuse(0);
+
+        // Producing to JWT listener using SASL/OAUTHBEARER with an access token should succeed
+        String accessToken = Common.loginWithUsernamePassword(
+                URI.create("http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token"),
+                username, password, "kafka-cli");
+        producerProps = producerConfigOAuthBearerAccessToken(JWT_LISTENER, accessToken);
+        produceToTopic("KeycloakAuthorizationTest-multiSaslTest-oauthbearer", producerProps);
+
+        // Test the grants reuse feature
+        checkAuthorizationGrantsReuse(2);
+        checkGrantsFetchCountDiff(fetchGrantsCount);
+
+        //TODO: Remove these
+        // alice:alice-password
+        //Properties producerProps;
+        //String username = "alice";
+        //String password = "alice-password";
+        //String accessToken = Common.loginWithUsernamePassword(
+        //        URI.create("http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token"),
+        //        username, password, "kafka-cli");
+
+
+        // Producing to JWTPLAIN listener using SASL/PLAIN with $accessToken should succeed
+        producerProps = producerConfigPlain(JWTPLAIN_LISTENER, username, "$accessToken:" + accessToken);
+        produceToTopic("KeycloakAuthorizationTest-multiSaslTest-oauth-over-plain", producerProps);
+
+        // Test the grants reuse feature
+        checkGrantsFetchCountDiff(fetchGrantsCount);
+
+        // check metrics
+        checkAuthorizationRequestsMetrics(authHostPort, tokenPath);
+        checkGrantsMetrics(authHostPort, tokenPath);
+    }
+
+    private void checkAuthorizationGrantsReuse(int numberOfReuses) {
+        List<String> lines = getContainerLogsForString(kafkaContainer, "Found existing grants for the token on another session");
+
+        if (numberOfReuses == 0) {
+            Assert.assertEquals("There should be no reuse of existing grants in Kafka log yet", 0, lines.size());
+        } else {
+            Assert.assertTrue("There should be " + numberOfReuses + " reuses of existing grants in Kafka log", lines.size() >= numberOfReuses);
+        }
+    }
+
+    private int currentFetchGrantsLogCount() {
+        List<String> lines = getContainerLogsForString(kafkaContainer, "Fetching grants from Keycloak");
+        return lines.size();
+    }
+
+    private void checkGrantsFetchCountDiff(int previousFetchGrantsCount) {
+        int current = currentFetchGrantsLogCount();
+        Assert.assertEquals("Expected one grants fetch", 1, current - previousFetchGrantsCount);
+    }
+
+    private static void checkGrantsMetrics(String authHostPort, String tokenPath) throws IOException {
+        TestMetrics metrics = getPrometheusMetrics(URI.create("http://kafka:9404/metrics"));
+        BigDecimal value = metrics.getValueSum("strimzi_oauth_http_requests_count", "kind", "keycloak-authorization", "host", authHostPort, "path", tokenPath, "outcome", "success");
+        Assert.assertTrue("strimzi_oauth_http_requests_count for keycloak-authorization > 0", value.intValue() > 0);
+
+        value = metrics.getValueSum("strimzi_oauth_http_requests_totaltimems", "kind", "keycloak-authorization", "host", authHostPort, "path", tokenPath, "outcome", "success");
+        Assert.assertTrue("strimzi_oauth_http_requests_totaltimems for keycloak-authorization > 0", value.doubleValue() > 0.0);
+
+        // TODO: Why does this fail in KRaft? Why are there 403 responses in ZooKeeper mode, but not in KRaft mode?
+        // Apparently the inter-broker session to the JWT listener is not attempted in KRaft mode
+        //value = metrics.getValueSum("strimzi_oauth_http_requests_count", "kind", "keycloak-authorization", "host", authHostPort, "path", tokenPath, "outcome", "error", "status", "403");
+        //Assert.assertTrue("strimzi_oauth_http_requests_count with no-grants for keycloak-authorization > 0", value.intValue() > 0);
+
+        //value = metrics.getValueSum("strimzi_oauth_http_requests_totaltimems", "kind", "keycloak-authorization", "host", authHostPort, "path", tokenPath, "outcome", "error", "status", "403");
+        //Assert.assertTrue("strimzi_oauth_http_requests_totaltimems with no-grants for keycloak-authorization > 0", value.doubleValue() > 0.0);
+    }
+
+    private static void checkAuthorizationRequestsMetrics(String authHostPort, String tokenPath) throws IOException {
+        TestMetrics metrics = getPrometheusMetrics(URI.create("http://kafka:9404/metrics"));
+
+        BigDecimal value = metrics.getValueSum("strimzi_oauth_authorization_requests_count", "kind", "keycloak-authorization", "host", authHostPort, "path", tokenPath, "outcome", "success");
+        Assert.assertTrue("strimzi_oauth_authorization_requests_count for successful keycloak-authorization > 0", value.intValue() > 0);
+
+        value = metrics.getValueSum("strimzi_oauth_authorization_requests_totaltimems", "kind", "keycloak-authorization", "host", authHostPort, "path", tokenPath, "outcome", "success");
+        Assert.assertTrue("strimzi_oauth_authorization_requests_totaltimems for successful keycloak-authorization > 0", value.doubleValue() > 0.0);
+
+        value = metrics.getValueSum("strimzi_oauth_authorization_requests_count", "kind", "keycloak-authorization", "host", authHostPort, "path", tokenPath, "outcome", "error");
+        Assert.assertEquals("strimzi_oauth_authorization_requests_count for failed keycloak-authorization == 0", 0, value.intValue());
+
+        value = metrics.getValueSum("strimzi_oauth_authorization_requests_totaltimems", "kind", "keycloak-authorization", "host", authHostPort, "path", tokenPath, "outcome", "error");
+        Assert.assertEquals("strimzi_oauth_authorization_requests_totaltimems for failed keycloak-authorization == 0", 0.0, value.doubleValue(), 0.0);
+    }
+
+    // No support for SCRAM in KRaft mode
+    //private static Properties producerConfigScram(String kafkaBootstrap, String username, String password) {
+    //    Map<String, String> scramConfig = new HashMap<>();
+    //    scramConfig.put("username", username);
+    //    scramConfig.put("password", password);
+
+    //    return buildProducerConfigScram(kafkaBootstrap, scramConfig);
+    //}
+
+    private static Properties producerConfigPlain(String kafkaBootstrap, String username, String password) {
+        Map<String, String> plainConfig = new HashMap<>();
+        plainConfig.put("username", username);
+        plainConfig.put("password", password);
+
+        return buildProducerConfigPlain(kafkaBootstrap, plainConfig);
+    }
+
+    private static Properties producerConfigOAuthBearerAccessToken(String kafkaBootstrap, String accessToken) {
+        Map<String, String> oauthConfig = new HashMap<>();
+        oauthConfig.put(ClientConfig.OAUTH_ACCESS_TOKEN, accessToken);
+        oauthConfig.put(ClientConfig.OAUTH_USERNAME_CLAIM, "preferred_username");
+
+        return buildProducerConfigOAuthBearer(kafkaBootstrap, oauthConfig);
+    }
+
+    private static void produceToTopic(String topic, Properties config) throws Exception {
+
+        Producer<String, String> producer = new KafkaProducer<>(config);
+
+        producer.send(new ProducerRecord<>(topic, "The Message")).get();
+        log.debug("Produced The 
Message"); + } +} diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/OAuthOverPlainTest.java b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/OAuthOverPlainTest.java new file mode 100644 index 00000000..f7bb5275 --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/OAuthOverPlainTest.java @@ -0,0 +1,12 @@ +/* + * Copyright 2017-2020, Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). + */ +package io.strimzi.testsuite.oauth.authz; + +public class OAuthOverPlainTest extends BasicTest { + + public OAuthOverPlainTest(String kafkaBootstrap, boolean oauthOverPlain) { + super(kafkaBootstrap, oauthOverPlain); + } +} diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/RefreshTest.java b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/RefreshTest.java new file mode 100644 index 00000000..4d33bbb4 --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/src/test/java/io/strimzi/testsuite/oauth/authz/RefreshTest.java @@ -0,0 +1,243 @@ +/* + * Copyright 2017-2020, Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). + */ +package io.strimzi.testsuite.oauth.authz; + +import com.fasterxml.jackson.databind.JsonNode; +import io.strimzi.kafka.oauth.common.HttpUtil; +import org.apache.kafka.clients.producer.Producer; +import org.junit.Assert; + +import java.io.IOException; +import java.net.URI; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +public class RefreshTest extends BasicTest { + + RefreshTest(String kafkaBootstrap, boolean oauthOverPlain) { + super(kafkaBootstrap, oauthOverPlain); + } + + public void doTest() throws Exception { + + tokens = authenticateAllActors(); + + testTeamAClientPart1(); + + testTeamBClientPart1(); + + createTopicAsClusterManager(); + + testTeamAClientPart2(); + + testTeamBClientPart2(); + + testClusterManager(); + + changePermissionsForClients(); + + // wait 15 seconds for permissions changes to take effect on the broker + Thread.sleep(15000); + + testChangedPermissions(); + + cleanup(); + } + + private void changePermissionsForClients() throws IOException { + + String token = loginWithUsernamePassword(URI.create("http://keycloak:8080/auth/realms/master/protocol/openid-connect/token"), + "admin", "admin", "admin-cli"); + + String authorization = "Bearer " + token; + + // get the id of 'kafka' client + // + // GET http://localhost:8080/auth/admin/realms/kafka-authz/clients?first=0&max=20&search=true + // + // 
[{"id":"17d150dc-471d-464f-bb45-c038b573e313","clientId":"account","name":"${client_account}","rootUrl":"${authBaseUrl}","baseUrl":"/realms/kafka-authz/account/","surrogateAuthRequired":false,"enabled":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","defaultRoles":["view-profile","manage-account"],"redirectUris":["/realms/kafka-authz/account/*"],"webOrigins":[],"notBefore":0,"bearerOnly":false,"consentRequired":false,"standardFlowEnabled":true,"implicitFlowEnabled":false,"directAccessGrantsEnabled":false,"serviceAccountsEnabled":false,"publicClient":false,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":false,"nodeReRegistrationTimeout":0,"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}},{"id":"0213b06d-3b2b-405e-8b40-d5c0ae59d234","clientId":"account-console","name":"${client_account-console}","rootUrl":"${authBaseUrl}","baseUrl":"/realms/kafka-authz/account/","surrogateAuthRequired":false,"enabled":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","redirectUris":["/realms/kafka-authz/account/*"],"webOrigins":[],"notBefore":0,"bearerOnly":false,"consentRequired":false,"standardFlowEnabled":true,"implicitFlowEnabled":false,"directAccessGrantsEnabled":false,"serviceAccountsEnabled":false,"publicClient":true,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{"pkce.code.challenge.method":"S256"},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":false,"nodeReRegistrationTimeout":0,"protocolMappers":[{"id":"176454d1-0065-462a-94e7-9b37fcfea4ab","name":"audience 
resolve","protocol":"openid-connect","protocolMapper":"oidc-audience-resolve-mapper","consentRequired":false,"config":{}}],"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}},{"id":"d8338593-961f-4039-8705-22914ac44603","clientId":"admin-cli","name":"${client_admin-cli}","surrogateAuthRequired":false,"enabled":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","redirectUris":[],"webOrigins":[],"notBefore":0,"bearerOnly":false,"consentRequired":false,"standardFlowEnabled":false,"implicitFlowEnabled":false,"directAccessGrantsEnabled":true,"serviceAccountsEnabled":false,"publicClient":true,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":false,"nodeReRegistrationTimeout":0,"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}},{"id":"018d3790-f070-4c0f-9b45-600b690b78d8","clientId":"broker","name":"${client_broker}","surrogateAuthRequired":false,"enabled":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","redirectUris":[],"webOrigins":[],"notBefore":0,"bearerOnly":false,"consentRequired":false,"standardFlowEnabled":true,"implicitFlowEnabled":false,"directAccessGrantsEnabled":false,"serviceAccountsEnabled":false,"publicClient":false,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":false,"nodeReRegistrationTimeout":0,"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}},{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","clientId":"kafka","surrogateAuthRequired":false,"enabled":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","redirectUris":[],"webOrigins":[],"notBefore":0,"bearerOnly":false,"consentRequired":false,"standardFlowEnabled":false,"implicitFlowEnabled":false,"directAccessGrantsEnabled":true,"serviceAccountsEnabled":true,"authorizationServicesEnabled":true,"publicClient":false,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":true,"nodeReRegistrationTimeout":-1,"protocolMappers":[{"id":"49b90fc0-6b50-4528-8988-e5af613ba2b1","name":"Client ID","protocol":"openid-connect","protocolMapper":"oidc-usersessionmodel-note-mapper","consentRequired":false,"config":{"user.session.note":"clientId","id.token.claim":"true","access.token.claim":"true","claim.name":"clientId","jsonType.label":"String"}},{"id":"d52c6693-ee15-462a-95ba-3d2329d84805","name":"Client Host","protocol":"openid-connect","protocolMapper":"oidc-usersessionmodel-note-mapper","consentRequired":false,"config":{"user.session.note":"clientHost","id.token.claim":"true","access.token.claim":"true","claim.name":"clientHost","jsonType.label":"String"}},{"id":"34f58858-e21f-4c9b-b091-e3c68dca1307","name":"Client IP 
Address","protocol":"openid-connect","protocolMapper":"oidc-usersessionmodel-note-mapper","consentRequired":false,"config":{"user.session.note":"clientAddress","id.token.claim":"true","access.token.claim":"true","claim.name":"clientAddress","jsonType.label":"String"}}],"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}},{"id":"79a562db-f666-401f-bd34-a27fc1183ad5","clientId":"kafka-cli","surrogateAuthRequired":false,"enabled":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","redirectUris":[],"webOrigins":[],"notBefore":0,"bearerOnly":false,"consentRequired":false,"standardFlowEnabled":false,"implicitFlowEnabled":false,"directAccessGrantsEnabled":true,"serviceAccountsEnabled":false,"publicClient":true,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":true,"nodeReRegistrationTimeout":-1,"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}},{"id":"cfcb911f-9dd4-4f86-bc45-3468b4515df9","clientId":"realm-management","name":"${client_realm-management}","surrogateAuthRequired":false,"enabled":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","redirectUris":[],"webOrigins":[],"notBefore":0,"bearerOnly":true,"consentRequired":false,"standardFlowEnabled":true,"implicitFlowEnabled":false,"directAccessGrantsEnabled":false,"serviceAccountsEnabled":false,"publicClient":false,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":false,"nodeReRegistrationTimeout":0,"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}},{"id":"488f2812-8efd-4626-9df5-37d3e31a2ad4","clientId":"security-admin-console","name":"${client_security-admin-console}","rootUrl":"${authAdminUrl}","baseUrl":"/admin/kafka-authz/console/","surrogateAuthRequired":false,"enabled":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","redirectUris":["/admin/kafka-authz/console/*"],"webOrigins":["+"],"notBefore":0,"bearerOnly":false,"consentRequired":false,"standardFlowEnabled":true,"implicitFlowEnabled":false,"directAccessGrantsEnabled":false,"serviceAccountsEnabled":false,"publicClient":true,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{"pkce.code.challenge.method":"S256"},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":false,"nodeReRegistrationTimeout":0,"protocolMappers":[{"id":"64b56942-4724-4b9a-b349-f817de5b34d3","name":"locale","protocol":"openid-connect","protocolMapper":"oidc-usermodel-attribute-mapper","consentRequired":false,"config":{"userinfo.token.claim":"true","user.attribute":"locale","id.token.claim":"true","access.token.claim":"true","claim.name":"locale","jsonType.label":"String"}}],"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}},{"id":"7f99c353-23a6-49c4-9243-5b033acc9b38","clientId":"team-a-client","surrogateAuthRequired":false,"enabled
":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","redirectUris":[],"webOrigins":[],"notBefore":0,"bearerOnly":false,"consentRequired":false,"standardFlowEnabled":false,"implicitFlowEnabled":false,"directAccessGrantsEnabled":true,"serviceAccountsEnabled":true,"publicClient":false,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":true,"nodeReRegistrationTimeout":-1,"protocolMappers":[{"id":"475b3b86-9abd-4f42-a985-9d2100a20a8c","name":"Client IP Address","protocol":"openid-connect","protocolMapper":"oidc-usersessionmodel-note-mapper","consentRequired":false,"config":{"user.session.note":"clientAddress","id.token.claim":"true","access.token.claim":"true","claim.name":"clientAddress","jsonType.label":"String"}},{"id":"72b74e2e-8c03-4cfd-aa4d-f81739a0f337","name":"Client ID","protocol":"openid-connect","protocolMapper":"oidc-usersessionmodel-note-mapper","consentRequired":false,"config":{"user.session.note":"clientId","id.token.claim":"true","access.token.claim":"true","claim.name":"clientId","jsonType.label":"String"}},{"id":"d69c1bd7-ad2a-4d9b-b89e-e4d7605021c3","name":"Client Host","protocol":"openid-connect","protocolMapper":"oidc-usersessionmodel-note-mapper","consentRequired":false,"config":{"user.session.note":"clientHost","id.token.claim":"true","access.token.claim":"true","claim.name":"clientHost","jsonType.label":"String"}}],"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}},{"id":"d6770bfa-e4b8-43bd-9973-b63ab34da150","clientId":"team-b-client","surrogateAuthRequired":false,"enabled":true,"alwaysDisplayInConsole":false,"clientAuthenticatorType":"client-secret","redirectUris":[],"webOrigins":[],"notBefore":0,"bearerOnly":false,"consentRequired":false,"standardFlowEnabled":false,"implicitFlowEnabled":false,"directAccessGrantsEnabled":true,"serviceAccountsEnabled":true,"publicClient":false,"frontchannelLogout":false,"protocol":"openid-connect","attributes":{},"authenticationFlowBindingOverrides":{},"fullScopeAllowed":true,"nodeReRegistrationTimeout":-1,"protocolMappers":[{"id":"0f7e822f-2170-4f54-8e6d-6e514d48b26e","name":"Client IP Address","protocol":"openid-connect","protocolMapper":"oidc-usersessionmodel-note-mapper","consentRequired":false,"config":{"user.session.note":"clientAddress","id.token.claim":"true","access.token.claim":"true","claim.name":"clientAddress","jsonType.label":"String"}},{"id":"ce6ee84f-4a6a-4969-bebe-7caf3b066680","name":"Client ID","protocol":"openid-connect","protocolMapper":"oidc-usersessionmodel-note-mapper","consentRequired":false,"config":{"user.session.note":"clientId","id.token.claim":"true","access.token.claim":"true","claim.name":"clientId","jsonType.label":"String"}},{"id":"fddf832d-1e35-4707-961d-58537d6b7fc7","name":"Client Host","protocol":"openid-connect","protocolMapper":"oidc-usersessionmodel-note-mapper","consentRequired":false,"config":{"user.session.note":"clientHost","id.token.claim":"true","access.token.claim":"true","claim.name":"clientHost","jsonType.label":"String"}}],"defaultClientScopes":["web-origins","role_list","roles","profile","email"],"optionalClientScopes":["address","phone","offline_access","microprofile-jwt"],"access":{"view":true,"configure":true,"manage":true}}] + + String clientsUrl = "http://keycloak:8080/auth/admin/realms/kafka-authz/clients"; + JsonNode 
result = HttpUtil.get(URI.create(clientsUrl), authorization, JsonNode.class); + + String clientId = null; + + Iterator it = result.elements(); + while (it.hasNext()) { + JsonNode client = it.next(); + if ("kafka".equals(client.get("clientId").asText())) { + clientId = client.get("id").asText(); + break; + } + } + + Assert.assertNotNull("Client 'kafka'", clientId); + + // get the ids of all the resources - extract the ids of 'Topic:a_*' and 'kafka-cluster:my-cluster,Topic:b_*' + // + // GET http://localhost:8080/auth/admin/realms/kafka-authz/clients/f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8/authz/resource-server/resource?deep=false&first=0&max=20 + // + // [{"name":"Cluster:*","type":"Cluster","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"_id":"4ba45366-17b8-4087-8f04-61dcfe955a19","uris":[]},{"name":"Group:*","type":"Group","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"displayName":"Any group","_id":"299327bc-d97c-4e09-abac-cad81370d79d","uris":[]},{"name":"Group:a_*","type":"Group","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"displayName":"Groups that start with a_","_id":"f97f031a-367b-47c5-bb93-c60362c56e0f","uris":[]},{"name":"Group:x_*","type":"Group","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"displayName":"Consumer groups that start with x_","_id":"4dcaa3f7-6b07-401e-8d23-a7616e7a4b1c","uris":[]},{"name":"Topic:*","type":"Topic","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"displayName":"Any topic","_id":"3dd0074b-b155-4e8c-872a-16033a1927fd","uris":[]},{"name":"Topic:a_*","type":"Topic","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"displayName":"Topics that start with a_","_id":"32a72563-080b-4c0e-a149-0ae0b52dcec9","uris":[]},{"name":"Topic:x_*","type":"Topic","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"displayName":"Topics that start with x_","_id":"b89421df-3e16-4256-891c-ab0914abfd16","uris":[]},{"name":"kafka-cluster:my-cluster,Cluster:*","type":"Cluster","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"displayName":"Cluster scope on my-cluster","_id":"d15001bd-2362-4fd1-8356-b538ae29935e","uris":[]},{"name":"kafka-cluster:my-cluster,Group:*","type":"Group","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"displayName":"Any group on my-cluster","_id":"b56c6111-1a0b-41b0-8b6d-bc10e6929ac3","uris":[]},{"name":"kafka-cluster:my-cluster,Topic:*","type":"Topic","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"displayName":"Any topic on my-cluster","_id":"018d340f-76cb-4af9-87ef-effc2fc6f341","uris":[]},{"name":"kafka-cluster:my-cluster,Topic:b_*","type":"Topic","owner":{"id":"f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8","name":"kafka"},"ownerManagedAccess":false,"_id":"cc9d9f37-d7a0-4227-803d-456db28b264c","uris":[]}] + + Map resources = getAuthzResources(authorization, clientId); + + String aTopicsId = resources.get("Topic:a_*"); + String bTopicsId = resources.get("kafka-cluster:my-cluster,Topic:b_*"); + + Assert.assertNotNull("Resource for a_* topics", aTopicsId); + Assert.assertNotNull("Resource for b_* topics", bTopicsId); + + // get the ids of all the action scopes - extract the ids of 
'Describe' and 'Write' + // + // GET http://localhost:8080/auth/admin/realms/kafka-authz/clients/f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8/authz/resource-server/scope?deep=false&first=0&max=20 + // + // [{"id":"d83d4331-1f0d-42b5-85f0-92dd151fb714","name":"Alter"},{"id":"2e06038d-2fd7-4d28-9b4a-df99bf70061a","name":"AlterConfigs"},{"id":"f9e931a9-3837-4d4d-bf4f-669482dcfc1e","name":"ClusterAction"},{"id":"a79302ba-6e19-4dfb-9f44-96f0b47d1974","name":"Create"},{"id":"fd8dad1f-72ec-4aa5-8942-d257cd2491a4","name":"Delete"},{"id":"a4cb81e8-fb23-4507-a23b-d1720f51140f","name":"Describe"},{"id":"eb3a8e4d-1cda-418d-8e16-f374f3868fe5","name":"DescribeConfigs"},{"id":"a7790937-9114-4d0f-9dc2-6072a9639844","name":"IdempotentWrite"},{"id":"3390f8f5-e2f7-48b4-91a0-273bfb9ff70c","name":"Read"},{"id":"5146366c-e26b-4ad9-8afc-31bd0055fe36","name":"Write"}] + + Map scopes = getAuthzScopes(authorization, clientId); + + String describeScope = scopes.get("Describe"); + String writeScope = scopes.get("Write"); + + Assert.assertNotNull("'Describe' scope", describeScope); + Assert.assertNotNull("'Write' scope'", writeScope); + + // get the ids of all the policies - extract the ids of 'Dev Team A' and 'Dev Team B' + // + // GET http://localhost:8080/auth/admin/realms/kafka-authz/clients/f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8/authz/resource-server/policy?first=0&max=20&permission=false + // + // [{"id":"b62eeb43-cb4c-46e7-9689-b1133d751259","name":"ClusterManager Group","type":"group","logic":"POSITIVE","decisionStrategy":"UNANIMOUS","config":{"groups":"[{\"id\":\"7fd99c0b-feb0-4ff3-b1dc-8a2771bb80fa\",\"extendChildren\":false}]"}},{"id":"4029b042-4ee9-4eff-89c4-705177980f1b","name":"ClusterManager of my-cluster Group","type":"group","logic":"POSITIVE","decisionStrategy":"UNANIMOUS","config":{"groups":"[{\"id\":\"15035a89-f2c8-4aa6-a5ba-58f741c0ab20\",\"extendChildren\":false}]"}},{"id":"836b4fa0-c08f-4a30-be3a-cd80b7804348","name":"Default Policy","description":"A policy that grants access only for users within this realm","type":"js","logic":"POSITIVE","decisionStrategy":"AFFIRMATIVE","config":{"code":"// by default, grants any permission associated with this policy\n$evaluation.grant();\n"}},{"id":"1e9d9afe-a379-4f46-9d1a-9a7b10131110","name":"Dev Team A","type":"role","logic":"POSITIVE","decisionStrategy":"UNANIMOUS","config":{"roles":"[{\"id\":\"31e22e14-35c9-4dab-880f-50aef22de65c\",\"required\":true}]"}},{"id":"64a5b2d7-5aad-4e84-bedc-d49a851604aa","name":"Dev Team B","type":"role","logic":"POSITIVE","decisionStrategy":"UNANIMOUS","config":{"roles":"[{\"id\":\"58df35c5-6fae-43dc-9d25-5f50de22d8d8\",\"required\":true}]"}},{"id":"a1f3c6b5-1cdc-4886-a62f-a24d1404090e","name":"Ops Team","type":"role","logic":"POSITIVE","decisionStrategy":"UNANIMOUS","config":{"roles":"[{\"id\":\"b24efefd-dc67-4209-a65f-4d710a0b235c\",\"required\":true}]"}}] + + Map policies = getAuthzPolicies(authorization, clientId); + + String devTeamA = policies.get("Dev Team A"); + String devTeamB = policies.get("Dev Team B"); + + Assert.assertNotNull("'Dev Team A' policy", devTeamA); + Assert.assertNotNull("'Dev Team B' policy", devTeamB); + + // get the ids of all the permissions + // + // GET http://localhost:8080/auth/admin/realms/kafka-authz/clients/32795fd2-0438-4593-b1d1-26939c89c1fa/authz/resource-server/permission?first=0&max=20 + // + // [{"id":"d70bddaa-87bd-4bcd-ad78-be5fbb9cdce5","name":"ClusterManager Group has full access to cluster 
config","type":"resource","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"71738668-69ed-4348-a387-bcec52d82bb0","name":"ClusterManager Group has full access to manage and affect groups","type":"resource","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"55bc1419-41c2-4644-a2b4-676fd142b748","name":"ClusterManager Group has full access to manage and affect topics","type":"resource","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"a7ad2f09-532d-4f69-93d3-99a8b832d290","name":"ClusterManager of my-cluster Group has full access to cluster config on my-cluster","type":"resource","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"21aaff2f-1195-4819-a369-c79d62f4fa93","name":"ClusterManager of my-cluster Group has full access to consumer groups on my-cluster","type":"resource","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"a32879e4-90f7-4e5e-9be6-b90119e1078f","name":"ClusterManager of my-cluster Group has full access to topics on my-cluster","type":"resource","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"f4122657-bebd-447d-9bb8-f0daaffecbbd","name":"Dev Team A can use consumer groups that start with a_ on any cluster","type":"resource","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"4fe8f3fc-ab62-4a5c-819d-4c854297a15f","name":"Dev Team A can write to topics that start with x_ on any cluster","type":"scope","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"72583910-d3c5-4c09-a84b-8301520f2dd8","name":"Dev Team A owns topics that start with a_ on any cluster","type":"resource","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"fddc0f62-0e56-47ed-abb7-d67d06496189","name":"Dev Team B can read from topics that start with x_ on any cluster","type":"scope","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"0ec06a05-2f52-4387-bd59-bec07b76b839","name":"Dev Team B can update consumer group offsets that start with x_ on any cluster","type":"scope","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"},{"id":"8ccbc5dc-2855-4e5c-abca-214d6c87f807","name":"Dev Team B owns topics that start with b_ on cluster my-cluster","type":"resource","logic":"POSITIVE","decisionStrategy":"UNANIMOUS"}] + + String permissionsUrl = "http://keycloak:8080/auth/admin/realms/kafka-authz/clients/" + clientId + "/authz/resource-server/permission"; + result = HttpUtil.get(URI.create(permissionsUrl), authorization, JsonNode.class); + + String devTeamAPermission = null; + String devTeamBPermission = null; + + it = result.elements(); + while (it.hasNext()) { + JsonNode permission = it.next(); + if (permission.get("name").asText().startsWith("Dev Team A owns")) { + devTeamAPermission = permission.get("id").asText(); + } else if (permission.get("name").asText().startsWith("Dev Team B owns")) { + devTeamBPermission = permission.get("id").asText(); + } + } + + Assert.assertNotNull("'Dev Team A owns' permission", devTeamAPermission); + Assert.assertNotNull("'Dev Team B owns' permission", devTeamBPermission); + + // Grant team-a-client the permission to write to b_* topics, + // and team-b-client the permissions to write to a_* topics + + addPermissions(authorization, clientId, describeScope, writeScope, aTopicsId, bTopicsId, devTeamA, devTeamB); + + // Remove the ownership permissions to a_* topics from team-a-client + // and the ownership permissions to b_* topics from team-b-client + + removePermissions(authorization, clientId, devTeamAPermission, devTeamBPermission); + } + + private void removePermissions(String authorization, String clientId, String 
devTeamAPermission, String devTeamBPermission) throws IOException { + String permissionUrl = "http://keycloak:8080/auth/admin/realms/kafka-authz/clients/" + clientId + "/authz/resource-server/permission/" + devTeamAPermission; + HttpUtil.delete(URI.create(permissionUrl), authorization); + + permissionUrl = "http://keycloak:8080/auth/admin/realms/kafka-authz/clients/" + clientId + "/authz/resource-server/permission/" + devTeamBPermission; + HttpUtil.delete(URI.create(permissionUrl), authorization); + } + + private void addPermissions(String authorization, String clientId, String describeScope, String writeScope, String aTopicsId, String bTopicsId, String devTeamA, String devTeamB) throws IOException { + + // Create a new scope permission using the cluster-name:my-cluster,Topic:b_* resource id, 'Describe' and 'Write' scope ids, and 'Dev Team A' policy id + // + // POST http://localhost:8080/auth/admin/realms/kafka-authz/clients/f71ed8e3-8a4a-47f0-b2b5-0971bd93cea8/authz/resource-server/permission/scope + // + // {"type":"scope","logic":"POSITIVE","decisionStrategy":"UNANIMOUS","name":"Team B client can produce to topics starting with a_","resources":["32a72563-080b-4c0e-a149-0ae0b52dcec9"],"scopes":["5146366c-e26b-4ad9-8afc-31bd0055fe36","a4cb81e8-fb23-4507-a23b-d1720f51140f"],"policies":["64a5b2d7-5aad-4e84-bedc-d49a851604aa"]} + + String bodyPattern = "{\"type\":\"scope\",\"logic\":\"POSITIVE\",\"decisionStrategy\":\"UNANIMOUS\"" + + ",\"name\":\"%s\",\"resources\":[\"%s\"]" + + ",\"scopes\":[\"%s\",\"%s\"],\"policies\":[\"%s\"]}"; + + String permissionUrl = "http://keycloak:8080/auth/admin/realms/kafka-authz/clients/" + clientId + "/authz/resource-server/permission/scope"; + + String body = String.format(bodyPattern, "Dev Team A can write to topics that start with b_", + bTopicsId, describeScope, writeScope, devTeamA); + HttpUtil.post(URI.create(permissionUrl), authorization, "application/json", body, JsonNode.class); + + + // Repeat for Dev Team B by using the Topic:a_* resource id, 'Describe' and 'Write' scope ids, and 'Dev Team B' policy id + + body = String.format(bodyPattern, "Dev Team B can write to topics that start with a_", + aTopicsId, describeScope, writeScope, devTeamB); + HttpUtil.post(URI.create(permissionUrl), authorization, "application/json", body, JsonNode.class); + } + + private Map getAuthzScopes(String authorization, String clientId) throws IOException { + String scopesUrl = "http://keycloak:8080/auth/admin/realms/kafka-authz/clients/" + clientId + "/authz/resource-server/scope"; + return getAuthzList(URI.create(scopesUrl), authorization, "name", "id"); + } + + private Map getAuthzResources(String authorization, String clientId) throws IOException { + String resourcesUrl = "http://keycloak:8080/auth/admin/realms/kafka-authz/clients/" + clientId + "/authz/resource-server/resource"; + return getAuthzList(URI.create(resourcesUrl), authorization, "name", "_id"); + } + + private Map getAuthzPolicies(String authorization, String clientId) throws IOException { + String policiesUrl = "http://keycloak:8080/auth/admin/realms/kafka-authz/clients/" + clientId + "/authz/resource-server/policy"; + return getAuthzList(URI.create(policiesUrl), authorization, "name", "id"); + } + + private Map getAuthzList(URI url, String authorization, String keyAttr, String valueAttr) throws IOException { + JsonNode result = HttpUtil.get(url, authorization, JsonNode.class); + + Map items = new HashMap<>(); + Iterator it = result.elements(); + while (it.hasNext()) { + JsonNode resource = it.next(); + 
items.put(resource.get(keyAttr).asText(), resource.get(valueAttr).asText()); + } + return items; + } + + private void testChangedPermissions() throws Exception { + Producer teamAProducer = getProducer(TEAM_A_CLIENT); + // + // team-a-client should now succeed to produce to b_* topic + // + produce(teamAProducer, TOPIC_B); + + // + // team-a-client should no longer be able to write to a_* topic + // + produceFail(teamAProducer, TOPIC_A); + + + Producer teamBProducer = getProducer(TEAM_B_CLIENT); + // + // team-b-client should now succeed to produce to a_* topic + // + produce(teamBProducer, TOPIC_A); + + // + // team-b-client should no longer be able to write to b_* topic + // + produceFail(teamBProducer, TOPIC_B); + } + +} diff --git a/testsuite/keycloak-authz-kraft-tests/src/test/resources/simplelogger.properties b/testsuite/keycloak-authz-kraft-tests/src/test/resources/simplelogger.properties new file mode 100644 index 00000000..53f71fbd --- /dev/null +++ b/testsuite/keycloak-authz-kraft-tests/src/test/resources/simplelogger.properties @@ -0,0 +1,2 @@ +org.slf4j.simpleLogger.log.org.apache.kafka=OFF +org.slf4j.simpleLogger.log.io.strimzi=INFO \ No newline at end of file diff --git a/testsuite/keycloak-authz-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MultiSaslTest.java b/testsuite/keycloak-authz-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MultiSaslTest.java index 41e0c9da..1f18ac79 100644 --- a/testsuite/keycloak-authz-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MultiSaslTest.java +++ b/testsuite/keycloak-authz-tests/src/test/java/io/strimzi/testsuite/oauth/authz/MultiSaslTest.java @@ -44,7 +44,7 @@ public class MultiSaslTest { public void doTest() throws Exception { - // bobby:bobby-secret + // bobby:bobby-secret is defined in docker-compose.yaml in the PLAIN listener configuration (port 9100) String username = "bobby"; String password = "bobby-secret"; @@ -74,7 +74,7 @@ public void doTest() throws Exception { } - // alice:alice-secret + // alice:alice-secret (user 'alice' has been configured for SCRAM in 'docker/kafka/scripts/start.sh') username = "alice"; password = "alice-secret"; @@ -122,7 +122,7 @@ public void doTest() throws Exception { } // Producing to SCRAM listener using SASL_SCRAM-SHA-512 should fail. 
- // User 'alice' was configured for SASL in 'docker/kafka/scripts/start.sh' but with a different password + // User 'alice' was configured for SASL SCRAM in 'docker/kafka/scripts/start.sh' but with a different password producerProps = producerConfigScram(SCRAM_LISTENER, username, password); try { produceToTopic("KeycloakAuthorizationTest-multiSaslTest-scram", producerProps); diff --git a/testsuite/mockoauth-tests/src/test/java/io/strimzi/testsuite/oauth/mockoauth/Common.java b/testsuite/mockoauth-tests/src/test/java/io/strimzi/testsuite/oauth/mockoauth/Common.java index 2c4e890d..9ef4bec2 100644 --- a/testsuite/mockoauth-tests/src/test/java/io/strimzi/testsuite/oauth/mockoauth/Common.java +++ b/testsuite/mockoauth-tests/src/test/java/io/strimzi/testsuite/oauth/mockoauth/Common.java @@ -284,7 +284,7 @@ public static String getProjectRoot() { static class MockBearerTokenWithPayload implements BearerTokenWithPayload { private final TokenInfo ti; - private Object payload; + private JsonNode payload; MockBearerTokenWithPayload(TokenInfo ti) { if (ti == null) { @@ -294,12 +294,12 @@ static class MockBearerTokenWithPayload implements BearerTokenWithPayload { } @Override - public Object getPayload() { + public JsonNode getPayload() { return payload; } @Override - public void setPayload(Object value) { + public void setPayload(JsonNode value) { payload = value; } diff --git a/testsuite/pom.xml b/testsuite/pom.xml index 6e3a4448..49c2007b 100644 --- a/testsuite/pom.xml +++ b/testsuite/pom.xml @@ -17,6 +17,7 @@ mockoauth-tests keycloak-auth-tests keycloak-authz-tests + keycloak-authz-kraft-tests keycloak-errors-tests hydra-test @@ -46,7 +47,7 @@ 1.0.0-SNAPSHOT .. - quay.io/strimzi/kafka:0.33.2-kafka-3.4.0 + quay.io/strimzi/kafka:0.34.0-kafka-3.4.0 @@ -397,7 +398,7 @@ kafka-3_4_0 - quay.io/strimzi/kafka:0.33.2-kafka-3.4.0 + quay.io/strimzi/kafka:0.34.0-kafka-3.4.0
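
Reviewer note (not part of the patch): for readers unfamiliar with the "OAuth over PLAIN" path that the new KRaft MultiSaslTest exercises against the JWTPLAIN listener (kafka:9094), the sketch below shows the kind of producer configuration that Common.buildProducerConfigPlain() is assumed to assemble. It is a hypothetical, minimal example: the property names are standard Kafka client settings, but the security.protocol value, the serializers, and the exact shape of the JAAS line are assumptions based on the testsuite's plaintext listeners, not something defined in this diff.

// Hypothetical sketch only - approximates what the test helper presumably builds.
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class OAuthOverPlainSketch {
    public static void main(String[] args) throws Exception {
        // Token obtained from Keycloak beforehand (e.g. via the resource owner password grant)
        String accessToken = "<access token from Keycloak>";

        Properties p = new Properties();
        p.setProperty("bootstrap.servers", "kafka:9094");
        // SASL_PLAINTEXT is an assumption based on the non-TLS listeners used in this testsuite
        p.setProperty("security.protocol", "SASL_PLAINTEXT");
        p.setProperty("sasl.mechanism", "PLAIN");
        // The broker-side handler treats a password of the form "$accessToken:<jwt>"
        // as an OAuth access token instead of a regular PLAIN password
        p.setProperty("sasl.jaas.config",
                "org.apache.kafka.common.security.plain.PlainLoginModule required "
                        + "username=\"alice\" password=\"$accessToken:" + accessToken + "\";");
        p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        try (Producer<String, String> producer = new KafkaProducer<>(p)) {
            producer.send(new ProducerRecord<>("KeycloakAuthorizationTest-multiSaslTest-oauth-over-plain", "The Message")).get();
        }
    }
}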