3.6.5 backports 1 #38109

Merged
merged 24 commits on Jan 10, 2024

Changes from all commits (24 commits)
96c4d2d
RR client: fix chunking between \n\n in SSEParser #37625
FroMage Dec 20, 2023
8e33f89
Choose correct level field value and type to avoid non-resolution error
dmlloyd Dec 21, 2023
9bfa40d
Update `jboss-logmanager` to 3.0.4.Final
dmlloyd Dec 20, 2023
4bbd759
Make sure all testing frameworks use `ExtHandler`
dmlloyd Dec 21, 2023
635aebc
Bump org.bouncycastle:bctls-fips from 1.0.17 to 1.0.18
dependabot[bot] Dec 22, 2023
3390752
Reset CSRF cookie to minimize the risk of failures due to its expiry
sberyozkin Dec 13, 2023
ce5ec52
Update getting-started-testing.adoc
weltonrodrigo Dec 25, 2023
cb9e118
Update Strimzi container images in docs
scholzj Dec 28, 2023
a9bcc13
Do not expand config properties for Gradle Workers
radcortez Jan 2, 2024
27fc3e5
Upgrade to Kotlin 1.9.22
gsmet Dec 21, 2023
7430ec6
Mention exit handler parameter variant of Quarkus.run (illustrated after the commit list)
geoand Jan 3, 2024
5779695
Register JsonSubTypes.Type values for native mode
geoand Jan 3, 2024
08c3e9a
Add native test for verifying JsonSubTypes.Type handling
geoand Jan 3, 2024
6d2b36e
Fix the status in a couple of extensions' documentation pages
geoand Jan 3, 2024
9c042cf
Don't fail if config is not a directory
geoand Jan 3, 2024
23ab911
Set the correct port properties for HTTPS
rob-spoor Jan 3, 2024
3af3724
Properly take Quarkus HTTP body configuration into account for File body
geoand Aug 31, 2023
d8bc51a
Add support for Path as a JAX-RS method body type
geoand Aug 31, 2023
f2f4762
Don't warn about missing JSON when returning String
geoand Jan 5, 2024
fffa654
Add hint for Scheduled.ApplicationNotRunning skip predicate (illustrated after the commit list)
HerrDerb Jan 4, 2024
f9377cd
Add companion classes to Kotlin reflective hierarchy registration
geoand Jan 5, 2024
9472824
Always set ssl and alpn for non-plain-text with Vert.x gRPC channel
alesj Jan 8, 2024
842b3fe
Stork path param resolution fix: use raw path and avoid double encodi…
aureamunoz Dec 14, 2023
315a254
Make Picocli version providers unremovable classes
geoand Jan 8, 2024
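
Two of the backported commits above touch user-facing APIs and are easy to illustrate. For the exit-handler variant of `Quarkus.run`, a minimal sketch — the application class and handler body are invented for this example, not taken from the PR:

[source,java]
----
import io.quarkus.runtime.Quarkus;
import io.quarkus.runtime.QuarkusApplication;
import io.quarkus.runtime.annotations.QuarkusMain;

@QuarkusMain
public class Main implements QuarkusApplication {

    public static void main(String... args) {
        // The BiConsumer<Integer, Throwable> overload hands the exit code (and the
        // failure, if any) to the caller instead of exiting the JVM immediately.
        Quarkus.run(Main.class, (exitCode, failure) -> {
            if (failure != null) {
                failure.printStackTrace();
            }
            System.out.println("Application stopped with exit code " + exitCode);
        }, args);
    }

    @Override
    public int run(String... args) throws Exception {
        // application logic; returning from this method ends the application
        return 0;
    }
}
----

And for the `Scheduled.ApplicationNotRunning` skip predicate, a hedged sketch of a scheduled method that opts out of running while the application is starting up or shutting down — the bean name and interval are made up:

[source,java]
----
import io.quarkus.scheduler.Scheduled;
import jakarta.enterprise.context.ApplicationScoped;

@ApplicationScoped
public class HealthPing {

    // Skips executions triggered before the application is fully started
    // or after shutdown has begun.
    @Scheduled(every = "10s", skipExecutionIf = Scheduled.ApplicationNotRunning.class)
    void ping() {
        // periodic work
    }
}
----
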
6 changes: 3 additions & 3 deletions bom/application/pom.xml
@@ -17,7 +17,7 @@
<angus-activation.version>2.0.1</angus-activation.version>
<bouncycastle.version>1.77</bouncycastle.version>
<bouncycastle.fips.version>1.0.2.4</bouncycastle.fips.version>
<bouncycastle.tls.fips.version>1.0.17</bouncycastle.tls.fips.version>
<bouncycastle.tls.fips.version>1.0.18</bouncycastle.tls.fips.version>
<expressly.version>5.0.0</expressly.version>
<findbugs.version>3.0.2</findbugs.version>
<jandex.version>3.1.6</jandex.version>
@@ -157,14 +157,14 @@
<aws-xray.version>2.14.0</aws-xray.version>
<azure-functions-java-library.version>2.2.0</azure-functions-java-library.version>
<azure-functions-java-spi.version>1.0.0</azure-functions-java-spi.version>
<kotlin.version>1.9.21</kotlin.version>
<kotlin.version>1.9.22</kotlin.version>
<kotlin.coroutine.version>1.7.3</kotlin.coroutine.version>
<azure.toolkit-lib.version>0.27.0</azure.toolkit-lib.version>
<kotlin-serialization.version>1.6.2</kotlin-serialization.version>
<dekorate.version>4.0.3</dekorate.version> <!-- Please check with Java Operator SDK team before updating -->
<maven-invoker.version>3.2.0</maven-invoker.version>
<awaitility.version>4.2.0</awaitility.version>
<jboss-logmanager.version>3.0.2.Final</jboss-logmanager.version>
<jboss-logmanager.version>3.0.4.Final</jboss-logmanager.version>
<flyway.version>9.22.3</flyway.version>
<yasson.version>3.0.3</yasson.version>
<!-- liquibase-mongodb is not released everytime with liquibase anymore, but the two versions need to be compatible -->
2 changes: 1 addition & 1 deletion build-parent/pom.xml
@@ -20,7 +20,7 @@

<!-- These properties are needed in order for them to be resolvable by the generated projects -->
<compiler-plugin.version>3.11.0</compiler-plugin.version>
<kotlin.version>1.9.21</kotlin.version>
<kotlin.version>1.9.22</kotlin.version>
<dokka.version>1.9.10</dokka.version>
<scala.version>2.13.8</scala.version>
<scala-maven-plugin.version>4.8.1</scala-maven-plugin.version>

@@ -619,10 +619,23 @@ private static BiFunction<MethodCreator, FieldDescriptor, BranchResult> generate
}

private static ResultHandle getLogManagerLevelIntValue(String levelName, BytecodeCreator method) {
final ResultHandle infoLevel = method.readStaticField(
FieldDescriptor.of(org.jboss.logmanager.Level.class, levelName, org.jboss.logmanager.Level.class));
FieldDescriptor fd;
switch (levelName) {
case "FATAL":
case "ERROR":
case "WARN":
case "INFO":
case "DEBUG":
case "TRACE":
fd = FieldDescriptor.of(org.jboss.logmanager.Level.class, levelName, org.jboss.logmanager.Level.class);
break;
default:
fd = FieldDescriptor.of(Level.class, levelName, Level.class);
break;
}
final ResultHandle levelVal = method.readStaticField(fd);
return method
.invokeVirtualMethod(MethodDescriptor.ofMethod(Level.class, "intValue", int.class), infoLevel);
.invokeVirtualMethod(MethodDescriptor.ofMethod(Level.class, "intValue", int.class), levelVal);
}

private static void generateDefaultLoggingLogger(Level minLevel, ClassOutput output) {
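
The change above makes the generated min-level check read the level constant from the class that actually declares it. As a plain-Java sketch of the same name-to-class mapping (reflection stands in for the generated bytecode here, and it assumes `jboss-logmanager` is on the classpath):

[source,java]
----
import java.util.logging.Level;

public class LevelLookup {

    // FATAL/ERROR/WARN/INFO/DEBUG/TRACE are declared on org.jboss.logmanager.Level;
    // standard names such as SEVERE, WARNING, FINE, ALL or OFF are declared on
    // java.util.logging.Level, so reading them from the wrong class fails to resolve.
    static int levelIntValue(String levelName) throws ReflectiveOperationException {
        Class<?> owner = switch (levelName) {
            case "FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE" -> org.jboss.logmanager.Level.class;
            default -> Level.class;
        };
        return ((Level) owner.getField(levelName).get(null)).intValue();
    }

    public static void main(String[] args) throws Exception {
        System.out.println(levelIntValue("DEBUG"));   // resolved against org.jboss.logmanager.Level
        System.out.println(levelIntValue("WARNING")); // resolved against java.util.logging.Level
    }
}
----
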
@@ -1,5 +1,7 @@
package io.quarkus.deployment.steps;

import static io.quarkus.deployment.steps.KotlinUtil.isKotlinClass;

import java.lang.reflect.Modifier;
import java.util.ArrayDeque;
import java.util.Deque;
@@ -27,6 +29,8 @@
import org.jboss.jandex.VoidType;
import org.jboss.logging.Logger;

import io.quarkus.deployment.Capabilities;
import io.quarkus.deployment.Capability;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.CombinedIndexBuildItem;
@@ -51,7 +55,7 @@ public ReflectiveHierarchyIgnoreWarningBuildItem ignoreJavaClassWarnings() {
}

@BuildStep
public void build(CombinedIndexBuildItem combinedIndexBuildItem,
public void build(CombinedIndexBuildItem combinedIndexBuildItem, Capabilities capabilities,
List<ReflectiveHierarchyBuildItem> hierarchy,
List<ReflectiveHierarchyIgnoreWarningBuildItem> ignored,
List<ReflectiveClassFinalFieldsWritablePredicateBuildItem> finalFieldsWritablePredicates,
@@ -73,7 +77,7 @@ public void build(CombinedIndexBuildItem combinedIndexBuildItem,
final Deque<ReflectiveHierarchyVisitor> visits = new ArrayDeque<>();

for (ReflectiveHierarchyBuildItem i : hierarchy) {
addReflectiveHierarchy(combinedIndexBuildItem,
addReflectiveHierarchy(combinedIndexBuildItem, capabilities,
i,
i.hasSource() ? i.getSource() : i.getType().name().toString(),
i.getType(),
@@ -128,7 +132,7 @@ private void removeIgnored(Map<DotName, Set<String>> unindexedClasses,
}

private void addReflectiveHierarchy(CombinedIndexBuildItem combinedIndexBuildItem,
ReflectiveHierarchyBuildItem reflectiveHierarchyBuildItem, String source, Type type,
Capabilities capabilities, ReflectiveHierarchyBuildItem reflectiveHierarchyBuildItem, String source, Type type,
Set<DotName> processedReflectiveHierarchies, Map<DotName, Set<String>> unindexedClasses,
Predicate<ClassInfo> finalFieldsWritable, BuildProducer<ReflectiveClassBuildItem> reflectiveClass,
Deque<ReflectiveHierarchyVisitor> visits) {
@@ -142,45 +146,50 @@ private void addReflectiveHierarchy(CombinedIndexBuildItem combinedIndexBuildIte
return;
}

addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, type.name(), type.name(),
addClassTypeHierarchy(combinedIndexBuildItem, capabilities, reflectiveHierarchyBuildItem, source, type.name(),
type.name(),
processedReflectiveHierarchies, unindexedClasses,
finalFieldsWritable, reflectiveClass, visits);

for (ClassInfo subclass : combinedIndexBuildItem.getIndex().getAllKnownSubclasses(type.name())) {
addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, subclass.name(),
addClassTypeHierarchy(combinedIndexBuildItem, capabilities, reflectiveHierarchyBuildItem, source,
subclass.name(),
subclass.name(),
processedReflectiveHierarchies,
unindexedClasses, finalFieldsWritable, reflectiveClass, visits);
}
for (ClassInfo subclass : combinedIndexBuildItem.getIndex().getAllKnownImplementors(type.name())) {
addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, subclass.name(),
addClassTypeHierarchy(combinedIndexBuildItem, capabilities, reflectiveHierarchyBuildItem, source,
subclass.name(),
subclass.name(),
processedReflectiveHierarchies,
unindexedClasses, finalFieldsWritable, reflectiveClass, visits);
}
} else if (type instanceof ArrayType) {
visits.addLast(() -> addReflectiveHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source,
visits.addLast(() -> addReflectiveHierarchy(combinedIndexBuildItem, capabilities,
reflectiveHierarchyBuildItem, source,
type.asArrayType().constituent(),
processedReflectiveHierarchies,
unindexedClasses, finalFieldsWritable, reflectiveClass, visits));
} else if (type instanceof ParameterizedType) {
if (!reflectiveHierarchyBuildItem.getIgnoreTypePredicate().test(type.name())) {
addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, type.name(),
addClassTypeHierarchy(combinedIndexBuildItem, capabilities, reflectiveHierarchyBuildItem, source, type.name(),
type.name(),
processedReflectiveHierarchies,
unindexedClasses, finalFieldsWritable, reflectiveClass, visits);
}
final ParameterizedType parameterizedType = (ParameterizedType) type;
for (Type typeArgument : parameterizedType.arguments()) {
visits.addLast(
() -> addReflectiveHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, typeArgument,
() -> addReflectiveHierarchy(combinedIndexBuildItem, capabilities, reflectiveHierarchyBuildItem, source,
typeArgument,
processedReflectiveHierarchies,
unindexedClasses, finalFieldsWritable, reflectiveClass, visits));
}
}
}

private void addClassTypeHierarchy(CombinedIndexBuildItem combinedIndexBuildItem,
private void addClassTypeHierarchy(CombinedIndexBuildItem combinedIndexBuildItem, Capabilities capabilities,
ReflectiveHierarchyBuildItem reflectiveHierarchyBuildItem,
String source,
DotName name,
@@ -223,7 +232,7 @@ private void addClassTypeHierarchy(CombinedIndexBuildItem combinedIndexBuildItem
return;
}

visits.addLast(() -> addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source,
visits.addLast(() -> addClassTypeHierarchy(combinedIndexBuildItem, capabilities, reflectiveHierarchyBuildItem, source,
info.superName(), initialName,
processedReflectiveHierarchies,
unindexedClasses, finalFieldsWritable, reflectiveClass, visits));
@@ -237,7 +246,8 @@ private void addClassTypeHierarchy(CombinedIndexBuildItem combinedIndexBuildItem
}
final Type fieldType = getFieldType(combinedIndexBuildItem, initialName, info, field);
visits.addLast(
() -> addReflectiveHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, fieldType,
() -> addReflectiveHierarchy(combinedIndexBuildItem, capabilities, reflectiveHierarchyBuildItem, source,
fieldType,
processedReflectiveHierarchies,
unindexedClasses, finalFieldsWritable, reflectiveClass, visits));
}
@@ -249,11 +259,30 @@ private void addClassTypeHierarchy(CombinedIndexBuildItem combinedIndexBuildItem
method.returnType().kind() == Kind.VOID) {
continue;
}
visits.addLast(() -> addReflectiveHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source,
visits.addLast(() -> addReflectiveHierarchy(combinedIndexBuildItem, capabilities,
reflectiveHierarchyBuildItem, source,
method.returnType(),
processedReflectiveHierarchies,
unindexedClasses, finalFieldsWritable, reflectiveClass, visits));
}

// for Kotlin classes, we need to register the nested classes as well because companion classes are very often necessary at runtime
if (capabilities.isPresent(Capability.KOTLIN) && isKotlinClass(info)) {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
try {
Class<?>[] declaredClasses = classLoader.loadClass(info.name().toString()).getDeclaredClasses();
for (Class<?> clazz : declaredClasses) {
DotName dotName = DotName.createSimple(clazz.getName());
addClassTypeHierarchy(combinedIndexBuildItem, capabilities, reflectiveHierarchyBuildItem, source,
dotName, dotName,
processedReflectiveHierarchies, unindexedClasses,
finalFieldsWritable, reflectiveClass, visits);
}
} catch (ClassNotFoundException e) {
log.warnf(e, "Failed to load Class %s", info.name().toString());
}

}
}

private static Type getFieldType(CombinedIndexBuildItem combinedIndexBuildItem, DotName initialName, ClassInfo info,
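
The new block above registers a Kotlin class's nested classes for reflection because a companion object compiles to a nested `…$Companion` class that frameworks frequently need at runtime. A small, hypothetical illustration of what `getDeclaredClasses()` surfaces for such a class (the `org.acme.Order` name is made up):

[source,java]
----
public class CompanionDiscovery {

    // For a Kotlin class like:
    //   data class Order(val id: Long) { companion object { fun of(id: Long) = Order(id) } }
    // the compiler emits org.acme.Order plus a nested org.acme.Order$Companion class;
    // both need to be registered for reflection in native mode.
    public static void main(String[] args) throws ClassNotFoundException {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        for (Class<?> nested : classLoader.loadClass("org.acme.Order").getDeclaredClasses()) {
            System.out.println(nested.getName()); // e.g. org.acme.Order$Companion
        }
    }
}
----
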
@@ -6,6 +6,7 @@
import java.net.URI;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.NotDirectoryException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
@@ -186,6 +187,9 @@ public static Set<String> configFiles(Path configFilesLocation) throws IOExcepti
for (Path candidate : candidates) {
configFiles.add(candidate.toUri().toURL().toString());
}
} catch (NotDirectoryException ignored) {
log.debugf("File %s is not a directory", configFilesLocation.toAbsolutePath());
return Collections.emptySet();
}
return configFiles;
}
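
For context, `Files.newDirectoryStream` throws `NotDirectoryException` when the configured location exists but is a regular file; the new catch block turns that into "no extra config files" instead of a failure. A self-contained sketch of the same guard (the `application.*` glob is an assumption, not necessarily the pattern the real method uses):

[source,java]
----
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.NotDirectoryException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class ConfigFilesLookup {

    static Set<String> configFiles(Path configFilesLocation) throws IOException {
        Set<String> configFiles = new HashSet<>();
        try (DirectoryStream<Path> candidates = Files.newDirectoryStream(configFilesLocation, "application.*")) {
            for (Path candidate : candidates) {
                configFiles.add(candidate.toUri().toURL().toString());
            }
        } catch (NotDirectoryException ignored) {
            // e.g. a file literally named "config" sitting next to the application jar
            return Collections.emptySet();
        }
        return configFiles;
    }
}
----
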
@@ -28,6 +28,7 @@
import io.quarkus.gradle.QuarkusPlugin;
import io.quarkus.gradle.tasks.worker.BuildWorker;
import io.quarkus.maven.dependency.GACTV;
import io.smallrye.config.Expressions;

/**
* Base class for the {@link QuarkusBuildDependencies}, {@link QuarkusBuildCacheableAppParts}, {@link QuarkusBuild} tasks
@@ -207,12 +208,14 @@ void generateBuild() {

ApplicationModel appModel = resolveAppModelForBuild();
Map<String, String> configMap = new HashMap<>();
for (Map.Entry<String, String> entry : extension().buildEffectiveConfiguration(appModel.getAppArtifact()).configMap()
.entrySet()) {
if (entry.getKey().startsWith("quarkus.")) {
configMap.put(entry.getKey(), entry.getValue());
EffectiveConfig effectiveConfig = extension().buildEffectiveConfiguration(appModel.getAppArtifact());
Expressions.withoutExpansion(() -> {
for (Map.Entry<String, String> entry : effectiveConfig.configMap().entrySet()) {
if (entry.getKey().startsWith("quarkus.")) {
configMap.put(entry.getKey(), effectiveConfig.config().getRawValue(entry.getKey()));
}
}
}
});

getLogger().info("Starting Quarkus application build for package type {}", packageType);

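
The change above copies the raw, unexpanded values of `quarkus.*` properties when handing configuration to Gradle workers, so `${...}` expressions are not resolved prematurely in the Gradle process. A minimal sketch of that pattern with SmallRye Config (the property name and default value are invented for the example):

[source,java]
----
import java.util.HashMap;
import java.util.Map;

import io.smallrye.config.Expressions;
import io.smallrye.config.SmallRyeConfig;
import io.smallrye.config.SmallRyeConfigBuilder;

public class RawQuarkusProperties {

    static Map<String, String> rawQuarkusProperties(SmallRyeConfig config) {
        Map<String, String> raw = new HashMap<>();
        // Disable expression expansion while iterating, and read raw values so that
        // placeholders such as ${user.home} reach the worker verbatim.
        Expressions.withoutExpansion(() -> {
            for (String name : config.getPropertyNames()) {
                if (name.startsWith("quarkus.")) {
                    raw.put(name, config.getRawValue(name));
                }
            }
        });
        return raw;
    }

    public static void main(String[] args) {
        SmallRyeConfig config = new SmallRyeConfigBuilder()
                .withDefaultValue("quarkus.package.output-directory", "${user.home}/out")
                .build();
        // Prints "${user.home}/out" rather than an expanded path.
        System.out.println(rawQuarkusProperties(config).get("quarkus.package.output-directory"));
    }
}
----
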
@@ -105,7 +105,7 @@ public void shouldReturnMultipleOutputSourceDirectories() {

@Test
public void shouldNotFailOnProjectDependenciesWithoutMain(@TempDir Path testProjectDir) throws IOException {
var kotlinVersion = System.getProperty("kotlin_version", "1.9.21");
var kotlinVersion = System.getProperty("kotlin_version", "1.9.22");
var settingFile = testProjectDir.resolve("settings.gradle.kts");
var mppProjectDir = testProjectDir.resolve("mpp");
var quarkusProjectDir = testProjectDir.resolve("quarkus");
2 changes: 1 addition & 1 deletion devtools/gradle/gradle/libs.versions.toml
@@ -2,7 +2,7 @@
plugin-publish = "1.2.1"

# updating Kotlin here makes QuarkusPluginTest > shouldNotFailOnProjectDependenciesWithoutMain(Path) fail
kotlin = "1.9.21"
kotlin = "1.9.22"
smallrye-config = "3.4.4"

junit5 = "5.10.1"
2 changes: 0 additions & 2 deletions docs/src/main/asciidoc/cache.adoc
@@ -12,8 +12,6 @@ include::_attributes.adoc[]

In this guide, you will learn how to enable application data caching in any CDI managed bean of your Quarkus application.

include::{includes}/extension-status.adoc[]

== Prerequisites

include::{includes}/prerequisites.adoc[]
2 changes: 1 addition & 1 deletion docs/src/main/asciidoc/getting-started-testing.adoc
@@ -1088,7 +1088,7 @@ or starting a mock HTTP server using https://wiremock.org/[Wiremock] (an example


=== Altering the test class
When creating a custom `QuarkusTestResourceLifecycleManager` that needs to inject the something into the test class, the `inject` methods can be used.
When creating a custom `QuarkusTestResourceLifecycleManager` that needs to inject something into the test class, the `inject` methods can be used.
If for example you have a test like the following:

[source,java]
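
The guide's own example is collapsed in this diff view, so purely as an illustration (all names below are hypothetical and not taken from the guide): a resource that passes a value into the test instance via the `inject` callback.

[source,java]
----
import java.util.Map;

import io.quarkus.test.common.QuarkusTestResourceLifecycleManager;

public class MyTestResource implements QuarkusTestResourceLifecycleManager {

    private String baseUrl;

    @Override
    public Map<String, String> start() {
        baseUrl = "http://localhost:8089"; // pretend a mock server was started here
        return Map.of();
    }

    @Override
    public void stop() {
        // stop the mock server
    }

    @Override
    public void inject(Object testInstance) {
        // Hand the started resource's coordinates to the test class; the
        // inject(TestInjector) overload allows annotation-based matching instead.
        if (testInstance instanceof MyResourceTest test) {
            test.baseUrl = baseUrl;
        }
    }
}

class MyResourceTest {
    String baseUrl; // field populated by the resource above
}
----
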
2 changes: 1 addition & 1 deletion docs/src/main/asciidoc/kafka-dev-services.adoc
@@ -82,7 +82,7 @@ For Strimzi, you can select any image with a Kafka version which has Kraft suppo

[source, properties]
----
quarkus.kafka.devservices.image-name=quay.io/strimzi-test-container/test-container:0.100.0-kafka-3.1.0
quarkus.kafka.devservices.image-name=quay.io/strimzi-test-container/test-container:0.105.0-kafka-3.6.0
----

== Configuring Kafka topics
4 changes: 2 additions & 2 deletions docs/src/main/asciidoc/kafka-reactive-getting-started.adoc
@@ -407,7 +407,7 @@ version: '3.5'
services:

zookeeper:
image: quay.io/strimzi/kafka:0.23.0-kafka-2.8.0
image: quay.io/strimzi/kafka:0.39.0-kafka-3.6.1
command: [
"sh", "-c",
"bin/zookeeper-server-start.sh config/zookeeper.properties"
@@ -420,7 +420,7 @@ services:
- kafka-quickstart-network

kafka:
image: quay.io/strimzi/kafka:0.23.0-kafka-2.8.0
image: quay.io/strimzi/kafka:0.39.0-kafka-3.6.1
command: [
"sh", "-c",
"bin/kafka-server-start.sh config/server.properties --override listeners=$${KAFKA_LISTENERS} --override advertised.listeners=$${KAFKA_ADVERTISED_LISTENERS} --override zookeeper.connect=$${KAFKA_ZOOKEEPER_CONNECT}"
8 changes: 4 additions & 4 deletions docs/src/main/asciidoc/kafka-schema-registry-avro.adoc
@@ -324,7 +324,7 @@ version: '2'
services:

zookeeper:
image: quay.io/strimzi/kafka:0.22.1-kafka-2.7.0
image: quay.io/strimzi/kafka:0.39.0-kafka-3.6.1
command: [
"sh", "-c",
"bin/zookeeper-server-start.sh config/zookeeper.properties"
@@ -335,7 +335,7 @@ services:
LOG_DIR: /tmp/logs

kafka:
image: quay.io/strimzi/kafka:0.22.1-kafka-2.7.0
image: quay.io/strimzi/kafka:0.39.0-kafka-3.6.1
command: [
"sh", "-c",
"bin/kafka-server-start.sh config/server.properties --override listeners=$${KAFKA_LISTENERS} --override advertised.listeners=$${KAFKA_ADVERTISED_LISTENERS} --override zookeeper.connect=$${KAFKA_ZOOKEEPER_CONNECT}"
@@ -545,7 +545,7 @@ If we couldn't use Dev Services and wanted to start a Kafka broker and Apicurio
<dependency>
<groupId>io.strimzi</groupId>
<artifactId>strimzi-test-container</artifactId>
<version>0.22.1</version>
<version>0.105.0</version>
<scope>test</scope>
<exclusions>
<exclusion>
@@ -559,7 +559,7 @@ If we couldn't use Dev Services and wanted to start a Kafka broker and Apicurio
[source,gradle,role="secondary asciidoc-tabs-target-sync-gradle"]
.build.gradle
----
testImplementation("io.strimzi:strimzi-test-container:0.22.1") {
testImplementation("io.strimzi:strimzi-test-container:0.105.0") {
exclude group: "org.apache.logging.log4j", module: "log4j-core"
}
----
4 changes: 2 additions & 2 deletions docs/src/main/asciidoc/kafka-streams.adoc
@@ -499,7 +499,7 @@ version: '3.5'

services:
zookeeper:
image: strimzi/kafka:0.19.0-kafka-2.5.0
image: quay.io/strimzi/kafka:0.39.0-kafka-3.6.1
command: [
"sh", "-c",
"bin/zookeeper-server-start.sh config/zookeeper.properties"
@@ -511,7 +511,7 @@ services:
networks:
- kafkastreams-network
kafka:
image: strimzi/kafka:0.19.0-kafka-2.5.0
image: quay.io/strimzi/kafka:0.39.0-kafka-3.6.1
command: [
"sh", "-c",
"bin/kafka-server-start.sh config/server.properties --override listeners=$${KAFKA_LISTENERS} --override advertised.listeners=$${KAFKA_ADVERTISED_LISTENERS} --override zookeeper.connect=$${KAFKA_ZOOKEEPER_CONNECT} --override num.partitions=$${KAFKA_NUM_PARTITIONS}"
2 changes: 1 addition & 1 deletion docs/src/main/asciidoc/kafka.adoc
@@ -2268,7 +2268,7 @@ The configuration of the created Kafka broker can be customized using `@Resource
[source,java]
----
@QuarkusTestResource(value = KafkaCompanionResource.class, initArgs = {
@ResourceArg(name = "strimzi.kafka.image", value = "quay.io/strimzi/kafka:0.28.0-kafka-3.0.0"), // Image name
@ResourceArg(name = "strimzi.kafka.image", value = "quay.io/strimzi-test-container/test-container:0.105.0-kafka-3.6.0"), // Image name
@ResourceArg(name = "kafka.port", value = "9092"), // Fixed port for kafka, by default it will be exposed on a random port
@ResourceArg(name = "kraft", value = "true"), // Enable Kraft mode
@ResourceArg(name = "num.partitions", value = "3"), // Other custom broker configurations