Skip to content

Commit

Permalink
feat: use Connect default precision for Avro decimals if unspecified (MINOR) (#7615)
Browse files Browse the repository at this point in the history
  • Loading branch information
vcrfxia authored Jun 2, 2021
1 parent b751cad commit 1abdb0d
Show file tree
Hide file tree
Showing 6 changed files with 431 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
public final class DecimalUtil {

// Connect schema parameter key under which a decimal's precision is stored.
public static final String PRECISION_FIELD = "connect.decimal.precision";
// Precision assumed when a decimal schema carries no precision parameter.
public static final int PRECISION_DEFAULT = 64;

// Utility class: not instantiable.
private DecimalUtil() {
}
Expand Down Expand Up @@ -103,7 +104,7 @@ public static int precision(final Schema schema) {
requireDecimal(schema);
final String precisionString = schema.parameters().get(PRECISION_FIELD);
if (precisionString == null) {
throw new KsqlException("Invalid Decimal schema: precision parameter not found.");
return PRECISION_DEFAULT;
}

try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,10 +82,19 @@ public void shouldExtractScaleFromDecimalSchema() {
@Test
public void shouldExtractPrecisionFromDecimalSchema() {
  // When: reading the precision off a schema that declares one.
  final int precision = DecimalUtil.precision(DECIMAL_SCHEMA);

  // Then: the declared precision is returned.
  assertThat(precision, is(2));
}

@Test
public void shouldUseDefaultPrecisionIfNotPresentInSchema() {
  // Given: a decimal schema whose parameters carry no precision entry.
  final Schema schema = decimalSchemaWithoutPrecision(3);

  // When:
  final int result = DecimalUtil.precision(schema);

  // Then: the default precision of 64 is used.
  assertThat(result, is(64));
}

@Test
Expand Down Expand Up @@ -690,4 +699,11 @@ public void shouldConvertFromBigDecimalWithNegativeScale() {
is(SqlTypes.decimal(4, 0))
);
}

/**
 * Builds an optional Connect {@code Decimal} schema that defines the given
 * scale but omits the {@code connect.decimal.precision} parameter.
 *
 * @param scale the decimal scale to declare on the schema
 * @return an optional decimal schema without a precision parameter
 */
private static Schema decimalSchemaWithoutPrecision(final int scale) {
  return org.apache.kafka.connect.data.Decimal.builder(scale).optional().build();
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,168 @@
{
"plan" : [ {
"@type" : "ksqlPlanV1",
"statementText" : "CREATE STREAM INPUT (ROWKEY DECIMAL(64, 2) KEY, FOO INTEGER) WITH (FORMAT='AVRO', KAFKA_TOPIC='input_topic', KEY_SCHEMA_ID=1);",
"ddlCommand" : {
"@type" : "createStreamV1",
"sourceName" : "INPUT",
"schema" : "`ROWKEY` DECIMAL(64, 2) KEY, `FOO` INTEGER",
"topicName" : "input_topic",
"formats" : {
"keyFormat" : {
"format" : "AVRO",
"properties" : {
"fullSchemaName" : "io.confluent.ksql.avro_schemas.InputKey"
}
},
"valueFormat" : {
"format" : "AVRO"
},
"keyFeatures" : [ "UNWRAP_SINGLES" ]
},
"orReplace" : false
}
}, {
"@type" : "ksqlPlanV1",
"statementText" : "CREATE STREAM OUTPUT AS SELECT *\nFROM INPUT INPUT\nEMIT CHANGES",
"ddlCommand" : {
"@type" : "createStreamV1",
"sourceName" : "OUTPUT",
"schema" : "`ROWKEY` DECIMAL(64, 2) KEY, `FOO` INTEGER",
"topicName" : "OUTPUT",
"formats" : {
"keyFormat" : {
"format" : "AVRO",
"properties" : {
"fullSchemaName" : "io.confluent.ksql.avro_schemas.OutputKey"
}
},
"valueFormat" : {
"format" : "AVRO"
},
"keyFeatures" : [ "UNWRAP_SINGLES" ]
},
"orReplace" : false
},
"queryPlan" : {
"sources" : [ "INPUT" ],
"sink" : "OUTPUT",
"physicalPlan" : {
"@type" : "streamSinkV1",
"properties" : {
"queryContext" : "OUTPUT"
},
"source" : {
"@type" : "streamSelectV1",
"properties" : {
"queryContext" : "Project"
},
"source" : {
"@type" : "streamSourceV1",
"properties" : {
"queryContext" : "KsqlTopic/Source"
},
"topicName" : "input_topic",
"formats" : {
"keyFormat" : {
"format" : "AVRO",
"properties" : {
"fullSchemaName" : "io.confluent.ksql.avro_schemas.InputKey"
}
},
"valueFormat" : {
"format" : "AVRO"
},
"keyFeatures" : [ "UNWRAP_SINGLES" ]
},
"sourceSchema" : "`ROWKEY` DECIMAL(64, 2) KEY, `FOO` INTEGER"
},
"keyColumnNames" : [ "ROWKEY" ],
"selectExpressions" : [ "FOO AS FOO" ]
},
"formats" : {
"keyFormat" : {
"format" : "AVRO",
"properties" : {
"fullSchemaName" : "io.confluent.ksql.avro_schemas.OutputKey"
}
},
"valueFormat" : {
"format" : "AVRO"
},
"keyFeatures" : [ "UNWRAP_SINGLES" ]
},
"topicName" : "OUTPUT"
},
"queryId" : "CSAS_OUTPUT_0"
}
} ],
"configs" : {
"ksql.extension.dir" : "ext",
"ksql.streams.cache.max.bytes.buffering" : "0",
"ksql.security.extension.class" : null,
"metric.reporters" : "",
"ksql.transient.prefix" : "transient_",
"ksql.query.status.running.threshold.seconds" : "300",
"ksql.streams.default.deserialization.exception.handler" : "io.confluent.ksql.errors.LogMetricAndContinueExceptionHandler",
"ksql.output.topic.name.prefix" : "",
"ksql.query.pull.enable.standby.reads" : "false",
"ksql.persistence.default.format.key" : "KAFKA",
"ksql.query.persistent.max.bytes.buffering.total" : "-1",
"ksql.queryanonymizer.logs_enabled" : "true",
"ksql.query.error.max.queue.size" : "10",
"ksql.variable.substitution.enable" : "true",
"ksql.internal.topic.min.insync.replicas" : "1",
"ksql.streams.shutdown.timeout.ms" : "300000",
"ksql.internal.topic.replicas" : "1",
"ksql.insert.into.values.enabled" : "true",
"ksql.query.pull.max.allowed.offset.lag" : "9223372036854775807",
"ksql.query.pull.max.qps" : "2147483647",
"ksql.access.validator.enable" : "auto",
"ksql.streams.bootstrap.servers" : "localhost:0",
"ksql.queryanonymizer.cluster_namespace" : null,
"ksql.query.pull.metrics.enabled" : "true",
"ksql.create.or.replace.enabled" : "true",
"ksql.metrics.extension" : null,
"ksql.hidden.topics" : "_confluent.*,__confluent.*,_schemas,__consumer_offsets,__transaction_state,connect-configs,connect-offsets,connect-status,connect-statuses",
"ksql.cast.strings.preserve.nulls" : "true",
"ksql.authorization.cache.max.entries" : "10000",
"ksql.pull.queries.enable" : "true",
"ksql.lambdas.enabled" : "true",
"ksql.suppress.enabled" : "false",
"ksql.query.push.scalable.enabled" : "false",
"ksql.sink.window.change.log.additional.retention" : "1000000",
"ksql.readonly.topics" : "_confluent.*,__confluent.*,_schemas,__consumer_offsets,__transaction_state,connect-configs,connect-offsets,connect-status,connect-statuses",
"ksql.query.persistent.active.limit" : "2147483647",
"ksql.persistence.wrap.single.values" : null,
"ksql.authorization.cache.expiry.time.secs" : "30",
"ksql.query.retry.backoff.initial.ms" : "15000",
"ksql.query.transient.max.bytes.buffering.total" : "-1",
"ksql.schema.registry.url" : "",
"ksql.properties.overrides.denylist" : "",
"ksql.query.pull.max.concurrent.requests" : "2147483647",
"ksql.streams.auto.offset.reset" : "earliest",
"ksql.connect.url" : "http://localhost:8083",
"ksql.service.id" : "some.ksql.service.id",
"ksql.streams.default.production.exception.handler" : "io.confluent.ksql.errors.ProductionExceptionHandlerUtil$LogAndFailProductionExceptionHandler",
"ksql.query.pull.interpreter.enabled" : "true",
"ksql.streams.commit.interval.ms" : "2000",
"ksql.query.pull.table.scan.enabled" : "false",
"ksql.streams.auto.commit.interval.ms" : "0",
"ksql.streams.topology.optimization" : "all",
"ksql.query.retry.backoff.max.ms" : "900000",
"ksql.streams.num.stream.threads" : "4",
"ksql.timestamp.throw.on.invalid" : "false",
"ksql.metrics.tags.custom" : "",
"ksql.persistence.default.format.value" : null,
"ksql.udfs.enabled" : "true",
"ksql.udf.enable.security.manager" : "true",
"ksql.connect.worker.config" : "",
"ksql.nested.error.set.null" : "true",
"ksql.udf.collect.metrics" : "false",
"ksql.query.pull.thread.pool.size" : "100",
"ksql.persistent.prefix" : "query_",
"ksql.metastore.backup.location" : "",
"ksql.error.classifier.regex" : "",
"ksql.suppress.buffer.size.bytes" : "-1"
}
}
Loading

0 comments on commit 1abdb0d

Please sign in to comment.