Skip to content

Commit

Permalink
Druid catalog basics
Browse files Browse the repository at this point in the history
Catalog object model for tables, columns
Druid metadata DB storage (as an extension)
REST API to update the catalog (as an extension)
Integration tests
Model only: no planner integration yet
  • Loading branch information
paul-rogers committed Oct 4, 2022
1 parent 7fa53ff commit bd7c8be
Show file tree
Hide file tree
Showing 99 changed files with 12,925 additions and 38 deletions.
13 changes: 9 additions & 4 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -687,12 +687,17 @@ jobs:
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
# Uses the installation defined above. Then, builds the test tools and docker image,
# and runs one IT. If tests fail, echoes log lines of any of
# the Druid services that did not exit normally.
script: ./it.sh travis MultiStageQuery

# Disabling BatchIndex test as it is failing due to a timeout; fixing it will be handled in a separate PR.
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) catalog integration tests"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: ./it.sh travis Catalog

# Disabling BatchIndex test as it is failing due to a timeout; fixing it will be handled in a separate PR.
#- <<: *integration_tests_ex
# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer (new)"
# env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
Expand Down
10 changes: 6 additions & 4 deletions core/src/main/java/org/apache/druid/data/input/InputFormat.java
Original file line number Diff line number Diff line change
Expand Up @@ -41,15 +41,17 @@
* See {@link NestedInputFormat} for nested input formats such as JSON.
*/
@UnstableApi
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = InputFormat.TYPE_PROPERTY)
@JsonSubTypes(value = {
@Type(name = "csv", value = CsvInputFormat.class),
@Type(name = "json", value = JsonInputFormat.class),
@Type(name = CsvInputFormat.TYPE_KEY, value = CsvInputFormat.class),
@Type(name = JsonInputFormat.TYPE_KEY, value = JsonInputFormat.class),
@Type(name = "regex", value = RegexInputFormat.class),
@Type(name = "tsv", value = DelimitedInputFormat.class)
@Type(name = DelimitedInputFormat.TYPE_KEY, value = DelimitedInputFormat.class)
})
public interface InputFormat
{
String TYPE_PROPERTY = "type";

/**
* Trait to indicate that a file can be split into multiple {@link InputSplit}s.
* <p>
Expand Down
12 changes: 8 additions & 4 deletions core/src/main/java/org/apache/druid/data/input/InputSource.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

package org.apache.druid.data.input;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
Expand Down Expand Up @@ -47,19 +48,22 @@
* }</pre>
*/
@UnstableApi
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = InputSource.TYPE_PROPERTY)
@JsonSubTypes(value = {
@Type(name = "local", value = LocalInputSource.class),
@Type(name = "http", value = HttpInputSource.class),
@Type(name = "inline", value = InlineInputSource.class),
@Type(name = LocalInputSource.TYPE_KEY, value = LocalInputSource.class),
@Type(name = HttpInputSource.TYPE_KEY, value = HttpInputSource.class),
@Type(name = InlineInputSource.TYPE_KEY, value = InlineInputSource.class),
@Type(name = "combining", value = CombiningInputSource.class)
})
public interface InputSource
{
String TYPE_PROPERTY = "type";

/**
* Returns true if this inputSource can be processed in parallel using ParallelIndexSupervisorTask. It must be
* castable to SplittableInputSource and the various SplittableInputSource methods must work as documented.
*/
@JsonIgnore
boolean isSplittable();

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@

public class CsvInputFormat extends FlatTextInputFormat
{
public static final String TYPE_KEY = "csv";
private static final char SEPARATOR = ',';

@JsonCreator
Expand All @@ -59,6 +60,7 @@ public String getDelimiter()
}

@Override
@JsonIgnore
public boolean isSplittable()
{
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,11 @@
import java.util.List;

/**
* InputFormat for customized Delimitor Separate Value format of input data(default is TSV).
* InputFormat for customized Delimiter Separated Value format of input data (default is TSV).
*/
public class DelimitedInputFormat extends FlatTextInputFormat
{
public static final String TYPE_KEY = "tsv";
private static final String DEFAULT_DELIMITER = "\t";

@JsonCreator
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,8 @@

public class HttpInputSource extends AbstractInputSource implements SplittableInputSource<URI>
{
public static final String TYPE_KEY = "http";

private final List<URI> uris;
@Nullable
private final String httpAuthenticationUsername;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,10 @@
package org.apache.druid.data.input.impl;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import org.apache.druid.data.input.AbstractInputSource;
import org.apache.druid.data.input.InputFormat;
import org.apache.druid.data.input.InputRowSchema;
Expand All @@ -35,12 +37,14 @@

public class InlineInputSource extends AbstractInputSource
{
public static final String TYPE_KEY = "inline";

private final String data;

@JsonCreator
public InlineInputSource(@JsonProperty("data") String data)
{
Preconditions.checkArgument(data != null && !data.isEmpty(), "empty data");
Preconditions.checkArgument(!Strings.isNullOrEmpty(data), "empty data");
this.data = data;
}

Expand All @@ -51,6 +55,7 @@ public String getData()
}

@Override
@JsonIgnore
public boolean isSplittable()
{
return false;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,20 +39,20 @@

public class JsonInputFormat extends NestedInputFormat
{
public static final String TYPE_KEY = "json";

private final Map<String, Boolean> featureSpec;
private final ObjectMapper objectMapper;
private final boolean keepNullColumns;

/**
*
* This parameter indicates whether or not the given InputEntity should be split by lines before parsing it.
* Indicates whether or not the given InputEntity should be split by lines before parsing it.
* If it is set to true, the InputEntity must be split by lines first.
* If it is set to false, the InputEntity does not have to be split by lines first, but it may still contain multiple lines.
* A created InputEntityReader from this format will determine by itself if line splitting is necessary.
*
* This parameter should always be true for batch ingestion and false for streaming ingestion.
* For more information, see: https://github.com/apache/druid/pull/10383.
*
*/
private final boolean lineSplittable;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@
public class LocalInputSource extends AbstractInputSource implements SplittableInputSource<List<File>>
{
private static final Logger log = new Logger(LocalInputSource.class);
public static final String TYPE_KEY = "local";

@Nullable
private final File baseDir;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@

public class DefaultPasswordProvider implements PasswordProvider
{
public static final String TYPE_KEY = "default";

private final String password;

@JsonCreator
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@

public class EnvironmentVariablePasswordProvider implements PasswordProvider
{
public static final String TYPE_KEY = "environment";

private final String variable;

@JsonCreator
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,8 @@
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DefaultPasswordProvider.class)
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "default", value = DefaultPasswordProvider.class),
@JsonSubTypes.Type(name = "environment", value = EnvironmentVariablePasswordProvider.class),

@JsonSubTypes.Type(name = DefaultPasswordProvider.TYPE_KEY, value = DefaultPasswordProvider.class),
@JsonSubTypes.Type(name = EnvironmentVariablePasswordProvider.TYPE_KEY, value = EnvironmentVariablePasswordProvider.class),
})
public interface PasswordProvider
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,7 @@ public static ColumnType ofArray(ColumnType elementType)
{
return ColumnTypeFactory.getInstance().ofArray(elementType);
}

public static ColumnType ofComplex(@Nullable String complexTypeName)
{
return ColumnTypeFactory.getInstance().ofComplex(complexTypeName);
Expand Down
3 changes: 3 additions & 0 deletions distribution/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
<version>${project.parent.version}</version>
</dependency>
</dependencies>

<properties>
<!-- the default value is a repeated flag from the command line, since blank value is not allowed -->
<druid.distribution.pulldeps.opts>--clean</druid.distribution.pulldeps.opts>
Expand Down Expand Up @@ -255,6 +256,8 @@
<argument>org.apache.druid.extensions:druid-ranger-security</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-kubernetes-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-catalog</argument>
<argument>${druid.distribution.pulldeps.opts}</argument>
</arguments>
</configuration>
Expand Down
Loading

0 comments on commit bd7c8be

Please sign in to comment.