Add end-to-end test for Presto on Spark using Docker
arhimondr committed Jan 14, 2020
1 parent a7aa342 commit 76ee53c
Showing 7 changed files with 876 additions and 3 deletions.
15 changes: 12 additions & 3 deletions .travis.yml
@@ -25,12 +25,13 @@ env:
- TEST_SPECIFIC_MODULES=presto-hive TEST_FLAGS="-P test-hive-parquet"
- TEST_SPECIFIC_MODULES=presto-main
- TEST_SPECIFIC_MODULES=presto-mongodb
- TEST_OTHER_MODULES=!presto-tests,!presto-raptor,!presto-accumulo,!presto-cassandra,!presto-hive,!presto-kudu,!presto-docs,!presto-server,!presto-server-rpm,!presto-main,!presto-mongodb
- TEST_OTHER_MODULES=!presto-tests,!presto-raptor,!presto-accumulo,!presto-cassandra,!presto-hive,!presto-kudu,!presto-docs,!presto-server,!presto-server-rpm,!presto-main,!presto-mongodb,!presto-spark-package,!presto-spark-launcher,!presto-spark-testing
- PRODUCT_TESTS_BASIC_ENVIRONMENT=true
- PRODUCT_TESTS_SPECIFIC_ENVIRONMENT=true
- PRODUCT_TESTS_SPECIFIC_ENVIRONMENT_2=true
- HIVE_TESTS=true
- KUDU_TESTS=true
- SPARK_INTEGRATION_TESTS=true

sudo: required
dist: trusty
@@ -54,11 +55,11 @@ install:
fi
- |
if [[ -v TEST_OTHER_MODULES ]]; then
./mvnw install $MAVEN_FAST_INSTALL -pl '!presto-docs,!presto-server,!presto-server-rpm'
./mvnw install $MAVEN_FAST_INSTALL -pl '!presto-docs,!presto-server,!presto-server-rpm,!presto-spark-package,!presto-spark-launcher,!presto-spark-testing'
fi
- |
if [[ -v PRODUCT_TESTS_BASIC_ENVIRONMENT || -v PRODUCT_TESTS_SPECIFIC_ENVIRONMENT || -v PRODUCT_TESTS_SPECIFIC_ENVIRONMENT_2 ]]; then
./mvnw install $MAVEN_FAST_INSTALL -pl '!presto-docs,!presto-server-rpm'
./mvnw install $MAVEN_FAST_INSTALL -pl '!presto-docs,!presto-server-rpm,!presto-spark-package,!presto-spark-launcher,!presto-spark-testing'
fi
- |
if [[ -v HIVE_TESTS ]]; then
@@ -68,6 +69,10 @@
if [[ -v KUDU_TESTS ]]; then
./mvnw install $MAVEN_FAST_INSTALL -pl presto-kudu -am
fi
- |
if [[ -v SPARK_INTEGRATION_TESTS ]]; then
./mvnw install $MAVEN_FAST_INSTALL -pl '!presto-docs,!presto-server,!presto-server-rpm'
fi
before_script:
- |
@@ -203,6 +208,10 @@ script:
presto-kudu/bin/run_kudu_tests.sh 1 ""
presto-kudu/bin/run_kudu_tests.sh 1 presto::
fi
- |
if [[ -v SPARK_INTEGRATION_TESTS ]]; then
./mvnw test $MAVEN_SKIP_CHECKS_AND_DOCS -B -pl presto-spark-launcher,presto-spark-package,presto-spark-testing $TEST_FLAGS
fi
before_cache:
# Make the cache stable between builds by removing build output
1 change: 1 addition & 0 deletions pom.xml
@@ -150,6 +150,7 @@
<module>presto-spark</module>
<module>presto-spark-package</module>
<module>presto-spark-launcher</module>
<module>presto-spark-testing</module>
</modules>

<dependencyManagement>
137 changes: 137 additions & 0 deletions presto-spark-testing/pom.xml
@@ -0,0 +1,137 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>presto-root</artifactId>
<groupId>com.facebook.presto</groupId>
<version>0.232-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

<artifactId>presto-spark-testing</artifactId>

<properties>
<air.main.basedir>${project.parent.basedir}</air.main.basedir>
</properties>

<dependencies>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>units</artifactId>
</dependency>

<dependency>
<groupId>com.facebook.airlift</groupId>
<artifactId>log</artifactId>
</dependency>

<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>

<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.facebook.airlift</groupId>
<artifactId>testing</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-tests</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-main</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-spi</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-tpch</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-hive-hadoop2</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.facebook.presto.hadoop</groupId>
<artifactId>hadoop-apache2</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<scope>test</scope>
</dependency>

        <!-- Just to make sure these two packages are built before presto-spark-testing -->
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-spark-launcher</artifactId>
<version>${project.version}</version>
<type>jar</type>
<classifier>shaded</classifier>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-spark-package</artifactId>
<version>${project.version}</version>
<type>tar.gz</type>
<scope>test</scope>
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.basepom.maven</groupId>
<artifactId>duplicate-finder-maven-plugin</artifactId>
<configuration>
<ignoredResourcePatterns>
<ignoredResourcePattern>parquet.thrift</ignoredResourcePattern>
<ignoredResourcePattern>about.html</ignoredResourcePattern>
</ignoredResourcePatterns>
</configuration>
</plugin>

<!-- Workaround for "Corrupted STDOUT by directly writing to native stream in forked JVM 1" -->
<!-- https://stackoverflow.com/questions/55632614/maven-surefire-plugin-crahsing-jvm-on-java-11-corrupted-stdout-by-directly-writ -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<forkCount>0</forkCount>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -0,0 +1,154 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spark.testing;

import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;

import java.io.File;
import java.util.List;
import java.util.Map;

import static com.facebook.presto.spark.testing.Processes.startProcess;
import static com.facebook.presto.spark.testing.Processes.waitForProcess;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;

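/**
 * Thin wrapper around the docker-compose command-line client, bound to a single
 * compose file. Every operation shells out to docker-compose via the Processes helper.
 */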
public class DockerCompose
{
private final File composeYaml;

public DockerCompose(File composeYaml)
{
requireNonNull(composeYaml, "composeYaml is null");
checkArgument(composeYaml.exists() && composeYaml.isFile(), "file does not exist: %s", composeYaml);
checkArgument(composeYaml.canRead(), "file is not readable: %s", composeYaml);
this.composeYaml = composeYaml;
}

public void verifyInstallation()
throws InterruptedException
{
checkState(Processes.execute("docker", "--version") == 0, "docker is not installed");
checkState(Processes.execute("docker-compose", "--version") == 0, "docker-compose is not installed");
}

public void pull()
throws InterruptedException
{
int exitCode = execute("pull");
        checkState(exitCode == 0, "pull exited with code: %s", exitCode);
}

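    /**
     * Starts the given services with "docker-compose up", scaling each service to the
     * requested number of instances. Returns the launched docker-compose process so the
     * caller can monitor it or shut the environment down.
     */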
public Process up(Map<String, Integer> services)
{
ImmutableList.Builder<String> parameters = ImmutableList.builder();
parameters.add("up", "--force-recreate", "--abort-on-container-exit");
services.forEach((service, scale) -> {
parameters.add("--scale", format("%s=%s", service, scale));
});
parameters.addAll(services.keySet());
return start(parameters.build());
}

public void down()
throws InterruptedException
{
int exitCode = execute("down");
        checkState(exitCode == 0, "down exited with code: %s", exitCode);
}

public String getContainerAddress(String service)
throws InterruptedException
{
String containerId = getContainerId(service);
return Processes.executeForOutput("docker", "inspect", "-f", "{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}", containerId).trim();
}

public int run(String... parameters)
throws InterruptedException
{
return run(ImmutableList.copyOf(parameters));
}

public int run(List<String> parameters)
throws InterruptedException
{
ImmutableList.Builder<String> args = ImmutableList.builder();
args.add("run", "--rm");
args.addAll(parameters);
return execute(args.build());
}

private String getContainerId(String service)
throws InterruptedException
{
List<String> containerIds = getContainerIds();
for (String containerId : containerIds) {
String output = Processes.executeForOutput(
"docker",
"ps", "-q",
"--filter", format("id=%s", containerId),
"--filter", format("name=%s", service));
if (!output.isEmpty()) {
return containerId;
}
}
throw new IllegalArgumentException(format("container not found: %s", service));
}

private List<String> getContainerIds()
throws InterruptedException
{
String output = executeForOutput("ps", "-q");
return Splitter.on('\n').trimResults().omitEmptyStrings().splitToList(output);
}

private int execute(String... args)
throws InterruptedException
{
return execute(ImmutableList.copyOf(args));
}

private int execute(List<String> args)
throws InterruptedException
{
return waitForProcess(start(args));
}

private String executeForOutput(String... args)
throws InterruptedException
{
return executeForOutput(ImmutableList.copyOf(args));
}

private String executeForOutput(List<String> args)
throws InterruptedException
{
return Processes.executeForOutput(ImmutableList.<String>builder()
.add("docker-compose", "-f", composeYaml.getAbsolutePath())
.addAll(args)
.build());
}

private Process start(List<String> args)
{
return startProcess(ImmutableList.<String>builder()
.add("docker-compose", "-f", composeYaml.getAbsolutePath())
.addAll(args)
.build());
}
}
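
For orientation, here is a minimal, hypothetical usage sketch of the DockerCompose helper above. It is not part of this commit: the compose file path and the service names ("spark-master", "spark-worker") are placeholder assumptions, and the real test wiring lives in the changed files not shown on this page.

import com.facebook.presto.spark.testing.DockerCompose;
import com.google.common.collect.ImmutableMap;

import java.io.File;

public class DockerComposeUsageSketch
{
    public static void main(String[] args)
            throws InterruptedException
    {
        // Hypothetical compose file location; the actual test resources may differ.
        DockerCompose compose = new DockerCompose(new File("src/test/resources/docker-compose.yml"));
        compose.verifyInstallation();   // fail fast if docker / docker-compose are missing
        compose.pull();                 // fetch the images referenced by the compose file

        // Bring up one master and two workers; up() returns the running "docker-compose up" process.
        Process environment = compose.up(ImmutableMap.of("spark-master", 1, "spark-worker", 2));
        try {
            String masterAddress = compose.getContainerAddress("spark-master");
            System.out.println("spark master is reachable at " + masterAddress);
        }
        finally {
            environment.destroy();
            compose.down();             // tear the containers down
        }
    }
}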