Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Integration Test for Job Coordinator #886

Merged
merged 19 commits into from
Jul 21, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions .github/workflows/integration_tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
name: integration tests
on: [push, pull_request]
jobs:
  maven-integration-test:
    runs-on: ubuntu-latest
    name: Maven Integration Test
    steps:
      - uses: actions/checkout@v2
      # NOTE: step name previously said "JDK 1.8" while actually installing Java 11;
      # renamed so the workflow log matches the toolchain in use.
      - name: Set up JDK 11
        uses: actions/setup-java@v1
        with:
          java-version: '11'
          java-package: jdk
          architecture: x64
      - name: Run integration tests
        run: make test-java-integration
5 changes: 4 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,9 @@ lint-java:
# Run Java unit tests only.
test-java:
	mvn --no-transfer-progress test

# Run Java integration tests via the maven-failsafe verify phase.
# Unit tests (-DskipUTs), javadoc generation and GPG signing are skipped
# so CI only pays for the integration suite here.
test-java-integration:
	mvn --no-transfer-progress -Dmaven.javadoc.skip=true -Dgpg.skip -DskipUTs=true clean verify

# Run Java unit tests and produce an aggregated JaCoCo coverage report.
test-java-with-coverage:
	mvn --no-transfer-progress test jacoco:report-aggregate

Expand Down Expand Up @@ -175,4 +178,4 @@ build-html: clean-html

# Versions
lint-versions:
./infra/scripts/validate-version-consistency.sh
./infra/scripts/validate-version-consistency.sh
44 changes: 43 additions & 1 deletion core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -312,8 +312,50 @@
<dependency>
  <groupId>org.springframework</groupId>
  <artifactId>spring-test</artifactId>
  <!-- 5.2.5+ is required for @DynamicPropertySource used by the integration tests
       (the stale 5.1.3.RELEASE element left over from the diff has been removed:
       a <dependency> must declare exactly one <version>). -->
  <version>5.2.5.RELEASE</version>
  <scope>test</scope>
</dependency>
<!-- JUnit 5 engine for the integration-test classes (@Nested, @TestInstance, etc.). -->
<dependency>
  <groupId>org.junit.jupiter</groupId>
  <artifactId>junit-jupiter</artifactId>
  <version>5.6.2</version>
  <scope>test</scope>
</dependency>
<!-- Testcontainers: throwaway Postgres and Kafka instances for integration tests.
     All four artifacts must share the same version. -->
<dependency>
  <groupId>org.testcontainers</groupId>
  <artifactId>testcontainers</artifactId>
  <version>1.14.3</version>
  <scope>test</scope>
</dependency>
<dependency>
  <groupId>org.testcontainers</groupId>
  <artifactId>junit-jupiter</artifactId>
  <version>1.14.3</version>
  <scope>test</scope>
</dependency>
<dependency>
  <groupId>org.testcontainers</groupId>
  <artifactId>postgresql</artifactId>
  <version>1.14.3</version>
  <scope>test</scope>
</dependency>
<dependency>
  <groupId>org.testcontainers</groupId>
  <artifactId>kafka</artifactId>
  <version>1.14.3</version>
  <scope>test</scope>
</dependency>
<!-- Awaitility: polling assertions for asynchronous behavior in integration tests. -->
<dependency>
  <groupId>org.awaitility</groupId>
  <artifactId>awaitility</artifactId>
  <version>3.0.0</version>
  <scope>test</scope>
</dependency>
<dependency>
  <groupId>org.awaitility</groupId>
  <artifactId>awaitility-proxy</artifactId>
  <version>3.0.0</version>
  <scope>test</scope>
</dependency>
</dependencies>
</project>
38 changes: 37 additions & 1 deletion core/src/main/java/feast/core/config/FeatureStreamConfig.java
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,10 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;

@Slf4j
@Configuration
Expand Down Expand Up @@ -94,6 +97,13 @@ public NewTopic featureSetSpecsAckTopic(FeastProperties feastProperties) {
(short) 1);
}

/**
* Creates kafka publisher for sending FeatureSetSpecs to ingestion job. Uses ProtoSerializer to
* serialize FeatureSetSpec.
*
* @param feastProperties
* @return
*/
@Bean
public KafkaTemplate<String, FeatureSetProto.FeatureSetSpec> specKafkaTemplate(
FeastProperties feastProperties) {
Expand All @@ -112,8 +122,34 @@ public KafkaTemplate<String, FeatureSetProto.FeatureSetSpec> specKafkaTemplate(
return t;
}

/**
* Set configured consumerFactory for specs acknowledgment topic (see ackConsumerFactory) as
* default for KafkaListener.
*
* @param consumerFactory
* @return
*/
@Bean
public ConsumerFactory<?, ?> ackConsumerFactory(FeastProperties feastProperties) {
KafkaListenerContainerFactory<
pyalex marked this conversation as resolved.
Show resolved Hide resolved
ConcurrentMessageListenerContainer<String, IngestionJobProto.FeatureSetSpecAck>>
kafkaAckListenerContainerFactory(
ConsumerFactory<String, IngestionJobProto.FeatureSetSpecAck> consumerFactory) {
ConcurrentKafkaListenerContainerFactory<String, IngestionJobProto.FeatureSetSpecAck> factory =
new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory);
return factory;
}

/**
* Prepares kafka consumer (by configuring ConsumerFactory) to receive acknowledgments from
* IngestionJob on successful updates of FeatureSetSpecs.
*
* @param feastProperties
* @return ConsumerFactory for FeatureSetSpecAck
*/
@Bean
public ConsumerFactory<String, IngestionJobProto.FeatureSetSpecAck> ackConsumerFactory(
FeastProperties feastProperties) {
StreamProperties streamProperties = feastProperties.getStream();
Map<String, Object> props = new HashMap<>();

Expand Down
4 changes: 2 additions & 2 deletions core/src/main/java/feast/core/config/JobConfig.java
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
import feast.proto.core.SourceProto;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

Expand Down Expand Up @@ -94,7 +94,7 @@ public JobGroupingStrategy getJobGroupingStrategy(
* @param feastProperties feast config properties
*/
@Bean
@Autowired
@ConditionalOnMissingBean
pyalex marked this conversation as resolved.
Show resolved Hide resolved
public JobManager getJobManager(
FeastProperties feastProperties,
IngestionJobProto.SpecsStreamingUpdateConfig specsStreamingUpdateConfig)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ public abstract class AbstractTimestampEntity {
private Date created;

@Temporal(TemporalType.TIMESTAMP)
@Column(name = "lastUpdated", nullable = false)
@Column(name = "last_updated", nullable = false)
private Date lastUpdated;

@PrePersist
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -408,7 +408,9 @@ public void notifyJobsWhenFeatureSetUpdated() {
*
* @param record ConsumerRecord with key: FeatureSet reference and value: Ack message
*/
@KafkaListener(topics = {"${feast.stream.specsOptions.specsAckTopic}"})
@KafkaListener(
topics = {"${feast.stream.specsOptions.specsAckTopic}"},
containerFactory = "kafkaAckListenerContainerFactory")
@Transactional
public void listenAckFromJobs(
ConsumerRecord<String, IngestionJobProto.FeatureSetSpecAck> record) {
Expand Down
202 changes: 202 additions & 0 deletions core/src/test/java/feast/core/it/BaseIT.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,202 @@
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright 2018-2020 The Feast Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package feast.core.it;

import feast.core.config.FeastProperties;
import feast.core.util.KafkaSerialization;
import feast.proto.core.IngestionJobProto;
import io.prometheus.client.CollectorRegistry;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Table;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.hibernate.engine.spi.SessionImplementor;
import org.junit.jupiter.api.*;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.containers.PostgreSQLContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;

/**
 * Base Integration Test class. Sets up Postgres and Kafka containers, configures related Spring
 * properties and beans, and provides DB clean-up between tests.
 *
 * <p>The Spring context is discarded after each IT class ({@link DirtiesContext}) because the
 * containers it points at are also class-scoped.
 */
@SpringBootTest
@ActiveProfiles("it")
@Testcontainers
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
public class BaseIT {

  @Container public static PostgreSQLContainer<?> postgreSQLContainer = new PostgreSQLContainer<>();

  @Container public static KafkaContainer kafka = new KafkaContainer();

  /**
   * Points the Spring application at the Postgres and Kafka instances rolled out in containers.
   * Called by the test framework before the context starts.
   *
   * @param registry registry used to override application properties at runtime
   */
  @DynamicPropertySource
  static void properties(DynamicPropertyRegistry registry) {

    registry.add("spring.datasource.url", postgreSQLContainer::getJdbcUrl);
    registry.add("spring.datasource.username", postgreSQLContainer::getUsername);
    registry.add("spring.datasource.password", postgreSQLContainer::getPassword);
    // Schema is created by migrations, not by Hibernate.
    registry.add("spring.jpa.hibernate.ddl-auto", () -> "none");

    registry.add("feast.stream.options.bootstrapServers", kafka::getBootstrapServers);
  }

  /**
   * SequentialFlow is a base class that is supposed to be inherited by {@code @Nested} test classes
   * that want to preserve context between test cases. For SequentialFlow the database is truncated
   * only once, after all tests have passed.
   */
  @TestInstance(TestInstance.Lifecycle.PER_CLASS)
  public class SequentialFlow {
    @AfterAll
    public void tearDown() throws Exception {
      cleanTables(entityManager);
    }
  }

  /**
   * This class must be inherited inside an IT class and annotated with {@link
   * org.springframework.boot.test.context.TestConfiguration}. It provides the configuration needed
   * to communicate with Feast via Kafka.
   */
  public static class BaseTestConfig {
    /** Listener container factory for test consumers of raw (byte[]) Kafka messages. */
    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, byte[]>>
        testListenerContainerFactory(ConsumerFactory<String, byte[]> consumerFactory) {
      ConcurrentKafkaListenerContainerFactory<String, byte[]> factory =
          new ConcurrentKafkaListenerContainerFactory<>();
      factory.setConsumerFactory(consumerFactory);
      return factory;
    }

    /** Consumer factory pointing at the containerized Kafka broker; values are left as bytes. */
    @Bean
    public ConsumerFactory<String, byte[]> testConsumerFactory() {
      Map<String, Object> props = new HashMap<>();

      props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
      props.put(ConsumerConfig.GROUP_ID_CONFIG, "test");

      return new DefaultKafkaConsumerFactory<>(
          props, new StringDeserializer(), new ByteArrayDeserializer());
    }

    /**
     * Template used by tests to publish FeatureSetSpecAck messages (impersonating an IngestionJob)
     * to the configured specs-ack topic.
     *
     * @param feastProperties source of the specs-ack topic name
     * @return KafkaTemplate with the ack topic preset as its default topic
     */
    @Bean
    public KafkaTemplate<String, IngestionJobProto.FeatureSetSpecAck> specAckKafkaTemplate(
        FeastProperties feastProperties) {
      FeastProperties.StreamProperties streamProperties = feastProperties.getStream();
      Map<String, Object> props = new HashMap<>();

      props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());

      KafkaTemplate<String, IngestionJobProto.FeatureSetSpecAck> t =
          new KafkaTemplate<>(
              new DefaultKafkaProducerFactory<>(
                  props, new StringSerializer(), new KafkaSerialization.ProtoSerializer<>()));
      t.setDefaultTopic(streamProperties.getSpecsOptions().getSpecsAckTopic());
      return t;
    }
  }

  /**
   * Truncates all tables known to the JPA metamodel (between tests or flows). Retries on deadlock:
   * truncate requires an exclusive lock, and since the Spring application is still running in
   * another thread that often leads to a deadlock.
   *
   * @param em EntityManager; used to reach the metamodel and the EntityManagerFactory
   * @throws SQLException if the truncate still fails after the final retry
   */
  public static void cleanTables(EntityManager em) throws SQLException {
    List<String> tableNames =
        em.getMetamodel().getEntities().stream()
            .map(e -> e.getJavaType().getAnnotation(Table.class).name())
            .collect(Collectors.toList());

    // Trick to get an EntityManager we can unwrap without wrapping the whole class in
    // @Transactional. Must be closed afterwards: the original code leaked one EntityManager
    // (and its connection) per clean-up.
    EntityManager localEm = em.getEntityManagerFactory().createEntityManager();
    try {
      // Transaction machinery is needed only once, to do the unwrap.
      SessionImplementor session = localEm.unwrap(SessionImplementor.class);

      // We deliberately avoid transactions here and pull the raw connection instead,
      // so the statement can simply be re-issued on failure — retrying a rolled-back
      // transaction is not that easy.
      Connection connection = session.connection();

      final int numRetries = 5;
      for (int attempt = 1; attempt <= numRetries; attempt++) {
        // try-with-resources: the Statement was previously leaked on every iteration.
        try (Statement statement = connection.createStatement()) {
          statement.execute(String.format("truncate %s cascade", String.join(", ", tableNames)));
          break; // success — no further retries needed
        } catch (SQLException e) {
          if (attempt == numRetries) {
            throw e; // out of retries: surface the last failure
          }
          // likely a deadlock with the still-running application thread; retry
        }
      }
    } finally {
      localEm.close();
    }
  }

  // Injected application EntityManager; shared by tearDown and SequentialFlow.
  @PersistenceContext EntityManager entityManager;

  /** Used to determine SequentialFlows: nested test classes manage their own clean-up. */
  public Boolean isNestedTest(TestInfo testInfo) {
    return testInfo.getTestClass().get().getAnnotation(Nested.class) != null;
  }

  /**
   * Per-test clean-up: resets Prometheus collectors (they are process-global and would collide
   * across tests) and truncates the DB unless the test is part of a SequentialFlow.
   */
  @AfterEach
  public void tearDown(TestInfo testInfo) throws Exception {
    CollectorRegistry.defaultRegistry.clear();

    if (!isNestedTest(testInfo)) {
      cleanTables(entityManager);
    }
  }
}
Loading