Fixing CI of all e2e tests (waldophotos#32)
* Adding network_mode to docker compose

* Updating node version in cd/ci

* Installing docker-compose

* Using docker command instead of docker-compose

* Using a simpler docker command

* Changing travis and docker-compose containers

* Docker compose to circleCI

* Updating circleci docker

* localhost as advertised listener

* Adding hosts to circleci and more tolerance with some tests

* Removing intermittent tests to a more precise ci

* More interval to producer

* Updating readme with the new way to run

* Increasing timeout of producer

* Disconnecting consumers
ricardohbin authored May 9, 2018
1 parent 5a54442 commit dc53b8c
Showing 6 changed files with 62 additions and 40 deletions.
6 changes: 3 additions & 3 deletions .travis.yml
@@ -2,7 +2,7 @@ dist: trusty
 language: node_js
 sudo: required
 node_js:
-  - '6.9'
+  - '8.2.0'
 services:
   - docker
 cache:
@@ -12,6 +12,6 @@ addons:
   hosts:
     - kafka
 before_install:
-  - docker-compose up -d
   - sudo apt-get update -qq
-  - sudo apt-get -y install libsasl2-dev
+  - sudo apt-get -y install libsasl2-dev libssl-dev
+  - docker-compose up -d
7 changes: 6 additions & 1 deletion README.md
@@ -254,7 +254,12 @@ Deserialize the provided message, expects a message that includes Magic Byte and
 
 ## Testing
 
-[Use the kafka-avro-stub library](https://github.com/waldophotos/kafka-avro-stub) to avoid requiring Kafka and Schema Registry to run on your local for testing your service.
+You can use `docker-compose up` to bring up the whole stack before running the integration tests with `npm test`. Since the integration tests run outside the containers, you will need to add the following entry to your `hosts` file:
+
+```
+127.0.0.1 kafka
+```
+
 
 ## Releasing
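For context, a typical local test run under the new setup looks roughly like this (a sketch assuming a Unix-like host where `/etc/hosts` can be edited with sudo):

```
# bring up the Zookeeper, Kafka and Schema Registry services defined in docker-compose.yml
docker-compose up -d

# map the advertised broker hostname to the local machine (one-time setup)
echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts

# run the integration tests from the host
npm test
```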
11 changes: 8 additions & 3 deletions circle.yml
@@ -1,11 +1,16 @@
 machine:
   pre:
     - curl -sSL https://s3.amazonaws.com/circle-downloads/install-circleci-docker.sh | bash -s -- 1.10.0
+    - pip install docker-compose
   services:
     - docker
   node:
-    version: 6.9.1
+    version: 8.2.0
+  hosts:
+    kafka: 127.0.0.1
+
 dependencies:
   pre:
-    - docker run -d -p 2181:2181 -p 3030:3030 -p 8081:8081 -p 8082:8082 -p 8083:8083 -p 9092:9092 -e ADV_HOST=127.0.0.1 landoop/fast-data-dev
-    - sudo apt-get -y install libsasl2-dev
+    - docker-compose up -d
+    - sudo apt-get update;
+    - sudo apt-get -y install libsasl2-dev libssl-dev
40 changes: 32 additions & 8 deletions docker-compose.yml
@@ -1,14 +1,38 @@
 version: '2'
 
 services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper:4.1.0
+    hostname: zookeeper
+    environment:
+      SERVICE_NAME: zookeeper
+      ZOOKEEPER_CLIENT_PORT: 2181
+      ZOOKEEPER_TICK_TIME: 2000
+      zk_id: "1"
+    ports:
+      - "2181:2181"
+
   kafka:
-    image: landoop/fast-data-dev
+    image: confluentinc/cp-kafka:4.1.0
+    hostname: kafka
+    links:
+      - zookeeper
     ports:
-      - 2181:2181
-      - 3030:3030
-      - 8081:8081
-      - 8082:8082
-      - 8083:8083
-      - 9092:9092
+      - "9092:9092"
     environment:
-      ADV_HOST: kafka
+      KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
+      KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://kafka:9092"
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+
+  schema-registry:
+    image: confluentinc/cp-schema-registry:4.1.0
+    hostname: schema-registry
+    links:
+      - kafka
+      - zookeeper
+    ports:
+      - "8081:8081"
+    environment:
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: "zookeeper:2181"
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: "PLAINTEXT://kafka:9092"
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
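The compose file advertises the broker as `PLAINTEXT://kafka:9092`, which is why the Travis and CircleCI configs and the README all map the `kafka` hostname to `127.0.0.1`: a client on the host must reach the broker under the exact name it advertises. A minimal sketch of a client against this stack, using the constructor options from the kafka-avro README (treat the option names as assumptions here):

```
var KafkaAvro = require('kafka-avro');

var kafkaAvro = new KafkaAvro({
  // must match KAFKA_ADVERTISED_LISTENERS; resolves to 127.0.0.1 via the hosts entry
  kafkaBroker: 'kafka:9092',
  // the Schema Registry container publishes port 8081 on the host
  schemaRegistry: 'http://localhost:8081',
});

// init() connects and fetches the known schemas before any producer or consumer is created
kafkaAvro.init().then(function () {
  console.log('Ready to produce and consume Avro messages');
});
```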
4 changes: 1 addition & 3 deletions test/lib/test.lib.js
@@ -21,16 +21,14 @@ var testLib = module.exports = {};
 testLib.log = bunyan.createLogger({
   name: 'KafkaAvroTest',
   level: 'trace',
-  // stream: {write: function() {}},
   stream: fmt({
     outputMode: 'long',
     levelInString: true,
   }),
 });
 
 testLib.KAFKA_SCHEMA_REGISTRY_URL = 'http://localhost:8081';
-// testLib.KAFKA_SCHEMA_REGISTRY_URL = 'http://schema-registry-confluent.internal.dev.waldo.photos';
-testLib.KAFKA_BROKER_URL = 'localhost:9092';
+testLib.KAFKA_BROKER_URL = 'kafka:9092';
 
 testLib.topic = schemaFix.name;
 testLib.topicTwo = schemaTwoFix.name;
34 changes: 12 additions & 22 deletions test/spec/consumer.test.js
@@ -16,11 +16,8 @@ describe('Consume', function() {
 
   beforeEach(function() {
     this.consOpts = {
-      // 'debug': 'all',
       'group.id': 'testKafkaAvro' + crypto.randomBytes(20).toString('hex'),
       'enable.auto.commit': true,
-      // 'auto.offset.reset': 'earliest',
-      // 'session.timeout.ms': 1000,
     };
 
     testLib.log.info('beforeEach 1 on Consume');
@@ -35,7 +32,6 @@
   beforeEach(function() {
     testLib.log.info('beforeEach 2 on Consume');
     return this.kafkaAvro.getProducer({
-      // 'debug': 'all',
       'dr_cb': true,
     })
       .bind(this)
@@ -120,16 +116,7 @@ describe('Consume', function() {
       setTimeout(() => {
         produceTime = Date.now();
         this.producer.produce(testLib.topic, -1, message, 'key');
-      }, 4000);
-
-      // //need to keep polling for a while to ensure the delivery reports are received
-      // var pollLoop = setInterval(function () {
-      //   this.producer.poll();
-      //   if (this.gotReceipt) {
-      //     clearInterval(pollLoop);
-      //     this.producer.disconnect();
-      //   }
-      // }.bind(this), 1000);
-
+      }, 10000);
   });
 
   it('should produce and consume a message using consume "on", on a non Schema Registry topic', function(done) {
@@ -164,7 +151,7 @@
       testLib.log.info('Producing on non SR topic...');
       produceTime = Date.now();
       this.producer.produce(topicName, -1, message, 'key');
-    }, 4000);
+    }, 10000);
   });
 
   it('should produce and consume on two topics using a single consumer', function(done) {
@@ -211,21 +198,21 @@
       produceTime = Date.now();
       this.producer.produce(testLib.topicTwo, -1, message, 'key');
       this.producer.produce(testLib.topic, -1, message, 'key');
-    }, 2000);
+    }, 10000);
   });
 });
 
 describe('Consume using Streams', function() {
   it('should produce and consume a message using streams on two topics', function(done) {
     var produceTime = 0;
 
+    var isDone = false;
+
     var message = {
       name: 'Thanasis',
       long: 540,
     };
 
-    var isDone = false;
-
     this.kafkaAvro.getConsumerStream(this.consOpts, { 'enable.auto.commit': true }, { topics: [ testLib.topic, testLib.topicTwo ] })
     .then(function (consumerStream) {
       consumerStream.on('error', noop);
@@ -241,7 +228,10 @@
 
         expect(data.name).to.equal(message.name);
         expect(data.long).to.equal(message.long);
-        if (!isDone) done();
+        if (!isDone) {
+          consumerStream.consumer.disconnect();
+          done();
+        }
         isDone = true;
       });
     });
@@ -250,7 +240,7 @@
       produceTime = Date.now();
       this.producer.produce(testLib.topicTwo, -1, message, 'key');
       this.producer.produce(testLib.topic, -1, message, 'key');
-    }, 2000);
+    }, 10000);
   });
 
   it('should produce and consume a message using streams on a not SR topic', function(done) {
@@ -278,15 +268,15 @@
 
         expect(data.name).to.equal(message.name);
         expect(data.long).to.equal(message.long);
-
+        consumerStream.consumer.disconnect();
         done();
       });
     });
 
     setTimeout(() => {
       produceTime = Date.now();
       this.producer.produce(topicName, -1, message, 'key');
-    }, 2000);
+    }, 10000);
   });
 });
 });
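The change the stream tests share is worth isolating: guard `done()` so it fires only once when two topics deliver, and disconnect the consumer so the Node process, and with it the CI job, can exit instead of hanging. A minimal sketch with a `kafkaAvro` instance initialized as in the earlier example; the three-argument `getConsumerStream` call mirrors the tests above, while the `rawData.parsed` payload shape is an assumption about kafka-avro's stream API:

```
kafkaAvro.getConsumerStream(
  { 'group.id': 'example-group', 'enable.auto.commit': true },
  { 'enable.auto.commit': true },
  { topics: ['topic-one', 'topic-two'] }
)
.then(function (consumerStream) {
  var isDone = false;

  consumerStream.on('data', function (rawData) {
    var data = rawData.parsed;
    // ...assert on the deserialized message here...

    if (!isDone) {
      // An open consumer keeps the event loop alive, so disconnect
      // before signalling completion, or the test runner never exits.
      consumerStream.consumer.disconnect();
      done(); // done() is the mocha callback from the enclosing it()
    }
    isDone = true;
  });
});
```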
