Merge pull request #978 from confluentinc/prep152
Preps for v1.5.2 release
edenhill authored Oct 29, 2020
2 parents baee8a6 + 0b89441 commit c33a0cc
Showing 57 changed files with 130 additions and 124 deletions.
3 changes: 2 additions & 1 deletion .appveyor.yml
@@ -1,6 +1,6 @@
environment:
global:
LIBRDKAFKA_NUGET_VERSION: 1.5.0
LIBRDKAFKA_NUGET_VERSION: 1.5.2
CIBW_SKIP: cp33-* cp34-*
CIBW_TEST_REQUIRES: pytest pytest-timeout requests trivup
# SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
@@ -22,6 +22,7 @@ install:
- SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%
- python --version
- python -m pip install -U pip
- python -m pip install -U -r tests/requirements.txt

build_script:
- tools/windows-build.bat
36 changes: 17 additions & 19 deletions .travis.yml
@@ -1,33 +1,33 @@
env:
global:
- LIBRDKAFKA_VERSION=v1.5.0
- LIBRDKAFKA_VERSION=v1.5.2
jobs:
include:
# Source package verification with Python 2.7
- os: linux
- name: "Source package verification with Python 2.7 (Linux)"
os: linux
language: python
dist: trusty
python: "2.7"
env: LD_LIBRARY_PATH="$PWD/tmp-build/lib"
# Source package verification with Python 3.6
- os: linux
- name: "Source package verification with Python 3.6 (Linux)"
os: linux
language: python
dist: trusty
python: "3.6"
env: LD_LIBRARY_PATH="$PWD/tmp-build/lib"
# Source package verification with Python 2.7
- os: osx
- name: "Source package verification with Python 2.7 (OSX)"
os: osx
python: "2.7"
env: DYLD_LIBRARY_PATH="$PWD/tmp-build/lib" INTERPRETER_VERSION="2.7.17"
# Source package verification with Python 3.6
- os: osx
- name: "Source package verification with Python 3.6 (OSX) +docs"
os: osx
python: "3.6"
env: DYLD_LIBRARY_PATH="$PWD/tmp-build/lib" MK_DOCS="y" INTERPRETER_VERSION="3.6.5"
# cibuildwheel for osx
- os: osx
- name: "cibuildwheel (OSX)"
os: osx
env: CIBW_BEFORE_BUILD="tools/bootstrap-librdkafka.sh --require-ssl ${LIBRDKAFKA_VERSION} tmp" CFLAGS="-Itmp/include" LDFLAGS="-Ltmp/lib" INTERPRETER_VERSION="2.7.17"
# cibuildwheel for manylinux
- os: linux
- name: "cibuildwheel (manylinux)"
os: linux
dist: trusty
env:
- CIBW_BEFORE_BUILD="tools/prepare-cibuildwheel-linux.sh ${LIBRDKAFKA_VERSION}"
@@ -53,22 +53,20 @@ before_install:
install:
- tools/install-interceptors.sh
- pip install -r tests/requirements.txt
- pip install tox
- flake8
- if [[ $MK_DOCS == y ]]; then pip install -r docs/requirements.txt; fi
- if [[ -z $CIBW_BEFORE_BUILD ]]; then tools/bootstrap-librdkafka.sh --require-ssl ${LIBRDKAFKA_VERSION} tmp-build && pip install --global-option=build_ext --global-option="-Itmp-build/include/" --global-option="-Ltmp-build/lib" . .[avro]; fi

# Build wheels
script:
- if [[ -n $TRAVIS_TAG && -n $CIBW_BEFORE_BUILD ]]; then cibuildwheel --output-dir wheelhouse1 && tools/fixup-wheels.sh wheelhouse1 wheelhouse; fi

# Make plugins available for tests
# Execute tests if not CIBW_BEFORE_BUILD [osx, linux]
# Execute integration tests if CIBW_BEFORE_BUILD
# Build docs if MK_DOCS
after_script:
script:
- if [[ -n $TRAVIS_TAG && -n $CIBW_BEFORE_BUILD ]]; then cibuildwheel --output-dir wheelhouse1 && tools/fixup-wheels.sh wheelhouse1 wheelhouse; fi
- ldd staging/libs/* || otool -L staging/libs/* || true
- if [[ -z $CIBW_BEFORE_BUILD && $TRAVIS_OS_NAME == "osx" ]]; then DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:staging/libs py.test --timeout=60 --ignore=tmp-build --import-mode append; fi
- if [[ -z $CIBW_BEFORE_BUILD && $TRAVIS_OS_NAME == "linux" ]]; then LD_LIBRARY_PATH=$LD_LIBRARY_PATH:staging/libs py.test --timeout=60 --ignore=tmp-build --import-mode append; fi
- [[ -n $CIBW_BEFORE_BUILD ]] || LD_LIBRARY_PATH=$LD_LIBRARY_PATH:staging/libs DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:staging/libs tox
- if [[ -n $TRAVIS_TAG && $TRAVIS_OS_NAME == osx && -n $CIBW_BEFORE_BUILD ]]; then tools/test-wheel.sh wheelhouse; fi
- if [[ $MK_DOCS == y ]]; then make docs; fi

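For context (not part of the diff): the Travis script now delegates test runs to `tox` instead of invoking `py.test` separately per OS. A minimal `tox.ini` sketch consistent with the flags above (hypothetical; the repository's actual file may differ):

```ini
[tox]
envlist = py27,py36

[testenv]
deps = -rtests/requirements.txt
# Let the tox-managed environments see the staged librdkafka libraries.
passenv = LD_LIBRARY_PATH DYLD_LIBRARY_PATH
commands =
    py.test --timeout=60 --ignore=tmp-build --import-mode append
```
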
23 changes: 23 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,28 @@
# Confluent's Python client for Apache Kafka

## v1.5.2

v1.5.2 is a maintenance release with the following fixes and enhancements:

- Add producer purge method with optional blocking argument (@peteryin21, #548)
- Add AdminClient.list_groups API (@messense, #948)
- Rename asyncio.py example to avoid circular import (#945)
- Upgrade bundled OpenSSL to v1.1.1h (from v1.0.2u)
- The Consumer destructor will no longer trigger `consumer.close()`
  callbacks; `consumer.close()` must now be called explicitly to cleanly
  close down the consumer and leave the group (see the sketch below).
- Fix `PY_SSIZE_T_CLEAN` warning in calls to produce().
- Restructure source tree to avoid undesired local imports of confluent_kafka
when running pytest.
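
As an illustrative aside (not part of the diff), here is a minimal sketch of the new and changed APIs above, assuming a broker at `localhost:9092` and the signatures documented for confluent-kafka-python:

```python
from confluent_kafka import Consumer, Producer
from confluent_kafka.admin import AdminClient

conf = {'bootstrap.servers': 'localhost:9092'}  # assumed broker address

# New: purge messages queued for delivery; the optional arguments control
# whether queued/in-flight messages are purged and whether the call blocks.
producer = Producer(conf)
producer.produce('example-topic', value=b'payload')
producer.purge()

# New: list consumer groups known to the cluster.
admin = AdminClient(conf)
for group in admin.list_groups(timeout=10):
    print(group.id)

# Changed: the destructor no longer invokes close() callbacks, so close()
# must be called explicitly to commit final offsets and leave the group.
consumer_conf = dict(conf)
consumer_conf['group.id'] = 'example-group'
consumer = Consumer(consumer_conf)
consumer.subscribe(['example-topic'])
consumer.poll(1.0)
consumer.close()
```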

confluent-kafka-python is based on librdkafka v1.5.2, see the
[librdkafka release notes](https://github.com/edenhill/librdkafka/releases/tag/v1.5.2)
for a complete list of changes, enhancements, fixes and upgrade considerations.


**Note: There was no v1.5.1 release**


## v1.5.0

v1.5.0 is a maintenance release with the following fixes and enhancements:
2 changes: 1 addition & 1 deletion MANIFEST.in
@@ -1,4 +1,4 @@
include README.md
include LICENSE.txt
include test-requirements.txt
include confluent_kafka/src/*.[ch]
include src/confluent_kafka/src/*.[ch]
4 changes: 2 additions & 2 deletions docs/conf.py
@@ -57,9 +57,9 @@
# built documents.
#
# The short X.Y version.
version = '1.5.0'
version = '1.5.2'
# The full version, including alpha/beta/rc tags.
release = '1.5.0'
release = version

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
2 changes: 1 addition & 1 deletion examples/docker/Dockerfile.alpine
@@ -30,7 +30,7 @@ FROM alpine:3.12

COPY . /usr/src/confluent-kafka-python

ENV LIBRDKAFKA_VERSION v1.5.0
ENV LIBRDKAFKA_VERSION v1.5.2
ENV KAFKACAT_VERSION master


7 changes: 4 additions & 3 deletions setup.py
@@ -6,7 +6,7 @@
import platform

work_dir = os.path.dirname(os.path.realpath(__file__))
mod_dir = os.path.join(work_dir, 'confluent_kafka')
mod_dir = os.path.join(work_dir, 'src', 'confluent_kafka')
ext_dir = os.path.join(mod_dir, 'src')

INSTALL_REQUIRES = [
@@ -64,13 +64,14 @@ def get_install_requirements(path):
setup(name='confluent-kafka',
# Make sure to bump CFL_VERSION* in confluent_kafka/src/confluent_kafka.h
# and version and release in docs/conf.py.
version='1.5.0',
version='1.5.2',
description='Confluent\'s Python client for Apache Kafka',
author='Confluent Inc',
author_email='[email protected]',
url='https://github.com/confluentinc/confluent-kafka-python',
ext_modules=[module],
packages=find_packages(exclude=("tests", "tests.*")),
packages=find_packages('src'),
package_dir={'': 'src'},
data_files=[('', [os.path.join(work_dir, 'LICENSE.txt')])],
install_requires=INSTALL_REQUIRES,
extras_require={
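
For context (not part of the diff): pointing `find_packages` at `src` with `package_dir={'': 'src'}` is the conventional "src layout". The checkout root no longer contains an importable `confluent_kafka/` directory, so running `pytest` from the repository imports the installed package (with its compiled C extension) rather than the local source tree. Roughly:

```
.
├── setup.py
├── src/
│   └── confluent_kafka/
│       ├── __init__.py
│       └── src/          # C extension sources (*.c, *.h)
└── tests/
```
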
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -86,10 +86,12 @@ def set_config(conf, args):
if n == 'partition.assignment.strategy':
# Convert Java class name to config value.
# "org.apache.kafka.clients.consumer.RangeAssignor" -> "range"
conf[n] = re.sub(r'org.apache.kafka.clients.consumer.(\w+)Assignor',
lambda x: x.group(1).lower(), v)
else:
conf[n] = v
v = re.sub(r'org.apache.kafka.clients.consumer.(\w+)Assignor',
lambda x: x.group(1).lower(), v)
if v == 'sticky':
v = 'cooperative-sticky'

conf[n] = v

@staticmethod
def read_config_file(path):
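
For context (not part of the diff): the updated conversion lower-cases the Java assignor class name as before, and additionally maps `sticky` to `cooperative-sticky`, presumably because librdkafka exposes no plain `sticky` value for `partition.assignment.strategy`. A standalone sketch of the same transformation:

```python
import re


def java_assignor_to_config(value):
    # "org.apache.kafka.clients.consumer.RangeAssignor" -> "range"
    value = re.sub(r'org.apache.kafka.clients.consumer.(\w+)Assignor',
                   lambda m: m.group(1).lower(), value)
    # Java's StickyAssignor has no direct librdkafka equivalent.
    return 'cooperative-sticky' if value == 'sticky' else value


assert java_assignor_to_config(
    'org.apache.kafka.clients.consumer.RangeAssignor') == 'range'
assert java_assignor_to_config(
    'org.apache.kafka.clients.consumer.StickyAssignor') == 'cooperative-sticky'
```
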
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -63,12 +63,8 @@ static void Consumer_dealloc (Handle *self) {

CallState_begin(self, &cs);

/* If application has not called c.close() then
* rd_kafka_destroy() will, and that might trigger
* callbacks to be called from consumer_close().
* This should probably be fixed in librdkafka,
* or the application. */
rd_kafka_destroy(self->rk);
rd_kafka_destroy_flags(self->rk,
RD_KAFKA_DESTROY_F_NO_CONSUMER_CLOSE);

CallState_end(self, &cs);
}
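
For context: `rd_kafka_destroy_flags()` with `RD_KAFKA_DESTROY_F_NO_CONSUMER_CLOSE` destroys the handle without performing the implicit consumer close, so no rebalance or offset-commit callbacks can fire from the destructor; this is the C-level counterpart of the CHANGELOG note that `consumer.close()` must now be called explicitly.
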
File renamed without changes.
@@ -228,7 +228,7 @@ Producer_produce0 (Handle *self,
static PyObject *Producer_produce (Handle *self, PyObject *args,
PyObject *kwargs) {
const char *topic, *value = NULL, *key = NULL;
int value_len = 0, key_len = 0;
Py_ssize_t value_len = 0, key_len = 0;
int partition = RD_KAFKA_PARTITION_UA;
PyObject *headers = NULL, *dr_cb = NULL, *dr_cb2 = NULL;
long long timestamp = 0;
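
For context (not part of the diff): together with the `#define PY_SSIZE_T_CLEAN` added to the header below, the `s#` format units of `PyArg_ParseTuple*` fill `Py_ssize_t` length variables instead of `int`, which is what the CHANGELOG's `PY_SSIZE_T_CLEAN` fix for produce() refers to. A minimal standalone sketch:

```c
#define PY_SSIZE_T_CLEAN        /* must precede the Python.h include */
#include <Python.h>

static PyObject *value_len (PyObject *self, PyObject *args) {
        const char *buf;
        Py_ssize_t len;         /* with PY_SSIZE_T_CLEAN, "s#" expects
                                 * a Py_ssize_t, not an int */

        if (!PyArg_ParseTuple(args, "s#", &buf, &len))
                return NULL;

        return PyLong_FromSsize_t(len);
}
```
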
File renamed without changes.
@@ -14,6 +14,7 @@
* limitations under the License.
*/

#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <structmember.h>
#include <pythread.h>
@@ -41,8 +42,8 @@
* 0xMMmmRRPP
* MM=major, mm=minor, RR=revision, PP=patchlevel (not used)
*/
#define CFL_VERSION 0x01050000
#define CFL_VERSION_STR "1.5.0"
#define CFL_VERSION 0x01050200
#define CFL_VERSION_STR "1.5.2"

/**
* Minimum required librdkafka version. This is checked both during
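
For context: under the `0xMMmmRRPP` scheme, v1.5.2 encodes as `0x01050200` (major `01`, minor `05`, revision `02`, patchlevel `00`). A sketch with a hypothetical helper macro:

```c
/* Encode major.minor.revision.patchlevel as 0xMMmmRRPP. */
#define MK_CFL_VERSION(maj, min, rev, pl) \
        (((maj) << 24) | ((min) << 16) | ((rev) << 8) | (pl))

/* MK_CFL_VERSION(1, 5, 2, 0) == 0x01050200 == CFL_VERSION */
```
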
11 changes: 6 additions & 5 deletions tests/docker/.env
@@ -1,10 +1,11 @@
#!/usr/bin/env bash

export DOCKER_SOURCE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
export DOCKER_CONTEXT=$DOCKER_SOURCE/docker-compose.yaml
export DOCKER_BIN=$DOCKER_SOURCE/bin
export DOCKER_CONF=$DOCKER_SOURCE/conf
export TLS=$DOCKER_CONF/tls
export PY_DOCKER_SOURCE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
export PY_DOCKER_COMPOSE_FILE=$PY_DOCKER_SOURCE/docker-compose.yaml
export PY_DOCKER_CONTEXT="python-test-$(uuidgen)"
export PY_DOCKER_BIN=$PY_DOCKER_SOURCE/bin
export PY_DOCKER_CONF=$PY_DOCKER_SOURCE/conf
export TLS=$PY_DOCKER_CONF/tls

export MY_BOOTSTRAP_SERVER_ENV=localhost:29092
export MY_SCHEMA_REGISTRY_URL_ENV=http://$(hostname):8081
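
A plausible motive for the `DOCKER_*` to `PY_DOCKER_*` rename, not stated in the diff: the Docker CLI itself consumes variables such as `DOCKER_CONTEXT`, so prefixing the test-harness variables avoids clobbering them; the new `PY_DOCKER_CONTEXT="python-test-$(uuidgen)"` also gives each test run a unique identifier.
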
10 changes: 5 additions & 5 deletions tests/docker/bin/certify.sh
@@ -2,10 +2,10 @@

set -eu

DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
PY_DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
export PASS="abcdefgh"

source ${DOCKER_BIN}/../.env
source ${PY_DOCKER_BIN}/../.env

mkdir -p ${TLS}

@@ -17,11 +17,11 @@ fi
HOST=$(hostname -f)

echo "Creating ca-cert..."
${DOCKER_BIN}/gen-ssl-certs.sh ca ${TLS}/ca-cert ${HOST}
${PY_DOCKER_BIN}/gen-ssl-certs.sh ca ${TLS}/ca-cert ${HOST}
echo "Creating server cert..."
${DOCKER_BIN}/gen-ssl-certs.sh -k server ${TLS}/ca-cert ${TLS}/ ${HOST} ${HOST}
${PY_DOCKER_BIN}/gen-ssl-certs.sh -k server ${TLS}/ca-cert ${TLS}/ ${HOST} ${HOST}
echo "Creating client cert..."
${DOCKER_BIN}/gen-ssl-certs.sh client ${TLS}/ca-cert ${TLS}/ ${HOST} ${HOST}
${PY_DOCKER_BIN}/gen-ssl-certs.sh client ${TLS}/ca-cert ${TLS}/ ${HOST} ${HOST}

echo "Creating key ..."
openssl rsa -in ${TLS}/client.key -out ${TLS}/client.key -passin pass:${PASS}
6 changes: 3 additions & 3 deletions tests/docker/bin/cluster_down.sh
@@ -2,8 +2,8 @@

set -eu

DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
source ${DOCKER_BIN}/../.env
PY_DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
source ${PY_DOCKER_BIN}/../.env

echo "Destroying cluster.."
docker-compose -f ${DOCKER_CONTEXT} down -v --remove-orphans
docker-compose -f $PY_DOCKER_COMPOSE_FILE down -v --remove-orphans
13 changes: 7 additions & 6 deletions tests/docker/bin/cluster_up.sh
@@ -2,8 +2,8 @@

set -eu

DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
source ${DOCKER_BIN}/../.env
PY_DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
source ${PY_DOCKER_BIN}/../.env

# Wait for http service listener to come up and start serving
# $1 http service name
@@ -27,20 +27,21 @@ await_http() {
}

echo "Configuring Environment..."
source ${DOCKER_SOURCE}/.env
source ${PY_DOCKER_SOURCE}/.env

echo "Generating SSL certs..."
${DOCKER_BIN}/certify.sh
${PY_DOCKER_BIN}/certify.sh

echo "Deploying cluster..."
docker-compose -f ${DOCKER_CONTEXT} up -d
docker-compose -f $PY_DOCKER_COMPOSE_FILE up -d

echo "Setting throttle for throttle test..."
docker-compose -f ${DOCKER_CONTEXT} exec kafka sh -c "
docker-compose -f $PY_DOCKER_COMPOSE_FILE exec kafka sh -c "
/usr/bin/kafka-configs --zookeeper zookeeper:2181 \
--alter --add-config 'producer_byte_rate=1,consumer_byte_rate=1,request_percentage=001' \
--entity-name throttled_client --entity-type clients"

await_http "schema-registry" "http://localhost:8081"

await_http "schema-registry-basic-auth" "http://localhost:8083"

1 change: 1 addition & 0 deletions tests/integration/conftest.py
@@ -28,6 +28,7 @@
def kafka_cluster():

cluster = TrivupFixture({'with_sr': True,
'cp_version': 'latest',
'broker_conf': ['transaction.state.log.replication.factor=1',
'transaction.state.log.min.isr=1']})
try:
6 changes: 4 additions & 2 deletions tests/integration/consumer/test_consumer_error.py
@@ -17,7 +17,7 @@
#

import pytest
from confluent_kafka.cimpl import TopicPartition, OFFSET_END
from confluent_kafka import TopicPartition, OFFSET_END, KafkaError

from confluent_kafka.error import ConsumeError
from confluent_kafka.serialization import StringSerializer
@@ -39,6 +39,8 @@ def test_consume_error(kafka_cluster):
value_deserializer=StringSerializer())
consumer.assign([TopicPartition(topic, 0, OFFSET_END)])

with pytest.raises(ConsumeError, match="No more messages"):
with pytest.raises(ConsumeError) as exc_info:
# Trigger EOF error
consumer.poll()
assert exc_info.value.args[0].code() == KafkaError._PARTITION_EOF, \
"Expected _PARTITION_EOF, not {}".format(exc_info)