
Commit

Merge branch 'aiven:master' into master
amrutha-shanbhag authored Apr 6, 2022
2 parents b0935dc + 253d2c3 commit 7552049
Showing 18 changed files with 763 additions and 544 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/lint.yml
@@ -21,8 +21,12 @@ jobs:
       - name: Install dependencies
         run: pip install -r requirements-dev.txt

+      # required for pylint
+      - name: Generate version.py
+        run: make karapace/version.py
+
       - name: Run all pre-commit hooks
-        run: make lint
+        run: pre-commit run --all-files

   copyright:
     runs-on: ubuntu-latest
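The net effect of this hunk: the lint job now generates `karapace/version.py` (pylint needs to import it) and invokes pre-commit directly instead of going through `make lint`. A minimal local equivalent, assuming a virtualenv with `requirements-dev.txt` installed:

```sh
# Generate the version module first; pylint cannot import karapace without it.
make karapace/version.py
# Then run every configured pre-commit hook against the whole tree.
pre-commit run --all-files
```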
18 changes: 13 additions & 5 deletions .github/workflows/tests.yml
@@ -22,9 +22,6 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}

-      - name: Fetch kafka
-        run: make fetch-kafka
-
       - name: Install dependencies
         run: python -m pip install -r requirements-dev.txt

@@ -33,8 +30,19 @@
         with:
           version: '3.13.0'

+      # needed by both unit and integration tests
+      - name: Generate version.py
+        run: make karapace/version.py
+
       - name: Execute unit-tests
-        run: make unittest
+        run: python3 -m pytest -s -vvv tests/unit/

       - name: Execute integration-tests
-        run: make integrationtest
+        run: python3 -m pytest -s -vvv tests/integration/ --log-dir=/tmp/ci-logs --log-file=/tmp/ci-logs/pytest.log
+
+      - name: Archive logs
+        uses: actions/upload-artifact@v2
+        if: ${{ always() }}
+        with:
+          name: logs ${{ matrix.python-version }}
+          path: /tmp/ci-logs
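Together these hunks drop the pre-fetched Kafka (the integration suite now downloads it itself), generate `karapace/version.py` before either suite runs, and upload `/tmp/ci-logs` as a CI artifact even when tests fail. A local reproduction of the test steps, assuming `requirements-dev.txt` is installed:

```sh
make karapace/version.py        # the generated module both suites import
python3 -m pytest -s -vvv tests/unit/
python3 -m pytest -s -vvv tests/integration/ \
    --log-dir=/tmp/ci-logs --log-file=/tmp/ci-logs/pytest.log
```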
19 changes: 12 additions & 7 deletions CONTRIBUTING.md
@@ -47,24 +47,29 @@ In Fedora® distributions you can install it using:
 dnf install protobuf-compiler
 ```

-To run the tests use `make`. It will download Kafka to be used in the tests for you:
+To run the tests use the binary `pytest` available in the virtualenv. It will download Kafka to be
+used in the tests for you:

 ```sh
-make unittest
-make integrationtest
+make karapace/version.py
+pytest tests/unit
+pytest tests/integration
 ```

-### PyCharm
+The integration tests can be configured with the use of a few parameters:

-If you want to run the tests from within the IDE, first download Kafka using `make fetch-kafka`, and
-use the project root as the working directory.
+- `--kafka-version`: changes the version of the Kafka server used by the tests. Example
+  versions: `2.7.2`, `2.8.1`, `3.0.0`.
+- `--kafka-bootstrap-servers`: a comma-separated list of servers. This option allows using an
+  external server (the tests won't start one for you).
+
+Other options can be seen with `pytest tests/integration --help`

 ## Static checking and Linting

 The code is statically checked and formatted using [a few
 tools](https://github.com/aiven/karapace/blob/master/requirements-dev.txt). To run these
 automatically on each commit please enable the [pre-commit](https://pre-commit.com) hooks.
-Alternatively you can run it manually with `make pre-commit`.

 ## Manual testing

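A couple of hedged invocations of the newly documented flags (version numbers and addresses are illustrative):

```sh
# Run against a newer, suite-managed Kafka:
pytest tests/integration --kafka-version 3.0.0

# Reuse an already-running cluster instead of starting one:
pytest tests/integration --kafka-bootstrap-servers localhost:9092,localhost:9093
```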
88 changes: 1 addition & 87 deletions Makefile
@@ -1,23 +1,6 @@
 SHORT_VER = $(shell git describe --tags --abbrev=0 | cut -f1-)
 LONG_VER = $(shell git describe --long 2>/dev/null || echo $(SHORT_VER)-0-unknown-g`git describe --always`)
-KAFKA_PATH = kafka_$(SCALA_VERSION)-$(KAFKA_VERSION)
-KAFKA_TAR = $(KAFKA_PATH).tgz
-PYTHON_SOURCE_DIRS = karapace/
-PYTHON_TEST_DIRS = tests/
-ALL_PYTHON_DIRS = $(PYTHON_SOURCE_DIRS) $(PYTHON_TEST_DIRS)
-GENERATED = karapace/version.py
 PYTHON = python3
 DNF_INSTALL = sudo dnf install -y

-# Keep these is sync with tests/integration/conftest.py
-KAFKA_VERSION=2.7.0
-SCALA_VERSION=2.13
-
-KAFKA_IMAGE = karapace-test-kafka
-ZK = 2181
-KAFKA = 9092
-
-default: $(GENERATED)
+default: karapace/version.py

clean:
# remove all the versions of kafka
@@ -30,78 +13,9 @@ clean:
# delete generate files
rm karapace/version.py

-.PHONY: $(KAFKA_IMAGE)
-$(KAFKA_IMAGE):
-	podman build -t $(KAFKA_IMAGE) -f container/Dockerfile .
-
-.PHONY: start-$(KAFKA_IMAGE)
-start-$(KAFKA_IMAGE):
-	@podman run -d --rm -p $(ZK):$(ZK) -p $(KAFKA):$(KAFKA) -p $(REGISTRY):$(REGISTRY) -p $(REST):$(REST) $(KAFKA_IMAGE) "all"
-	@podman ps

 karapace/version.py: version.py
 	$(PYTHON) $^ $@

-$(KAFKA_TAR):
-	wget "https://archive.apache.org/dist/kafka/$(KAFKA_VERSION)/$(KAFKA_PATH).tgz"
-
-$(KAFKA_PATH): $(KAFKA_TAR)
-	tar zxf "$(KAFKA_TAR)"
-
-.PHONY: fetch-kafka
-fetch-kafka: $(KAFKA_PATH)
-
-.PHONY: start-kafka
-start-kafka: fetch-kafka
-	$(KAFKA_PATH)/bin/zookeeper-server-start.sh $(KAFKA_PATH)/config/zookeeper.properties &
-	$(KAFKA_PATH)/bin/kafka-server-start.sh $(KAFKA_PATH)/config/server.properties &
-
-.PHONY: stop-kafka
-stop-kafka:
-	$(KAFKA_PATH)/bin/kafka-server-stop.sh 9 || true
-	$(KAFKA_PATH)/bin/zookeeper-server-stop.sh 9 || true
-	rm -rf /tmp/kafka-logs /tmp/zookeeper
-
-.PHONY: kafka
-kafka: start-kafka
-
-.PHONY: black
-black: $(GENERATED)
-	pre-commit run black --all-files
-
-.PHONY: pylint
-pylint: $(GENERATED)
-	pre-commit run pylint --all-files
-
-.PHONY: flake8
-flake8: $(GENERATED)
-	pre-commit run flake8 --all-files

 .PHONY: copyright
 copyright:
 	grep -EL "Copyright \(c\) 20.* Aiven" $(shell git ls-files "*.py" | grep -v __init__.py)

-.PHONY: unittest
-unittest: $(GENERATED)
-	python3 -m pytest -s -vvv tests/unit/
-
-.PHONY: integrationtest
-integrationtest: fetch-kafka $(GENERATED)
-	python3 -m pytest -s -vvv tests/integration/
-
-.PHONY: test
-test: lint copyright unittest
-
-.PHONY: isort
-isort:
-	pre-commit run isort --all-files
-
-.PHONY: reformat
-reformat: isort black
-
-.PHONY: pre-commit
-pre-commit: $(GENERATED)
-	pre-commit run --all-files
-
-.PHONY: lint
-lint: pre-commit
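After the purge, the Makefile's remaining job is the `karapace/version.py` rule. In the surviving recipe, `$^` is make's automatic variable for the prerequisites (here `version.py`) and `$@` for the target, so `make karapace/version.py` amounts to the command below (a sketch; it assumes `version.py` takes the source and destination paths as its two arguments, as the rule implies):

```sh
# Equivalent of `make karapace/version.py`:
python3 version.py karapace/version.py
```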
1 change: 1 addition & 0 deletions pytest.ini
@@ -1,3 +1,4 @@
 [pytest]
 addopts = -ra -q --tb=short --showlocals --numprocesses auto
 timeout = 60
+timeout_func_only = true
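The new `timeout_func_only` flag comes from the pytest-timeout plugin: the 60-second limit then applies only to the test function body, not to fixture setup and teardown, which the integration suite uses to start Kafka. The ini values can also be overridden per run, assuming pytest-timeout and pytest-xdist are installed (both are implied by the options above):

```sh
# Hypothetical overrides of the ini settings for a slow local machine:
python3 -m pytest tests/integration/ --timeout=300 --numprocesses 2
```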
97 changes: 86 additions & 11 deletions tests/conftest.py
@@ -3,9 +3,15 @@
 from typing import List, Optional

 import pytest
+import re
 import ujson

 pytest_plugins = "aiohttp.pytest_plugin"
+KAFKA_BOOTSTRAP_SERVERS_OPT = "--kafka-bootstrap-servers"
+KAFKA_VERSION_OPT = "--kafka-version"
+KAFKA_VERSION = "2.7.0"
+LOG_DIR_OPT = "--log-dir"
+VERSION_REGEX = "([0-9]+[.])*[0-9]+"


def pytest_assertrepr_compare(op, left, right) -> Optional[List[str]]:
@@ -49,38 +55,107 @@ def split_by_comma(arg: str) -> List[str]:


 def pytest_addoption(parser, pluginmanager) -> None:  # pylint: disable=unused-argument
-    parser.addoption("--kafka-bootstrap-servers", type=split_by_comma)
-    parser.addoption("--registry-url")
-    parser.addoption("--rest-url")
-    parser.addoption("--server-ca")
+    # Configuration options for the services started by the test suite
+    parser.addoption(
+        KAFKA_VERSION_OPT,
+        default=KAFKA_VERSION,
+        help=f"Kafka version used by the test suite. (Incompatible with {KAFKA_BOOTSTRAP_SERVERS_OPT})",
+    )
+    parser.addoption(
+        LOG_DIR_OPT,
+        help=f"Directory to save Kafka/ZK logs (Incompatible with {KAFKA_BOOTSTRAP_SERVERS_OPT})",
+    )
+
+    # Configuration options for services external to the test suite
+    parser.addoption(
+        KAFKA_BOOTSTRAP_SERVERS_OPT,
+        type=split_by_comma,
+        help=(
+            f"Kafka servers to be used for testing, format is a comma-separated "
+            f"list of <server>:<port>. If provided the test suite will not start "
+            f"a Kafka server. (Incompatible with {KAFKA_VERSION_OPT})"
+        ),
+    )
+    parser.addoption(
+        "--registry-url",
+        help=(
+            "URL of a running Schema Registry instance. If provided the test "
+            "suite will not start a Schema Registry instance"
+        ),
+    )
+    parser.addoption(
+        "--rest-url",
+        help="URL of a running REST API instance. If provided the test suite will not start a REST API instance",
+    )
+    parser.addoption(
+        "--server-ca",
+        help="Certificate file used to validate the Schema Registry server.",
+    )


 @pytest.fixture(autouse=True, scope="session")
 def fixture_validate_options(request) -> None:
     """This fixture only exists to validate the custom command line flags."""
-    bootstrap_servers = request.config.getoption("kafka_bootstrap_servers")
+    kafka_bootstrap_servers = request.config.getoption("kafka_bootstrap_servers")
+    log_dir = request.config.getoption("log_dir")
+    kafka_version = request.config.getoption("kafka_version")
     registry_url = request.config.getoption("registry_url")
     rest_url = request.config.getoption("rest_url")
     server_ca = request.config.getoption("server_ca")

     has_external_registry_or_rest = registry_url or rest_url

+    if not re.match(VERSION_REGEX, kafka_version):
+        msg = f"Provided Kafka version {kafka_version} has an invalid format, it should match {VERSION_REGEX}"
+        raise ValueError(msg)
+
+    if kafka_bootstrap_servers is not None and log_dir is not None:
+        msg = f"{KAFKA_BOOTSTRAP_SERVERS_OPT} and {LOG_DIR_OPT} are incompatible options, only provide one of the two"
+        raise ValueError(msg)
+
+    if kafka_bootstrap_servers is not None and kafka_version is not None:
+        msg = f"{KAFKA_BOOTSTRAP_SERVERS_OPT} and {KAFKA_VERSION_OPT} are incompatible options, only provide one of the two"
+        raise ValueError(msg)
+
     if server_ca and not has_external_registry_or_rest:
         msg = "When using a server CA, an external registry or rest URI must also be provided."
         raise ValueError(msg)

-    if has_external_registry_or_rest and not bootstrap_servers:
+    if has_external_registry_or_rest and not kafka_bootstrap_servers:
         msg = "When using an external registry or rest, the kafka bootstrap URIs must also be provided."
         raise ValueError(msg)


-@pytest.fixture(scope="session", name="session_tmppath")
-def fixture_session_tmppath(tmp_path_factory) -> Path:
-    return tmp_path_factory.mktemp("karapace")
+@pytest.fixture(scope="session", name="session_datadir")
+def fixture_session_datadir(tmp_path_factory) -> Path:
+    """Data files generated throughout the tests should be stored here.
+
+    These files are NOT persisted.
+    """
+    return tmp_path_factory.mktemp("data")
+
+
+@pytest.fixture(scope="session", name="session_logdir")
+def fixture_session_logdir(request, tmp_path_factory, worker_id) -> Path:
+    """All useful log data for debugging should be stored here.
+
+    These files are persisted by the CI for debugging purposes.
+    """
+    log_dir = request.config.getoption("log_dir")
+
+    if log_dir is None and worker_id == "master":
+        path = tmp_path_factory.mktemp("log")
+    elif log_dir is None:
+        path = tmp_path_factory.getbasetemp().parent / "log"
+        path.mkdir(parents=True, exist_ok=True)
+    else:
+        path = Path(log_dir)
+        path.mkdir(parents=True, exist_ok=True)
+    return path


 @pytest.fixture(scope="session", name="default_config_path")
-def fixture_default_config(session_tmppath: Path) -> str:
-    path = session_tmppath / "karapace_config.json"
+def fixture_default_config(session_logdir: Path) -> str:
+    path = session_logdir / "karapace_config.json"
     path.write_text(ujson.dumps({"registry_host": "localhost", "registry_port": 8081}))
     return str(path)
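The validation fixture enforces the split between suite-managed and external services. A few hedged command lines illustrating the rules (addresses and paths are illustrative):

```sh
# Accepted: a suite-managed Kafka, with its logs persisted.
pytest tests/integration/ --kafka-version 2.8.1 --log-dir /tmp/karapace-logs

# Rejected: an external Kafka is incompatible with --log-dir, since the
# suite does not manage that server's logs.
pytest tests/integration/ --kafka-bootstrap-servers localhost:9092 --log-dir /tmp/logs

# Rejected: a server CA only makes sense with an external --registry-url
# or --rest-url.
pytest tests/integration/ --server-ca /path/to/ca.pem
```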
71 changes: 71 additions & 0 deletions tests/integration/config/log4j.properties
@@ -0,0 +1,71 @@
# Unspecified loggers and loggers with additivity=true output to server.log
# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise
log4j.rootLogger=INFO, kafkaAppender

log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log
log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

# Change the line below to adjust ZK client logging
log4j.logger.org.apache.zookeeper=INFO

# Change the two lines below to adjust the general broker logging level (output to server.log)
log4j.logger.kafka=INFO
log4j.logger.org.apache.kafka=INFO

# Change to DEBUG or TRACE to enable request logging
log4j.logger.kafka.request.logger=WARN, requestAppender
log4j.additivity.kafka.request.logger=false

# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
# related to the handling of requests
#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

log4j.logger.kafka.controller=TRACE, controllerAppender
log4j.additivity.kafka.controller=false

log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false

log4j.logger.state.change.logger=INFO, stateChangeAppender
log4j.additivity.state.change.logger=false

# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false
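This config writes each concern to its own rolling file under `${kafka.logs.dir}`, which is how the per-run Kafka/ZK logs end up in the `--log-dir` directory. A sketch of launching a broker against it by hand, assuming Kafka's standard scripts (which honor `KAFKA_LOG4J_OPTS`) and illustrative paths:

```sh
# kafka-run-class.sh picks up KAFKA_LOG4J_OPTS; the kafka.logs.dir system
# property feeds the ${kafka.logs.dir} placeholders in this properties file.
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:tests/integration/config/log4j.properties -Dkafka.logs.dir=/tmp/ci-logs"
kafka_2.13-2.7.0/bin/kafka-server-start.sh kafka_2.13-2.7.0/config/server.properties
```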
