Skip to content

Commit a98d7c6

Browse files
author
Jerome Revillard
committed
Tests
1 parent 5104524 commit a98d7c6

File tree

149 files changed

+4482
-2676
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

149 files changed

+4482
-2676
lines changed

.gitignore

+2
Original file line numberDiff line numberDiff line change
@@ -13,3 +13,5 @@ state.yaml
1313
plan.json
1414
test.py
1515
/generated/
16+
/.apt_generated/
17+
/.apt_generated_tests/

Dockerfile

+6-2
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,11 @@
1-
FROM openjdk:8-jre-slim
1+
FROM gradle:5.6.4-jdk8 AS build
2+
COPY --chown=gradle:gradle . /home/gradle/src
3+
WORKDIR /home/gradle/src
4+
RUN gradle clean build buildRelease -x test
25

6+
FROM openjdk:8-jre-slim
37
RUN apt-get update && apt-get --yes upgrade && \
48
apt-get install -y python3 python3-pip curl && \
59
rm -rf /var/lib/apt/lists/*
610

7-
COPY ./build/output/kafka-gitops /usr/local/bin/kafka-gitops
11+
COPY --from=build /home/gradle/src/build/output/kafka-gitops /usr/local/bin/kafka-gitops

build.gradle

+5
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,12 @@ dependencies {
3232
compile 'info.picocli:picocli:4.1.4'
3333

3434
implementation ('io.confluent:kafka-schema-registry-client:6.1.1')
35+
<<<<<<< HEAD
3536
implementation('com.flipkart.zjsonpatch:zjsonpatch:0.4.11')
37+
=======
38+
implementation ('io.confluent:kafka-json-schema-provider:6.1.1')
39+
implementation ('io.confluent:kafka-protobuf-serializer:6.1.1')
40+
>>>>>>> a208542 (Tests)
3641

3742
compile 'org.slf4j:slf4j-api:1.7.30'
3843
compile group: 'ch.qos.logback', name: 'logback-classic', version: '1.2.3'

docker/docker-compose.yml

+21
Original file line numberDiff line numberDiff line change
@@ -92,3 +92,24 @@ services:
9292
depends_on:
9393
- zoo1
9494

95+
schema-registry:
96+
image: confluentinc/cp-schema-registry:6.1.1
97+
hostname: schema-registry
98+
ports:
99+
- "8082:8082"
100+
environment:
101+
SCHEMA_REGISTRY_HOST_NAME: schema-registry
102+
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: "kafka1:19092,kafka2:19092,kafka3:19092"
103+
SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: SASL_PLAINTEXT
104+
SCHEMA_REGISTRY_KAFKASTORE_SASL_MECHANISM: PLAIN
105+
SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:8082"
106+
SCHEMA_REGISTRY_GROUP_ID: "schema-registry-test"
107+
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/registry_jaas.conf"
108+
SCHEMA_REGISTRY_OPTS: "-Djava.security.auth.login.config=/etc/kafka/registry_jaas.conf"
109+
volumes:
110+
- ./config/registry_jaas.conf:/etc/kafka/registry_jaas.conf
111+
depends_on:
112+
- kafka1
113+
- kafka2
114+
- kafka3
115+

src/main/java/com/devshawn/kafka/gitops/StateManager.java

+46-56
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,27 @@
11
package com.devshawn.kafka.gitops;
22

3-
import ch.qos.logback.classic.Level;
4-
import ch.qos.logback.classic.Logger;
3+
import java.util.ArrayList;
4+
import java.util.List;
5+
import java.util.Map;
6+
import java.util.NoSuchElementException;
7+
import java.util.Optional;
8+
import java.util.concurrent.atomic.AtomicInteger;
9+
import java.util.concurrent.atomic.AtomicReference;
10+
import org.slf4j.LoggerFactory;
511
import com.devshawn.kafka.gitops.config.KafkaGitopsConfigLoader;
612
import com.devshawn.kafka.gitops.config.ManagerConfig;
13+
import com.devshawn.kafka.gitops.config.SchemaRegistryConfigLoader;
714
import com.devshawn.kafka.gitops.domain.confluent.ServiceAccount;
815
import com.devshawn.kafka.gitops.domain.options.GetAclOptions;
916
import com.devshawn.kafka.gitops.domain.plan.DesiredPlan;
1017
import com.devshawn.kafka.gitops.domain.state.AclDetails;
1118
import com.devshawn.kafka.gitops.domain.state.CustomAclDetails;
1219
import com.devshawn.kafka.gitops.domain.state.DesiredState;
1320
import com.devshawn.kafka.gitops.domain.state.DesiredStateFile;
21+
import com.devshawn.kafka.gitops.domain.state.SchemaDetails;
1422
import com.devshawn.kafka.gitops.domain.state.TopicDetails;
1523
import com.devshawn.kafka.gitops.domain.state.service.KafkaStreamsService;
24+
import com.devshawn.kafka.gitops.enums.SchemaCompatibility;
1625
import com.devshawn.kafka.gitops.exception.ConfluentCloudException;
1726
import com.devshawn.kafka.gitops.exception.InvalidAclDefinitionException;
1827
import com.devshawn.kafka.gitops.exception.MissingConfigurationException;
@@ -24,30 +33,26 @@
2433
import com.devshawn.kafka.gitops.service.KafkaService;
2534
import com.devshawn.kafka.gitops.service.ParserService;
2635
import com.devshawn.kafka.gitops.service.RoleService;
36+
import com.devshawn.kafka.gitops.service.SchemaRegistryService;
2737
import com.devshawn.kafka.gitops.util.LogUtil;
2838
import com.devshawn.kafka.gitops.util.StateUtil;
2939
import com.fasterxml.jackson.core.JsonParser;
40+
import com.fasterxml.jackson.core.util.DefaultIndenter;
41+
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
3042
import com.fasterxml.jackson.databind.DeserializationFeature;
3143
import com.fasterxml.jackson.databind.ObjectMapper;
44+
import com.fasterxml.jackson.databind.SerializationFeature;
3245
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
33-
import org.slf4j.LoggerFactory;
34-
35-
import java.util.ArrayList;
36-
import java.util.List;
37-
import java.util.Map;
38-
import java.util.NoSuchElementException;
39-
import java.util.Optional;
40-
import java.util.concurrent.atomic.AtomicInteger;
41-
import java.util.concurrent.atomic.AtomicReference;
46+
import ch.qos.logback.classic.Level;
47+
import ch.qos.logback.classic.Logger;
4248

4349
public class StateManager {
4450

45-
private static org.slf4j.Logger log = LoggerFactory.getLogger(StateManager.class);
46-
4751
private final ManagerConfig managerConfig;
4852
private final ObjectMapper objectMapper;
4953
private final ParserService parserService;
5054
private final KafkaService kafkaService;
55+
private final SchemaRegistryService schemaRegistryService;
5156
private final RoleService roleService;
5257
private final ConfluentCloudService confluentCloudService;
5358

@@ -163,7 +168,7 @@ private DesiredState getDesiredState() {
163168
}
164169

165170
private void generateTopicsState(DesiredState.Builder desiredState, DesiredStateFile desiredStateFile) {
166-
Optional<Integer> defaultReplication = StateUtil.fetchReplication(desiredStateFile);
171+
Optional<Integer> defaultReplication = StateUtil.fetchDefaultTopicsReplication(desiredStateFile);
167172
if (defaultReplication.isPresent()) {
168173
desiredStateFile.getTopics().forEach((name, details) -> {
169174
Integer replication = details.getReplication().isPresent() ? details.getReplication().get() : defaultReplication.get();
@@ -175,7 +180,15 @@ private void generateTopicsState(DesiredState.Builder desiredState, DesiredState
175180
}
176181

177182
private void generateSchemasState(DesiredState.Builder desiredState, DesiredStateFile desiredStateFile) {
178-
desiredState.putAllSchemas(desiredStateFile.getSchemas());
183+
Optional<SchemaCompatibility> defaultSchemaCompatibility = StateUtil.fetchDefaultSchemasCompatibility(desiredStateFile);
184+
if (defaultSchemaCompatibility.isPresent()) {
185+
desiredStateFile.getSchemas().forEach((s, details) -> {
186+
SchemaCompatibility compatibility = details.getCompatibility().isPresent() ? details.getCompatibility().get() : defaultSchemaCompatibility.get();
187+
desiredState.putSchemas(s, new SchemaDetails.Builder().mergeFrom(details).setCompatibility(compatibility).build());
188+
});
189+
} else {
190+
desiredState.putAllSchemas(desiredStateFile.getSchemas());
191+
}
179192
}
180193

181194
private void generateConfluentCloudServiceAcls(DesiredState.Builder desiredState, DesiredStateFile desiredStateFile) {
@@ -316,7 +329,7 @@ private void validateCustomAcls(DesiredStateFile desiredStateFile) {
316329
}
317330

318331
private void validateTopics(DesiredStateFile desiredStateFile) {
319-
Optional<Integer> defaultReplication = StateUtil.fetchReplication(desiredStateFile);
332+
Optional<Integer> defaultReplication = StateUtil.fetchDefaultTopicsReplication(desiredStateFile);
320333
if (!defaultReplication.isPresent()) {
321334
desiredStateFile.getTopics().forEach((name, details) -> {
322335
if (!details.getReplication().isPresent()) {
@@ -331,42 +344,13 @@ private void validateTopics(DesiredStateFile desiredStateFile) {
331344
}
332345

333346
private void validateSchemas(DesiredStateFile desiredStateFile) {
334-
if (!desiredStateFile.getSchemas().isEmpty()) {
335-
SchemaRegistryConfig schemaRegistryConfig = SchemaRegistryConfigLoader.load();
336-
desiredStateFile.getSchemas().forEach((s, schemaDetails) -> {
337-
if (!schemaDetails.getType().equalsIgnoreCase("Avro")) {
338-
throw new ValidationException(String.format("Schema type %s is currently not supported.", schemaDetails.getType()));
339-
}
340-
if (!Files.exists(Paths.get(schemaRegistryConfig.getConfig().get("SCHEMA_DIRECTORY") + "/" + schemaDetails.getFile()))) {
341-
throw new ValidationException(String.format("Schema file %s not found in schema directory at %s", schemaDetails.getFile(), schemaRegistryConfig.getConfig().get("SCHEMA_DIRECTORY")));
342-
}
343-
if (schemaDetails.getType().equalsIgnoreCase("Avro")) {
344-
AvroSchemaProvider avroSchemaProvider = new AvroSchemaProvider();
345-
if (schemaDetails.getReferences().isEmpty() && schemaDetails.getType().equalsIgnoreCase("Avro")) {
346-
Optional<ParsedSchema> parsedSchema = avroSchemaProvider.parseSchema(schemaRegistryService.loadSchemaFromDisk(schemaDetails.getFile()), Collections.emptyList());
347-
if (!parsedSchema.isPresent()) {
348-
throw new ValidationException(String.format("Avro schema %s could not be parsed.", schemaDetails.getFile()));
349-
}
350-
} else {
351-
List<SchemaReference> schemaReferences = new ArrayList<>();
352-
schemaDetails.getReferences().forEach(referenceDetails -> {
353-
SchemaReference schemaReference = new SchemaReference(referenceDetails.getName(), referenceDetails.getSubject(), referenceDetails.getVersion());
354-
schemaReferences.add(schemaReference);
355-
});
356-
// we need to pass a schema registry client as a config because the underlying code validates against the current state
357-
avroSchemaProvider.configure(Collections.singletonMap(SchemaProvider.SCHEMA_VERSION_FETCHER_CONFIG, schemaRegistryService.createSchemaRegistryClient()));
358-
try {
359-
Optional<ParsedSchema> parsedSchema = avroSchemaProvider.parseSchema(schemaRegistryService.loadSchemaFromDisk(schemaDetails.getFile()), schemaReferences);
360-
if (!parsedSchema.isPresent()) {
361-
throw new ValidationException(String.format("Avro schema %s could not be parsed.", schemaDetails.getFile()));
362-
}
363-
} catch (IllegalStateException ex) {
364-
throw new ValidationException(String.format("Reference validation error: %s", ex.getMessage()));
365-
} catch (RuntimeException ex) {
366-
throw new ValidationException(String.format("Error thrown when attempting to validate schema with reference", ex.getMessage()));
367-
}
368-
}
347+
Optional<SchemaCompatibility> defaultSchemaCompatibility = StateUtil.fetchDefaultSchemasCompatibility(desiredStateFile);
348+
if (!defaultSchemaCompatibility.isPresent()) {
349+
desiredStateFile.getSchemas().forEach((subject, details) -> {
350+
if (!details.getCompatibility().isPresent()) {
351+
throw new ValidationException(String.format("Not set: [compatibility] in state file definition: schema -> %s", subject));
369352
}
353+
schemaRegistryService.validateSchema(subject, details);
370354
});
371355
}
372356
}
@@ -379,11 +363,17 @@ private boolean isConfluentCloudEnabled(DesiredStateFile desiredStateFile) {
379363
}
380364

381365
private ObjectMapper initializeObjectMapper() {
382-
ObjectMapper objectMapper = new ObjectMapper();
383-
objectMapper.enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES);
384-
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
385-
objectMapper.registerModule(new Jdk8Module());
386-
return objectMapper;
366+
ObjectMapper gitopsObjectMapper = new ObjectMapper();
367+
gitopsObjectMapper.enable(SerializationFeature.INDENT_OUTPUT);
368+
gitopsObjectMapper.enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES);
369+
gitopsObjectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
370+
gitopsObjectMapper.registerModule(new Jdk8Module());
371+
DefaultIndenter defaultIndenter = new DefaultIndenter(" ", DefaultIndenter.SYS_LF);
372+
DefaultPrettyPrinter printer = new DefaultPrettyPrinter()
373+
.withObjectIndenter(defaultIndenter)
374+
.withArrayIndenter(defaultIndenter);
375+
gitopsObjectMapper.setDefaultPrettyPrinter(printer);
376+
return gitopsObjectMapper;
387377
}
388378

389379
private void initializeLogger(boolean verbose) {

src/main/java/com/devshawn/kafka/gitops/config/SchemaRegistryConfigLoader.java

+26-19
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,14 @@ public class SchemaRegistryConfigLoader {
1212

1313
private static org.slf4j.Logger log = LoggerFactory.getLogger(SchemaRegistryConfigLoader.class);
1414

15+
public static final String SCHEMA_REGISTRY_URL_KEY = "SCHEMA_REGISTRY_URL";
16+
public static final String SCHEMA_DIRECTORY_KEY = "SCHEMA_DIRECTORY";
17+
public static final String SCHEMA_REGISTRY_SASL_JAAS_USERNAME_KEY = "SCHEMA_REGISTRY_SASL_JAAS_USERNAME";
18+
public static final String SCHEMA_REGISTRY_SASL_JAAS_PASSWORD_KEY = "SCHEMA_REGISTRY_SASL_JAAS_PASSWORD";
19+
public static final String SCHEMA_REGISTRY_SASL_CONFIG_KEY = "SCHEMA_REGISTRY_SASL_CONFIG";
20+
21+
private SchemaRegistryConfigLoader() {}
22+
1523
public static SchemaRegistryConfig load() {
1624
SchemaRegistryConfig.Builder builder = new SchemaRegistryConfig.Builder();
1725
setConfig(builder);
@@ -26,14 +34,14 @@ private static void setConfig(SchemaRegistryConfig.Builder builder) {
2634
Map<String, String> environment = System.getenv();
2735

2836
environment.forEach((key, value) -> {
29-
if (key.equals("SCHEMA_REGISTRY_SASL_JAAS_USERNAME")) {
37+
if (key.equals(SCHEMA_REGISTRY_SASL_JAAS_USERNAME_KEY)) {
3038
username.set(value);
31-
} else if (key.equals("SCHEMA_REGISTRY_SASL_JAAS_PASSWORD")) {
39+
} else if (key.equals(SCHEMA_REGISTRY_SASL_JAAS_PASSWORD_KEY)) {
3240
password.set(value);
33-
} else if (key.equals("SCHEMA_REGISTRY_URL")) {
34-
config.put("SCHEMA_REGISTRY_URL", value);
35-
} else if (key.equals("SCHEMA_DIRECTORY")) {
36-
config.put("SCHEMA_DIRECTORY", value);
41+
} else if (key.equals(SCHEMA_REGISTRY_URL_KEY)) {
42+
config.put(SCHEMA_REGISTRY_URL_KEY, value);
43+
} else if (key.equals(SCHEMA_DIRECTORY_KEY)) {
44+
config.put(SCHEMA_DIRECTORY_KEY, value);
3745
}
3846
});
3947

@@ -48,13 +56,13 @@ private static void setConfig(SchemaRegistryConfig.Builder builder) {
4856
private static void handleDefaultConfig(Map<String, Object> config) {
4957
final String DEFAULT_URL = "http://localhost:8081";
5058
final String CURRENT_WORKING_DIR = System.getProperty("user.dir");
51-
if (!config.containsKey("SCHEMA_REGISTRY_URL")) {
52-
log.info("SCHEMA_REGISTRY_URL not set. Using default value of {}", DEFAULT_URL);
53-
config.put("SCHEMA_REGISTRY_URL", DEFAULT_URL);
59+
if (!config.containsKey(SCHEMA_REGISTRY_URL_KEY)) {
60+
log.info("{} not set. Using default value of {}", SCHEMA_REGISTRY_URL_KEY, DEFAULT_URL);
61+
config.put(SCHEMA_REGISTRY_URL_KEY, DEFAULT_URL);
5462
}
55-
if (!config.containsKey("SCHEMA_DIRECTORY")) {
56-
log.info("SCHEMA_DIRECTORY not set. Defaulting to current working directory: {}", CURRENT_WORKING_DIR);
57-
config.put("SCHEMA_DIRECTORY", CURRENT_WORKING_DIR);
63+
if (!config.containsKey(SCHEMA_DIRECTORY_KEY)) {
64+
log.info("{} not set. Defaulting to current working directory: {}", SCHEMA_DIRECTORY_KEY, CURRENT_WORKING_DIR);
65+
config.put(SCHEMA_DIRECTORY_KEY, CURRENT_WORKING_DIR);
5866
}
5967
}
6068

@@ -63,13 +71,12 @@ private static void handleAuthentication(AtomicReference<String> username, Atomi
6371
String loginModule = "org.apache.kafka.common.security.plain.PlainLoginModule";
6472
String value = String.format("%s required username=\"%s\" password=\"%s\";",
6573
loginModule, escape(username.get()), escape(password.get()));
66-
config.put("SCHEMA_REGISTRY_SASL_CONFIG", value);
67-
} else if (username.get() != null) {
68-
throw new MissingConfigurationException("SCHEMA_REGISTRY_SASL_JAAS_USERNAME");
69-
} else if (password.get() != null) {
70-
throw new MissingConfigurationException("SCHEMA_REGISTRY_SASL_JAAS_PASSWORD");
71-
} else if (username.get() == null & password.get() == null) {
72-
throw new MissingMultipleConfigurationException("SCHEMA_REGISTRY_SASL_JAAS_PASSWORD", "SCHEMA_REGISTRY_SASL_JAAS_USERNAME");
74+
config.put(SCHEMA_REGISTRY_SASL_CONFIG_KEY, value);
75+
} else {
76+
if(config.get(SCHEMA_REGISTRY_SASL_CONFIG_KEY) == null) {
77+
log.info("{} or {} not set. No authentication configured for the Schema Registry",
78+
SCHEMA_REGISTRY_SASL_JAAS_USERNAME_KEY, SCHEMA_REGISTRY_SASL_JAAS_PASSWORD_KEY);
79+
}
7380
}
7481
}
7582

0 commit comments

Comments
 (0)