Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ public static class Cluster {
List<@Valid Masking> masking;

AuditProperties audit;

}

@Data
Expand Down Expand Up @@ -113,6 +114,7 @@ public static class ConnectCluster {
public static class SchemaRegistryAuth {
String username;
String password;
String bearerAuthCustomProviderClass;
}

@Data
Expand Down
152 changes: 97 additions & 55 deletions api/src/main/java/io/kafbat/ui/controller/SchemasController.java
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

import io.kafbat.ui.api.SchemasApi;
import io.kafbat.ui.exception.ValidationException;
import io.kafbat.ui.mapper.GcpKafkaSrMapper;
import io.kafbat.ui.mapper.GcpKafkaSrMapperImpl;
import io.kafbat.ui.mapper.KafkaSrMapper;
import io.kafbat.ui.mapper.KafkaSrMapperImpl;
import io.kafbat.ui.model.CompatibilityCheckResponseDTO;
Expand All @@ -12,6 +14,7 @@
import io.kafbat.ui.model.SchemaSubjectsResponseDTO;
import io.kafbat.ui.model.rbac.AccessContext;
import io.kafbat.ui.model.rbac.permission.SchemaAction;
import io.kafbat.ui.service.GcpSchemaRegistryService;
import io.kafbat.ui.service.SchemaRegistryService;
import io.kafbat.ui.service.mcp.McpTool;
import java.util.List;
Expand All @@ -34,18 +37,22 @@ public class SchemasController extends AbstractController implements SchemasApi,
private static final Integer DEFAULT_PAGE_SIZE = 25;

private final KafkaSrMapper kafkaSrMapper = new KafkaSrMapperImpl();
private final GcpKafkaSrMapper gcpKafkaSrMapper = new GcpKafkaSrMapperImpl();

private final SchemaRegistryService schemaRegistryService;
private final GcpSchemaRegistryService gcpSchemaRegistryService;

@Override
protected KafkaCluster getCluster(String clusterName) {
var c = super.getCluster(clusterName);
if (c.getSchemaRegistryClient() == null) {
if (c.getSchemaRegistryClient() == null && c.getGcpSchemaRegistryClient() == null) {
throw new ValidationException("Schema Registry is not set for cluster " + clusterName);
}
return c;
}



@Override
public Mono<ResponseEntity<CompatibilityCheckResponseDTO>> checkSchemaCompatibility(
String clusterName, String subject, @Valid Mono<NewSchemaSubjectDTO> newSchemaSubjectMono,
Expand All @@ -57,13 +64,16 @@ public Mono<ResponseEntity<CompatibilityCheckResponseDTO>> checkSchemaCompatibil
.build();

return validateAccess(context).then(
newSchemaSubjectMono.flatMap(subjectDTO ->
schemaRegistryService.checksSchemaCompatibility(
getCluster(clusterName),
subject,
kafkaSrMapper.fromDto(subjectDTO)
))
.map(kafkaSrMapper::toDto)
newSchemaSubjectMono.flatMap(subjectDTO -> {
var cluster = getCluster(clusterName);
return cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.checksSchemaCompatibility(
cluster, subject, gcpKafkaSrMapper.fromDto(subjectDTO))
.map(gcpKafkaSrMapper::toDto) :
schemaRegistryService.checksSchemaCompatibility(
cluster, subject, kafkaSrMapper.fromDto(subjectDTO))
.map(kafkaSrMapper::toDto);
})
.map(ResponseEntity::ok)
).doOnEach(sig -> audit(context, sig));
}
Expand All @@ -73,22 +83,23 @@ public Mono<ResponseEntity<SchemaSubjectDTO>> createNewSchema(
String clusterName, @Valid Mono<NewSchemaSubjectDTO> newSchemaSubjectMono,
ServerWebExchange exchange) {
return newSchemaSubjectMono.flatMap(newSubject -> {
var context = AccessContext.builder()
.cluster(clusterName)
.schemaActions(newSubject.getSubject(), SchemaAction.CREATE)
.operationName("createNewSchema")
.build();
return validateAccess(context).then(
var context = AccessContext.builder()
.cluster(clusterName)
.schemaActions(newSubject.getSubject(), SchemaAction.CREATE)
.operationName("createNewSchema")
.build();
var cluster = getCluster(clusterName);
return validateAccess(context).then(
cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.registerNewSchema(
cluster, newSubject.getSubject(), gcpKafkaSrMapper.fromDto(newSubject))
.map(gcpKafkaSrMapper::toDto) :
schemaRegistryService.registerNewSchema(
getCluster(clusterName),
newSubject.getSubject(),
kafkaSrMapper.fromDto(newSubject)
))
.map(kafkaSrMapper::toDto)
.map(ResponseEntity::ok)
.doOnEach(sig -> audit(context, sig));
}
);
cluster, newSubject.getSubject(), kafkaSrMapper.fromDto(newSubject))
.map(kafkaSrMapper::toDto))
.map(ResponseEntity::ok)
.doOnEach(sig -> audit(context, sig));
});
}

@Override
Expand All @@ -100,8 +111,11 @@ public Mono<ResponseEntity<Void>> deleteLatestSchema(
.operationName("deleteLatestSchema")
.build();

var cluster = getCluster(clusterName);
return validateAccess(context).then(
schemaRegistryService.deleteLatestSchemaSubject(getCluster(clusterName), subject)
(cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.deleteLatestSchemaSubject(cluster, subject) :
schemaRegistryService.deleteLatestSchemaSubject(cluster, subject))
.doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build())
);
Expand All @@ -116,8 +130,11 @@ public Mono<ResponseEntity<Void>> deleteSchema(
.operationName("deleteSchema")
.build();

var cluster = getCluster(clusterName);
return validateAccess(context).then(
schemaRegistryService.deleteSchemaSubjectEntirely(getCluster(clusterName), subject)
(cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.deleteSchemaSubjectEntirely(cluster, subject) :
schemaRegistryService.deleteSchemaSubjectEntirely(cluster, subject))
.doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build())
);
Expand All @@ -132,8 +149,11 @@ public Mono<ResponseEntity<Void>> deleteSchemaByVersion(
.operationName("deleteSchemaByVersion")
.build();

var cluster = getCluster(clusterName);
return validateAccess(context).then(
schemaRegistryService.deleteSchemaSubjectByVersion(getCluster(clusterName), subjectName, version)
(cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.deleteSchemaSubjectByVersion(cluster, subjectName, version) :
schemaRegistryService.deleteSchemaSubjectByVersion(cluster, subjectName, version))
.doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build())
);
Expand All @@ -148,9 +168,10 @@ public Mono<ResponseEntity<Flux<SchemaSubjectDTO>>> getAllVersionsBySubject(
.operationName("getAllVersionsBySubject")
.build();

Flux<SchemaSubjectDTO> schemas =
schemaRegistryService.getAllVersionsBySubject(getCluster(clusterName), subjectName)
.map(kafkaSrMapper::toDto);
var cluster = getCluster(clusterName);
Flux<SchemaSubjectDTO> schemas = cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.getAllVersionsBySubject(cluster, subjectName).map(gcpKafkaSrMapper::toDto) :
schemaRegistryService.getAllVersionsBySubject(cluster, subjectName).map(kafkaSrMapper::toDto);

return validateAccess(context)
.thenReturn(ResponseEntity.ok(schemas))
Expand All @@ -160,8 +181,12 @@ public Mono<ResponseEntity<Flux<SchemaSubjectDTO>>> getAllVersionsBySubject(
@Override
public Mono<ResponseEntity<CompatibilityLevelDTO>> getGlobalSchemaCompatibilityLevel(
String clusterName, ServerWebExchange exchange) {
return schemaRegistryService.getGlobalSchemaCompatibilityLevel(getCluster(clusterName))
.map(c -> new CompatibilityLevelDTO().compatibility(kafkaSrMapper.toDto(c)))
var cluster = getCluster(clusterName);
return (cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.getGlobalSchemaCompatibilityLevel(cluster)
.map(c -> new CompatibilityLevelDTO().compatibility(gcpKafkaSrMapper.toDto(c))) :
schemaRegistryService.getGlobalSchemaCompatibilityLevel(cluster)
.map(c -> new CompatibilityLevelDTO().compatibility(kafkaSrMapper.toDto(c))))
.map(ResponseEntity::ok)
.defaultIfEmpty(ResponseEntity.notFound().build());
}
Expand All @@ -176,9 +201,13 @@ public Mono<ResponseEntity<SchemaSubjectDTO>> getLatestSchema(String clusterName
.operationName("getLatestSchema")
.build();

var cluster = getCluster(clusterName);
return validateAccess(context).then(
schemaRegistryService.getLatestSchemaVersionBySubject(getCluster(clusterName), subject)
.map(kafkaSrMapper::toDto)
(cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.getLatestSchemaVersionBySubject(cluster, subject)
.map(gcpKafkaSrMapper::toDto) :
schemaRegistryService.getLatestSchemaVersionBySubject(cluster, subject)
.map(kafkaSrMapper::toDto))
.map(ResponseEntity::ok)
).doOnEach(sig -> audit(context, sig));
}
Expand All @@ -193,10 +222,13 @@ public Mono<ResponseEntity<SchemaSubjectDTO>> getSchemaByVersion(
.operationParams(Map.of("subject", subject, "version", version))
.build();

var cluster = getCluster(clusterName);
return validateAccess(context).then(
schemaRegistryService.getSchemaSubjectByVersion(
getCluster(clusterName), subject, version)
.map(kafkaSrMapper::toDto)
(cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.getSchemaSubjectByVersion(cluster, subject, version)
.map(gcpKafkaSrMapper::toDto) :
schemaRegistryService.getSchemaSubjectByVersion(cluster, subject, version)
.map(kafkaSrMapper::toDto))
.map(ResponseEntity::ok)
).doOnEach(sig -> audit(context, sig));
}
Expand All @@ -212,8 +244,10 @@ public Mono<ResponseEntity<SchemaSubjectsResponseDTO>> getSchemas(String cluster
.operationName("getSchemas")
.build();

return schemaRegistryService
.getAllSubjectNames(getCluster(clusterName))
var cluster = getCluster(clusterName);
return (cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.getAllSubjectNames(cluster) :
schemaRegistryService.getAllSubjectNames(cluster))
.flatMapIterable(l -> l)
.filterWhen(schema -> accessControlService.isSchemaAccessible(schema, clusterName))
.collectList()
Expand All @@ -230,9 +264,14 @@ public Mono<ResponseEntity<SchemaSubjectsResponseDTO>> getSchemas(String cluster
.skip(subjectToSkip)
.limit(pageSize)
.toList();
return schemaRegistryService.getAllLatestVersionSchemas(getCluster(clusterName), subjectsToRender)
.map(subjs -> subjs.stream().map(kafkaSrMapper::toDto).toList())
.map(subjs -> new SchemaSubjectsResponseDTO().pageCount(totalPages).schemas(subjs));
return (cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.getAllLatestVersionSchemas(cluster, subjectsToRender)
.map(subjs -> subjs.stream()
.map(gcpKafkaSrMapper::toDto).toList()) :
schemaRegistryService.getAllLatestVersionSchemas(cluster, subjectsToRender)
.map(subjs -> subjs.stream().map(kafkaSrMapper::toDto).toList()))
.map(subjs -> new SchemaSubjectsResponseDTO()
.pageCount(totalPages).schemas(subjs));
}).map(ResponseEntity::ok)
.doOnEach(sig -> audit(context, sig));
}
Expand All @@ -247,13 +286,15 @@ public Mono<ResponseEntity<Void>> updateGlobalSchemaCompatibilityLevel(
.operationName("updateGlobalSchemaCompatibilityLevel")
.build();

var cluster = getCluster(clusterName);
return validateAccess(context).then(
compatibilityLevelMono
.flatMap(compatibilityLevelDTO ->
schemaRegistryService.updateGlobalSchemaCompatibility(
getCluster(clusterName),
kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())
))
cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.updateGlobalSchemaCompatibility(
cluster, gcpKafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())) :
schemaRegistryService.updateGlobalSchemaCompatibility(
cluster, kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())))
.doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build())
);
Expand All @@ -271,15 +312,16 @@ public Mono<ResponseEntity<Void>> updateSchemaCompatibilityLevel(
.operationParams(Map.of("subject", subject))
.build();

return compatibilityLevelMono.flatMap(compatibilityLevelDTO ->
validateAccess(context).then(
schemaRegistryService.updateSchemaCompatibility(
getCluster(clusterName),
subject,
kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())
))
.doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build())
);
return compatibilityLevelMono.flatMap(compatibilityLevelDTO -> {
var cluster = getCluster(clusterName);
return validateAccess(context).then(
cluster.isGcpSchemaRegistryEnabled()
? gcpSchemaRegistryService.updateSchemaCompatibility(
cluster, subject, gcpKafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())) :
schemaRegistryService.updateSchemaCompatibility(
cluster, subject, kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())))
.doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build());
});
}
}
48 changes: 48 additions & 0 deletions api/src/main/java/io/kafbat/ui/mapper/GcpKafkaSrMapper.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
package io.kafbat.ui.mapper;

import io.kafbat.ui.model.CompatibilityCheckResponseDTO;
import io.kafbat.ui.model.CompatibilityLevelDTO;
import io.kafbat.ui.model.NewSchemaSubjectDTO;
import io.kafbat.ui.model.SchemaReferenceDTO;
import io.kafbat.ui.model.SchemaSubjectDTO;
import io.kafbat.ui.model.SchemaTypeDTO;
import io.kafbat.ui.service.GcpSchemaRegistryService;
import java.util.List;
import java.util.Optional;
import org.mapstruct.Mapper;

/**
 * MapStruct mapper between GCP Schema Registry model classes
 * ({@code io.kafbat.ui.gcp.sr.model.*}) and the API DTOs.
 *
 * <p>All methods except the {@code default} one are implemented by the
 * MapStruct-generated {@code GcpKafkaSrMapperImpl}; their signatures drive
 * code generation, so they must not be changed casually.
 */
@Mapper
public interface GcpKafkaSrMapper {

  /**
   * Converts a GCP subject (schema version plus its compatibility level)
   * into a {@link SchemaSubjectDTO}.
   *
   * <p>A {@code null} schema type defaults to AVRO, mirroring the Schema
   * Registry convention; a {@code null} compatibility maps to a
   * {@code null} compatibility level on the DTO.
   */
  default SchemaSubjectDTO toDto(GcpSchemaRegistryService.SubjectWithCompatibilityLevel s) {
    return new SchemaSubjectDTO()
        .id(s.getId())
        .version(s.getVersion())
        .subject(s.getSubject())
        .schema(s.getSchema())
        .schemaType(SchemaTypeDTO.fromValue(
            Optional.ofNullable(s.getSchemaType())
                .orElse(io.kafbat.ui.gcp.sr.model.SchemaType.AVRO)
                .getValue()))
        .references(toDto(s.getReferences()))
        .compatibilityLevel(Optional.ofNullable(s.getCompatibility())
            .map(Object::toString).orElse(null));
  }

  /** Converts a list of GCP schema references to DTO references (generated). */
  List<SchemaReferenceDTO> toDto(List<io.kafbat.ui.gcp.sr.model.SchemaReference> references);

  /** Converts a GCP compatibility-check result to its DTO (generated). */
  CompatibilityCheckResponseDTO toDto(io.kafbat.ui.gcp.sr.model.CompatibilityCheckResponse ccr);

  /** Converts a GCP compatibility level to the DTO enum (generated). */
  CompatibilityLevelDTO.CompatibilityEnum toDto(io.kafbat.ui.gcp.sr.model.Compatibility compatibility);

  /** Converts a new-schema DTO to the GCP registration payload (generated). */
  io.kafbat.ui.gcp.sr.model.NewSubject fromDto(NewSchemaSubjectDTO subjectDto);

  /** Converts the DTO compatibility enum to the GCP model enum (generated). */
  io.kafbat.ui.gcp.sr.model.Compatibility fromDto(CompatibilityLevelDTO.CompatibilityEnum dtoEnum);

}
3 changes: 3 additions & 0 deletions api/src/main/java/io/kafbat/ui/model/KafkaCluster.java
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import io.kafbat.ui.config.ClustersProperties;
import io.kafbat.ui.connect.api.KafkaConnectClientApi;
import io.kafbat.ui.emitter.PollingSettings;
import io.kafbat.ui.gcp.sr.api.KafkaGcpSrClientApi;
import io.kafbat.ui.service.ksql.KsqlApiClient;
import io.kafbat.ui.service.masking.DataMasking;
import io.kafbat.ui.sr.api.KafkaSrClientApi;
Expand Down Expand Up @@ -31,6 +32,8 @@ public class KafkaCluster {
private final DataMasking masking;
private final PollingSettings pollingSettings;
private final ReactiveFailover<KafkaSrClientApi> schemaRegistryClient;
private final ReactiveFailover<KafkaGcpSrClientApi> gcpSchemaRegistryClient;
private final boolean isGcpSchemaRegistryEnabled;
private final Map<String, ReactiveFailover<KafkaConnectClientApi>> connectsClients;
private final ReactiveFailover<KsqlApiClient> ksqlClient;
}
Loading
Loading